id stringlengths 28 33 | content stringlengths 14 265k ⌀ | max_stars_repo_path stringlengths 49 55 |
|---|---|---|
crossvul-python_data_bad_3920_1 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Wrapper over a QWebEngineView."""
import math
import functools
import re
import html as html_utils
import typing
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, Qt, QPoint, QPointF, QUrl,
QTimer, QObject)
from PyQt5.QtNetwork import QAuthenticator
from PyQt5.QtWidgets import QApplication, QWidget
from PyQt5.QtWebEngineWidgets import QWebEnginePage, QWebEngineScript
from qutebrowser.config import configdata, config
from qutebrowser.browser import (browsertab, eventfilter, shared, webelem,
history, greasemonkey)
from qutebrowser.browser.webengine import (webview, webengineelem, tabhistory,
interceptor, webenginequtescheme,
cookies, webenginedownloads,
webenginesettings, certificateerror)
from qutebrowser.misc import miscwidgets, objects
from qutebrowser.utils import (usertypes, qtutils, log, javascript, utils,
message, objreg, jinja, debug)
from qutebrowser.qt import sip
# Module-level reference to the qute://* scheme handler (see the note in
# init() for why this must be kept alive).
_qute_scheme_handler = None


def init():
    """Initialize QtWebEngine-specific modules.

    Installs the qute://* scheme handler, the request interceptor, the
    download manager and the cookie filter on the default profile (and the
    private profile, if one exists), and connects history-clearing signals
    so visited links are purged together with the web history.
    """
    # For some reason we need to keep a reference, otherwise the scheme handler
    # won't work...
    # https://www.riverbankcomputing.com/pipermail/pyqt/2016-September/038075.html
    global _qute_scheme_handler
    app = QApplication.instance()

    log.init.debug("Initializing qute://* handler...")
    _qute_scheme_handler = webenginequtescheme.QuteSchemeHandler(parent=app)
    _qute_scheme_handler.install(webenginesettings.default_profile)
    if webenginesettings.private_profile:
        _qute_scheme_handler.install(webenginesettings.private_profile)

    log.init.debug("Initializing request interceptor...")
    req_interceptor = interceptor.RequestInterceptor(parent=app)
    req_interceptor.install(webenginesettings.default_profile)
    if webenginesettings.private_profile:
        req_interceptor.install(webenginesettings.private_profile)

    log.init.debug("Initializing QtWebEngine downloads...")
    download_manager = webenginedownloads.DownloadManager(parent=app)
    download_manager.install(webenginesettings.default_profile)
    if webenginesettings.private_profile:
        download_manager.install(webenginesettings.private_profile)
    objreg.register('webengine-download-manager', download_manager)

    log.init.debug("Initializing cookie filter...")
    cookies.install_filter(webenginesettings.default_profile)
    if webenginesettings.private_profile:
        cookies.install_filter(webenginesettings.private_profile)

    # Clear visited links on web history clear
    for p in [webenginesettings.default_profile,
              webenginesettings.private_profile]:
        if not p:
            continue
        history.web_history.history_cleared.connect(p.clearAllVisitedLinks)
        # profile=p binds the loop variable at definition time, avoiding the
        # late-binding closure pitfall.
        history.web_history.url_cleared.connect(
            lambda url, profile=p: profile.clearVisitedLinks([url]))
# Mapping worlds from usertypes.JsWorld to QWebEngineScript world IDs.
# Note that jseval gets its own world one past UserWorld, presumably to keep
# ad-hoc :jseval code isolated from regular user-world scripts — confirm
# against the jseval command implementation.
_JS_WORLD_MAP = {
    usertypes.JsWorld.main: QWebEngineScript.MainWorld,
    usertypes.JsWorld.application: QWebEngineScript.ApplicationWorld,
    usertypes.JsWorld.user: QWebEngineScript.UserWorld,
    usertypes.JsWorld.jseval: QWebEngineScript.UserWorld + 1,
}
class WebEngineAction(browsertab.AbstractAction):

    """QtWebEngine implementations related to web actions."""

    action_class = QWebEnginePage
    action_base = QWebEnginePage.WebAction

    def exit_fullscreen(self):
        """Exit fullscreen mode via the page's ExitFullScreen action."""
        self._widget.triggerPageAction(QWebEnginePage.ExitFullScreen)

    def save_page(self):
        """Save the current page."""
        self._widget.triggerPageAction(QWebEnginePage.SavePage)

    def show_source(self, pygments=False):
        """Show the source of the current page.

        Args:
            pygments: Use the pygments-based fallback viewer instead of the
                      native view-source support.
        """
        if pygments:
            self._show_source_pygments()
            return

        try:
            self._widget.triggerPageAction(QWebEnginePage.ViewSource)
        except AttributeError:
            # Qt < 5.8 has no ViewSource action, so open a view-source: URL
            # in a new tab instead.
            tb = objreg.get('tabbed-browser', scope='window',
                            window=self._tab.win_id)
            urlstr = self._tab.url().toString(
                QUrl.RemoveUserInfo)  # type: ignore
            # The original URL becomes the path of a view-source: URL
            # (without a host), but query/fragment should stay.
            url = QUrl('view-source:' + urlstr)
            tb.tabopen(url, background=False, related=True)
class WebEnginePrinting(browsertab.AbstractPrinting):

    """QtWebEngine implementations related to printing."""

    def check_pdf_support(self):
        """No check needed: PDF export is always available here."""
        pass

    def check_printer_support(self):
        """Raise WebTabError if direct printing is unavailable.

        QWebEnginePage.print() only exists with Qt >= 5.8, so we probe for
        the attribute at runtime.
        """
        if not hasattr(self._widget.page(), 'print'):
            raise browsertab.WebTabError(
                "Printing is unsupported with QtWebEngine on Qt < 5.8")

    def check_preview_support(self):
        """Always raise WebTabError: previews don't work with QtWebEngine."""
        raise browsertab.WebTabError(
            "Print previews are unsupported with QtWebEngine")

    def to_pdf(self, filename):
        """Export the current page as a PDF to the given filename."""
        self._widget.page().printToPdf(filename)

    def to_printer(self, printer, callback=None):
        """Print the current page.

        Args:
            printer: The QPrinter to print to.
            callback: Called with a bool (success) once printing finished;
                      may be None to ignore the result.
        """
        if callback is None:
            # PEP 8 (E731): prefer a def over assigning a lambda to a name.
            def callback(_ok):
                """Ignore the result if no callback was given."""
        self._widget.page().print(printer, callback)
class WebEngineSearch(browsertab.AbstractSearch):

    """QtWebEngine implementations related to searching on the page.

    Attributes:
        _flags: The QWebEnginePage.FindFlags of the last search.
        _pending_searches: How many searches have been started but not called
                           back yet.
    """

    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        self._flags = QWebEnginePage.FindFlags(0)  # type: ignore
        self._pending_searches = 0

    def _find(self, text, flags, callback, caller):
        """Call findText on the widget."""
        self.search_displayed = True
        self._pending_searches += 1

        def wrapped_callback(found):
            """Wrap the callback to do debug logging."""
            self._pending_searches -= 1
            if self._pending_searches > 0:
                # A newer search superseded this one; its callback fires with
                # a cancelled result we must not propagate.
                # See https://github.com/qutebrowser/qutebrowser/issues/2442
                # and https://github.com/qt/qtwebengine/blob/5.10/src/core/web_contents_adapter.cpp#L924-L934
                log.webview.debug("Ignoring cancelled search callback with "
                                  "{} pending searches".format(
                                      self._pending_searches))
                return

            if sip.isdeleted(self._widget):
                # This happens when starting a search, and closing the tab
                # before results arrive.
                log.webview.debug("Ignoring finished search for deleted "
                                  "widget")
                return

            found_text = 'found' if found else "didn't find"
            if flags:
                flag_text = 'with flags {}'.format(debug.qflags_key(
                    QWebEnginePage, flags, klass=QWebEnginePage.FindFlag))
            else:
                flag_text = ''
            log.webview.debug(' '.join([caller, found_text, text, flag_text])
                              .strip())

            if callback is not None:
                callback(found)
            self.finished.emit(found)

        self._widget.findText(text, flags, wrapped_callback)

    def search(self, text, *, ignore_case=usertypes.IgnoreCase.never,
               reverse=False, result_cb=None):
        """Start a new search, recomputing the find flags from scratch."""
        # Don't go to next entry on duplicate search
        if self.text == text and self.search_displayed:
            log.webview.debug("Ignoring duplicate search request"
                              " for {}".format(text))
            return

        self.text = text
        self._flags = QWebEnginePage.FindFlags(0)  # type: ignore
        if self._is_case_sensitive(ignore_case):
            self._flags |= QWebEnginePage.FindCaseSensitively
        if reverse:
            self._flags |= QWebEnginePage.FindBackward

        self._find(text, self._flags, result_cb, 'search')

    def clear(self):
        """Clear the current search and its highlights."""
        if self.search_displayed:
            self.cleared.emit()
        self.search_displayed = False
        self._widget.findText('')

    def prev_result(self, *, result_cb=None):
        """Go to the previous result by searching with inverted direction."""
        # The int() here makes sure we get a copy of the flags.
        flags = QWebEnginePage.FindFlags(int(self._flags))  # type: ignore
        if flags & QWebEnginePage.FindBackward:
            flags &= ~QWebEnginePage.FindBackward
        else:
            flags |= QWebEnginePage.FindBackward
        self._find(self.text, flags, result_cb, 'prev_result')

    def next_result(self, *, result_cb=None):
        """Go to the next result, reusing the flags of the last search."""
        self._find(self.text, self._flags, result_cb, 'next_result')
class WebEngineCaret(browsertab.AbstractCaret):

    """QtWebEngine implementations related to moving the cursor/selection."""

    def _flags(self):
        """Get flags to pass to JS."""
        flags = set()
        if qtutils.version_check('5.7.1', compiled=False):
            flags.add('filter-prefix')
        if utils.is_windows:
            flags.add('windows')
        return list(flags)

    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_entered(self, mode):
        """Set up caret browsing (via caret.js) when caret mode is entered."""
        if mode != usertypes.KeyMode.caret:
            return

        if self._tab.search.search_displayed:
            # We are currently in search mode.
            # convert the search to a blue selection so we can operate on it
            # https://bugreports.qt.io/browse/QTBUG-60673
            self._tab.search.clear()

        self._tab.run_js_async(
            javascript.assemble('caret', 'setFlags', self._flags()))

        self._js_call('setInitialCursor', callback=self._selection_cb)

    def _selection_cb(self, enabled):
        """Emit selection_toggled based on setInitialCursor."""
        if self._mode_manager.mode != usertypes.KeyMode.caret:
            log.webview.debug("Ignoring selection cb due to mode change.")
            return
        if enabled is None:
            log.webview.debug("Ignoring selection status None")
            return
        self.selection_toggled.emit(enabled)

    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_left(self, mode):
        """Drop the selection and disable the JS caret when leaving mode."""
        if mode != usertypes.KeyMode.caret:
            return
        self.drop_selection()
        self._js_call('disableCaret')

    def move_to_next_line(self, count=1):
        """Move the caret down by count lines."""
        self._js_call('moveDown', count)

    def move_to_prev_line(self, count=1):
        """Move the caret up by count lines."""
        self._js_call('moveUp', count)

    def move_to_next_char(self, count=1):
        """Move the caret right by count characters."""
        self._js_call('moveRight', count)

    def move_to_prev_char(self, count=1):
        """Move the caret left by count characters."""
        self._js_call('moveLeft', count)

    def move_to_end_of_word(self, count=1):
        """Move the caret to the end of the word, count times."""
        self._js_call('moveToEndOfWord', count)

    def move_to_next_word(self, count=1):
        """Move the caret to the next word, count times."""
        self._js_call('moveToNextWord', count)

    def move_to_prev_word(self, count=1):
        """Move the caret to the previous word, count times."""
        self._js_call('moveToPreviousWord', count)

    def move_to_start_of_line(self):
        """Move the caret to the start of the current line."""
        self._js_call('moveToStartOfLine')

    def move_to_end_of_line(self):
        """Move the caret to the end of the current line."""
        self._js_call('moveToEndOfLine')

    def move_to_start_of_next_block(self, count=1):
        """Move the caret to the start of the next block, count times."""
        self._js_call('moveToStartOfNextBlock', count)

    def move_to_start_of_prev_block(self, count=1):
        """Move the caret to the start of the previous block, count times."""
        self._js_call('moveToStartOfPrevBlock', count)

    def move_to_end_of_next_block(self, count=1):
        """Move the caret to the end of the next block, count times."""
        self._js_call('moveToEndOfNextBlock', count)

    def move_to_end_of_prev_block(self, count=1):
        """Move the caret to the end of the previous block, count times."""
        self._js_call('moveToEndOfPrevBlock', count)

    def move_to_start_of_document(self):
        """Move the caret to the start of the document."""
        self._js_call('moveToStartOfDocument')

    def move_to_end_of_document(self):
        """Move the caret to the end of the document."""
        self._js_call('moveToEndOfDocument')

    def toggle_selection(self):
        """Toggle the JS selection; emits selection_toggled with the result."""
        self._js_call('toggleSelection', callback=self.selection_toggled.emit)

    def drop_selection(self):
        """Drop the current selection."""
        self._js_call('dropSelection')

    def selection(self, callback):
        """Get the selected text via JS and pass it to callback."""
        # Not using selectedText() as WORKAROUND for
        # https://bugreports.qt.io/browse/QTBUG-53134
        # Even on Qt 5.10 selectedText() seems to work poorly, see
        # https://github.com/qutebrowser/qutebrowser/issues/3523
        self._tab.run_js_async(javascript.assemble('caret', 'getSelection'),
                               callback)

    def reverse_selection(self):
        """Reverse the selection (caret.js reverseSelection)."""
        self._js_call('reverseSelection')

    def _follow_selected_cb_wrapped(self, js_elem, tab):
        # Ensure follow_selected_done is emitted even if the callback raises.
        try:
            self._follow_selected_cb(js_elem, tab)
        finally:
            self.follow_selected_done.emit()

    def _follow_selected_cb(self, js_elem, tab):
        """Callback for javascript which clicks the selected element.

        Args:
            js_elem: The element serialized from javascript.
            tab: Open in a new tab.
        """
        if js_elem is None:
            # No selected/focused element was found.
            return

        if js_elem == "focused":
            # we had a focused element, not a selected one. Just send <enter>
            self._follow_enter(tab)
            return

        assert isinstance(js_elem, dict), js_elem
        elem = webengineelem.WebEngineElement(js_elem, tab=self._tab)
        if tab:
            click_type = usertypes.ClickTarget.tab
        else:
            click_type = usertypes.ClickTarget.normal

        # Only click if we see a link
        if elem.is_link():
            log.webview.debug("Found link in selection, clicking. ClickTarget "
                              "{}, elem {}".format(click_type, elem))
            try:
                elem.click(click_type)
            except webelem.Error as e:
                message.error(str(e))

    def follow_selected(self, *, tab=False):
        """Follow the currently selected element/link.

        Args:
            tab: Open the result in a new tab.
        """
        if self._tab.search.search_displayed:
            # We are currently in search mode.
            # let's click the link via a fake-click
            # https://bugreports.qt.io/browse/QTBUG-60673
            self._tab.search.clear()

            log.webview.debug("Clicking a searched link via fake key press.")
            # send a fake enter, clicking the orange selection box
            self._follow_enter(tab)
        else:
            # click an existing blue selection
            js_code = javascript.assemble('webelem',
                                          'find_selected_focused_link')
            self._tab.run_js_async(
                js_code,
                lambda jsret: self._follow_selected_cb_wrapped(jsret, tab))

    def _js_call(self, command, *args, callback=None):
        """Run the given caret.js command with args and optional callback."""
        code = javascript.assemble('caret', command, *args)
        self._tab.run_js_async(code, callback)
class WebEngineScroller(browsertab.AbstractScroller):

    """QtWebEngine implementations related to scrolling.

    Attributes:
        _pos_perc: The current scroll position as (x%, y%) tuple.
        _pos_px: The current scroll position as QPoint.
        _at_bottom: Whether the page is currently scrolled to the bottom.
    """

    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        self._pos_perc = (0, 0)
        self._pos_px = QPoint()
        self._at_bottom = False

    def _init_widget(self, widget):
        """Track scroll position changes of the new widget's page."""
        super()._init_widget(widget)
        page = widget.page()
        page.scrollPositionChanged.connect(self._update_pos)

    def _repeated_key_press(self, key, count=1, modifier=Qt.NoModifier):
        """Send count fake key presses to this scroller's WebEngineTab."""
        # Cap at 1000 so a huge count can't flood the page with events.
        for _ in range(min(count, 1000)):
            self._tab.fake_key_press(key, modifier)

    @pyqtSlot(QPointF)
    def _update_pos(self, pos):
        """Update the scroll position attributes when it changed."""
        self._pos_px = pos.toPoint()
        contents_size = self._widget.page().contentsSize()

        scrollable_x = contents_size.width() - self._widget.width()
        if scrollable_x == 0:
            perc_x = 0
        else:
            try:
                perc_x = min(100, round(100 / scrollable_x * pos.x()))
            except ValueError:
                # https://github.com/qutebrowser/qutebrowser/issues/3219
                log.misc.debug("Got ValueError for perc_x!")
                log.misc.debug("contents_size.width(): {}".format(
                    contents_size.width()))
                log.misc.debug("self._widget.width(): {}".format(
                    self._widget.width()))
                log.misc.debug("scrollable_x: {}".format(scrollable_x))
                log.misc.debug("pos.x(): {}".format(pos.x()))
                raise

        scrollable_y = contents_size.height() - self._widget.height()
        if scrollable_y == 0:
            perc_y = 0
        else:
            try:
                perc_y = min(100, round(100 / scrollable_y * pos.y()))
            except ValueError:
                # https://github.com/qutebrowser/qutebrowser/issues/3219
                log.misc.debug("Got ValueError for perc_y!")
                log.misc.debug("contents_size.height(): {}".format(
                    contents_size.height()))
                log.misc.debug("self._widget.height(): {}".format(
                    self._widget.height()))
                log.misc.debug("scrollable_y: {}".format(scrollable_y))
                log.misc.debug("pos.y(): {}".format(pos.y()))
                raise

        self._at_bottom = math.ceil(pos.y()) >= scrollable_y

        # Only emit perc_changed when the percentage actually changed, unless
        # filtering was explicitly disabled via a debug flag.
        if (self._pos_perc != (perc_x, perc_y) or
                'no-scroll-filtering' in objects.debug_flags):
            self._pos_perc = perc_x, perc_y
            self.perc_changed.emit(*self._pos_perc)

    def pos_px(self):
        """The current scroll position as QPoint."""
        return self._pos_px

    def pos_perc(self):
        """The current scroll position as (x%, y%) tuple."""
        return self._pos_perc

    def to_perc(self, x=None, y=None):
        """Scroll to the given percentage(s) via JS."""
        js_code = javascript.assemble('scroll', 'to_perc', x, y)
        self._tab.run_js_async(js_code)

    def to_point(self, point):
        """Scroll to the given pixel position via JS."""
        js_code = javascript.assemble('window', 'scroll', point.x(), point.y())
        self._tab.run_js_async(js_code)

    def to_anchor(self, name):
        """Scroll to the given anchor by loading the URL with it as fragment."""
        url = self._tab.url()
        url.setFragment(name)
        self._tab.load_url(url)

    def delta(self, x=0, y=0):
        """Scroll by the given pixel deltas via JS."""
        self._tab.run_js_async(javascript.assemble('window', 'scrollBy', x, y))

    def delta_page(self, x=0, y=0):
        """Scroll by the given page deltas via JS."""
        js_code = javascript.assemble('scroll', 'delta_page', x, y)
        self._tab.run_js_async(js_code)

    def up(self, count=1):
        """Scroll up via fake Up key presses."""
        self._repeated_key_press(Qt.Key_Up, count)

    def down(self, count=1):
        """Scroll down via fake Down key presses."""
        self._repeated_key_press(Qt.Key_Down, count)

    def left(self, count=1):
        """Scroll left via fake Left key presses."""
        self._repeated_key_press(Qt.Key_Left, count)

    def right(self, count=1):
        """Scroll right via fake Right key presses."""
        self._repeated_key_press(Qt.Key_Right, count)

    def top(self):
        """Scroll to the top via a fake Home key press."""
        self._tab.fake_key_press(Qt.Key_Home)

    def bottom(self):
        """Scroll to the bottom via a fake End key press."""
        self._tab.fake_key_press(Qt.Key_End)

    def page_up(self, count=1):
        """Scroll up a page via fake PageUp key presses."""
        self._repeated_key_press(Qt.Key_PageUp, count)

    def page_down(self, count=1):
        """Scroll down a page via fake PageDown key presses."""
        self._repeated_key_press(Qt.Key_PageDown, count)

    def at_top(self):
        """Whether the page is scrolled to the very top."""
        return self.pos_px().y() == 0

    def at_bottom(self):
        """Whether the page is scrolled to the very bottom."""
        return self._at_bottom
class WebEngineHistoryPrivate(browsertab.AbstractHistoryPrivate):

    """History-related methods which are not part of the extension API."""

    def serialize(self):
        """Serialize this tab's history via qtutils.serialize."""
        if not qtutils.version_check('5.9', compiled=False):
            # WORKAROUND for
            # https://github.com/qutebrowser/qutebrowser/issues/2289
            # Don't use the history's currentItem here, because of
            # https://bugreports.qt.io/browse/QTBUG-59599 and because it doesn't
            # contain view-source.
            scheme = self._tab.url().scheme()
            if scheme in ['view-source', 'chrome']:
                raise browsertab.WebTabError("Can't serialize special URL!")
        return qtutils.serialize(self._history)

    def deserialize(self, data):
        """Restore this tab's history from data produced by serialize()."""
        qtutils.deserialize(data, self._history)

    def load_items(self, items):
        """Load the given history items into this tab.

        Emits before_load_started for the last (current) item, restores the
        saved zoom factor, and restores the scroll position once the load
        finished (only when the page is still at the origin).
        """
        if items:
            self._tab.before_load_started.emit(items[-1].url)

        stream, _data, cur_data = tabhistory.serialize(items)
        qtutils.deserialize_stream(stream, self._history)

        @pyqtSlot()
        def _on_load_finished():
            # One-shot: restore the scroll position, then disconnect again.
            self._tab.scroller.to_point(cur_data['scroll-pos'])
            self._tab.load_finished.disconnect(_on_load_finished)

        if cur_data is not None:
            if 'zoom' in cur_data:
                self._tab.zoom.set_factor(cur_data['zoom'])
            if ('scroll-pos' in cur_data and
                    self._tab.scroller.pos_px() == QPoint(0, 0)):
                self._tab.load_finished.connect(_on_load_finished)
class WebEngineHistory(browsertab.AbstractHistory):

    """QtWebEngine implementations related to page history."""

    def __init__(self, tab):
        super().__init__(tab)
        self.private_api = WebEngineHistoryPrivate(tab)

    def __len__(self):
        return len(self._history)

    def __iter__(self):
        yield from self._history.items()

    def current_idx(self):
        """Index of the currently active history entry."""
        return self._history.currentItemIndex()

    def can_go_back(self):
        """Whether there is an earlier entry to go back to."""
        return self._history.canGoBack()

    def can_go_forward(self):
        """Whether there is a later entry to go forward to."""
        return self._history.canGoForward()

    def _item_at(self, i):
        """The history entry at the given index."""
        return self._history.itemAt(i)

    def _go_to_item(self, item):
        """Navigate to the given history entry."""
        target = item.url()
        self._tab.before_load_started.emit(target)
        self._history.goToItem(item)
class WebEngineZoom(browsertab.AbstractZoom):

    """QtWebEngine implementations related to zooming."""

    def _set_factor_internal(self, factor):
        """Apply the given zoom factor directly to the view widget."""
        self._widget.setZoomFactor(factor)
class WebEngineElements(browsertab.AbstractElements):

    """QtWebEngine implementations related to elements on the page."""

    def _js_cb_multiple(self, callback, error_cb, js_elems):
        """Forward elements found by JS to the real callback.

        Args:
            callback: Called with a list of WebEngineElements on success.
            error_cb: Called with a webelem.Error on failure.
            js_elems: The elements serialized from javascript.
        """
        if js_elems is None:
            error_cb(webelem.Error("Unknown error while getting "
                                   "elements"))
            return
        if not js_elems['success']:
            error_cb(webelem.Error(js_elems['error']))
            return
        callback([webengineelem.WebEngineElement(data, tab=self._tab)
                  for data in js_elems['result']])

    def _js_cb_single(self, callback, js_elem):
        """Forward a single element found by JS to the real callback.

        Args:
            callback: Called with a WebEngineElement or None.
            js_elem: The element serialized from javascript.
        """
        if js_elem is None:
            log.webview.debug("Got element from JS: None")
            callback(None)
        else:
            log.webview.debug("Got element from JS: {}".format(
                utils.elide(repr(js_elem), 1000)))
            callback(webengineelem.WebEngineElement(js_elem, tab=self._tab))

    def find_css(self, selector, callback, error_cb, *,
                 only_visible=False):
        """Find all elements matching a CSS selector via JS."""
        code = javascript.assemble('webelem', 'find_css', selector,
                                   only_visible)
        wrapped = functools.partial(self._js_cb_multiple, callback, error_cb)
        self._tab.run_js_async(code, wrapped)

    def find_id(self, elem_id, callback):
        """Find the element with the given ID via JS."""
        code = javascript.assemble('webelem', 'find_id', elem_id)
        self._tab.run_js_async(
            code, functools.partial(self._js_cb_single, callback))

    def find_focused(self, callback):
        """Find the currently focused element via JS."""
        code = javascript.assemble('webelem', 'find_focused')
        self._tab.run_js_async(
            code, functools.partial(self._js_cb_single, callback))

    def find_at_pos(self, pos, callback):
        """Find the element at the given position via JS."""
        assert pos.x() >= 0, pos
        assert pos.y() >= 0, pos
        # Convert to page coordinates by undoing the zoom factor.
        pos /= self._tab.zoom.factor()
        code = javascript.assemble('webelem', 'find_at_pos',
                                   pos.x(), pos.y())
        self._tab.run_js_async(
            code, functools.partial(self._js_cb_single, callback))
class WebEngineAudio(browsertab.AbstractAudio):

    """QtWebEngine implementations related to audio/muting.

    Attributes:
        _overridden: Whether the user toggled muting manually.
                     If that's the case, we leave it alone.
    """

    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        self._overridden = False

    def _connect_signals(self):
        """Connect mute-related page/tab/config signals."""
        page = self._widget.page()
        page.audioMutedChanged.connect(self.muted_changed)
        page.recentlyAudibleChanged.connect(self.recently_audible_changed)
        self._tab.url_changed.connect(self._on_url_changed)
        config.instance.changed.connect(self._on_config_changed)

    def set_muted(self, muted: bool, override: bool = False) -> None:
        """Set the page's muted state.

        Args:
            muted: Whether the page should be muted.
            override: Whether this is a manual user override, which then
                      takes precedence over the content.mute setting.
        """
        self._overridden = override
        assert self._widget is not None
        page = self._widget.page()
        page.setAudioMuted(muted)

    def is_muted(self):
        """Whether the page is currently muted."""
        page = self._widget.page()
        return page.isAudioMuted()

    def is_recently_audible(self):
        """Whether the page was recently playing audio."""
        page = self._widget.page()
        return page.recentlyAudible()

    @pyqtSlot(QUrl)
    def _on_url_changed(self, url):
        """Re-apply the content.mute setting for the new URL."""
        if self._overridden:
            # A manual user toggle takes precedence over the setting.
            return
        mute = config.instance.get('content.mute', url=url)
        self.set_muted(mute)

    @config.change_filter('content.mute')
    def _on_config_changed(self):
        # Re-evaluate muting for the current URL when the setting changed.
        self._on_url_changed(self._tab.url())
class _WebEnginePermissions(QObject):

    """Handling of various permission-related signals."""

    # Maps QWebEnginePage features to the config option controlling them.
    # Using 0 as WORKAROUND for:
    # https://www.riverbankcomputing.com/pipermail/pyqt/2019-July/041903.html
    _options = {
        0: 'content.notifications',
        QWebEnginePage.Geolocation: 'content.geolocation',
        QWebEnginePage.MediaAudioCapture: 'content.media_capture',
        QWebEnginePage.MediaVideoCapture: 'content.media_capture',
        QWebEnginePage.MediaAudioVideoCapture: 'content.media_capture',
    }

    # Maps QWebEnginePage features to the text shown in the permission prompt.
    _messages = {
        0: 'show notifications',
        QWebEnginePage.Geolocation: 'access your location',
        QWebEnginePage.MediaAudioCapture: 'record audio',
        QWebEnginePage.MediaVideoCapture: 'record video',
        QWebEnginePage.MediaAudioVideoCapture: 'record audio/video',
    }

    def __init__(self, tab, parent=None):
        super().__init__(parent)
        self._tab = tab
        # Assigned later (from outside this class); cast so checkers treat it
        # as a QWidget.
        self._widget = typing.cast(QWidget, None)

        try:
            self._options.update({
                QWebEnginePage.MouseLock:
                    'content.mouse_lock',
            })
            self._messages.update({
                QWebEnginePage.MouseLock:
                    'hide your mouse pointer',
            })
        except AttributeError:
            # Added in Qt 5.8
            pass

        try:
            self._options.update({
                QWebEnginePage.DesktopVideoCapture:
                    'content.desktop_capture',
                QWebEnginePage.DesktopAudioVideoCapture:
                    'content.desktop_capture',
            })
            self._messages.update({
                QWebEnginePage.DesktopVideoCapture:
                    'capture your desktop',
                QWebEnginePage.DesktopAudioVideoCapture:
                    'capture your desktop and audio',
            })
        except AttributeError:
            # Added in Qt 5.10
            pass

        # Every feature must have both an option and a prompt message.
        assert self._options.keys() == self._messages.keys()

    def connect_signals(self):
        """Connect related signals from the QWebEnginePage."""
        page = self._widget.page()
        page.fullScreenRequested.connect(
            self._on_fullscreen_requested)
        page.featurePermissionRequested.connect(
            self._on_feature_permission_requested)

        # quotaRequested/registerProtocolHandlerRequested need Qt >= 5.11.
        if qtutils.version_check('5.11'):
            page.quotaRequested.connect(
                self._on_quota_requested)
            page.registerProtocolHandlerRequested.connect(
                self._on_register_protocol_handler_requested)

    @pyqtSlot('QWebEngineFullScreenRequest')
    def _on_fullscreen_requested(self, request):
        """Accept fullscreen requests and show a short notification."""
        request.accept()
        on = request.toggleOn()

        self._tab.data.fullscreen = on
        self._tab.fullscreen_requested.emit(on)
        if on:
            notification = miscwidgets.FullscreenNotification(self._widget)
            notification.show()
            notification.set_timeout(3000)

    @pyqtSlot(QUrl, 'QWebEnginePage::Feature')
    def _on_feature_permission_requested(self, url, feature):
        """Ask the user for approval for geolocation/media/etc.."""
        page = self._widget.page()
        grant_permission = functools.partial(
            page.setFeaturePermission, url, feature,
            QWebEnginePage.PermissionGrantedByUser)
        deny_permission = functools.partial(
            page.setFeaturePermission, url, feature,
            QWebEnginePage.PermissionDeniedByUser)

        if feature not in self._options:
            # Deny unknown features instead of leaving the request pending.
            log.webview.error("Unhandled feature permission {}".format(
                debug.qenum_key(QWebEnginePage, feature)))
            deny_permission()
            return

        if (
                hasattr(QWebEnginePage, 'DesktopVideoCapture') and
                feature in [QWebEnginePage.DesktopVideoCapture,
                            QWebEnginePage.DesktopAudioVideoCapture] and
                qtutils.version_check('5.13', compiled=False) and
                not qtutils.version_check('5.13.2', compiled=False)
        ):
            # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-78016
            log.webview.warning("Ignoring desktop sharing request due to "
                                "crashes in Qt < 5.13.2")
            deny_permission()
            return

        question = shared.feature_permission(
            url=url.adjusted(QUrl.RemovePath),
            option=self._options[feature], msg=self._messages[feature],
            yes_action=grant_permission, no_action=deny_permission,
            abort_on=[self._tab.abort_questions])

        if question is not None:
            page.featurePermissionRequestCanceled.connect(
                functools.partial(self._on_feature_permission_cancelled,
                                  question, url, feature))

    def _on_feature_permission_cancelled(self, question, url, feature,
                                         cancelled_url, cancelled_feature):
        """Slot invoked when a feature permission request was cancelled.

        To be used with functools.partial.
        """
        if url == cancelled_url and feature == cancelled_feature:
            try:
                question.abort()
            except RuntimeError:
                # The question could already be deleted, e.g. because it was
                # aborted after a loadStarted signal.
                pass

    def _on_quota_requested(self, request):
        """Ask the user whether a page may use persistent storage."""
        size = utils.format_size(request.requestedSize())
        shared.feature_permission(
            url=request.origin().adjusted(QUrl.RemovePath),
            option='content.persistent_storage',
            msg='use {} of persistent storage'.format(size),
            yes_action=request.accept, no_action=request.reject,
            abort_on=[self._tab.abort_questions],
            blocking=True)

    def _on_register_protocol_handler_requested(self, request):
        """Ask the user whether a page may register a protocol handler."""
        shared.feature_permission(
            url=request.origin().adjusted(QUrl.RemovePath),
            option='content.register_protocol_handler',
            msg='open all {} links'.format(request.scheme()),
            yes_action=request.accept, no_action=request.reject,
            abort_on=[self._tab.abort_questions],
            blocking=True)
class _WebEngineScripts(QObject):

    """Management of the JavaScript injected into this tab's page."""

    def __init__(self, tab, parent=None):
        super().__init__(parent)
        self._tab = tab
        # Assigned later (from outside this class); cast so checkers treat it
        # as a QWidget.
        self._widget = typing.cast(QWidget, None)
        self._greasemonkey = greasemonkey.gm_manager

    def connect_signals(self):
        """Connect signals to our private slots."""
        config.instance.changed.connect(self._on_config_changed)

        self._tab.search.cleared.connect(functools.partial(
            self._update_stylesheet, searching=False))
        self._tab.search.finished.connect(self._update_stylesheet)

    @pyqtSlot(str)
    def _on_config_changed(self, option):
        # Re-init and re-apply stylesheets for the options affecting them.
        if option in ['scrolling.bar', 'content.user_stylesheets']:
            self._init_stylesheet()
            self._update_stylesheet()

    @pyqtSlot(bool)
    def _update_stylesheet(self, searching=False):
        """Update the custom stylesheet in existing tabs."""
        css = shared.get_user_stylesheet(searching=searching)
        code = javascript.assemble('stylesheet', 'set_css', css)
        self._tab.run_js_async(code)

    def _inject_early_js(self, name, js_code, *,
                         world=QWebEngineScript.ApplicationWorld,
                         subframes=False):
        """Inject the given script to run early on a page load.

        This runs the script both on DocumentCreation and DocumentReady as on
        some internal pages, DocumentCreation will not work.

        That is a WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66011
        """
        scripts = self._widget.page().scripts()
        for injection in ['creation', 'ready']:
            # NOTE(review): this mapping is loop-invariant and could be
            # hoisted out of the loop.
            injection_points = {
                'creation': QWebEngineScript.DocumentCreation,
                'ready': QWebEngineScript.DocumentReady,
            }
            script = QWebEngineScript()
            script.setInjectionPoint(injection_points[injection])
            script.setSourceCode(js_code)
            script.setWorldId(world)
            script.setRunsOnSubFrames(subframes)
            script.setName('_qute_{}_{}'.format(name, injection))
            scripts.insert(script)

    def _remove_early_js(self, name):
        """Remove an early QWebEngineScript."""
        scripts = self._widget.page().scripts()
        for injection in ['creation', 'ready']:
            full_name = '_qute_{}_{}'.format(name, injection)
            script = scripts.findScript(full_name)
            if not script.isNull():
                scripts.remove(script)

    def init(self):
        """Initialize global qutebrowser JavaScript."""
        js_code = javascript.wrap_global(
            'scripts',
            utils.read_file('javascript/scroll.js'),
            utils.read_file('javascript/webelem.js'),
            utils.read_file('javascript/caret.js'),
        )
        if not qtutils.version_check('5.12'):
            # WORKAROUND for Qt versions < 5.12 not exposing window.print().
            # Qt 5.12 has a printRequested() signal so we don't need this hack
            # anymore.
            self._inject_early_js('js',
                                  utils.read_file('javascript/print.js'),
                                  subframes=True,
                                  world=QWebEngineScript.MainWorld)
        # FIXME:qtwebengine what about subframes=True?
        self._inject_early_js('js', js_code, subframes=True)
        self._init_stylesheet()

        # The Greasemonkey metadata block support in QtWebEngine only starts at
        # Qt 5.8. With 5.7.1, we need to inject the scripts ourselves in
        # response to urlChanged.
        if not qtutils.version_check('5.8'):
            self._tab.url_changed.connect(
                self._inject_greasemonkey_scripts_for_url)
        else:
            self._greasemonkey.scripts_reloaded.connect(
                self._inject_all_greasemonkey_scripts)
            self._inject_all_greasemonkey_scripts()

    def _init_stylesheet(self):
        """Initialize custom stylesheets.

        Partially inspired by QupZilla:
        https://github.com/QupZilla/qupzilla/blob/v2.0/src/lib/app/mainapplication.cpp#L1063-L1101
        """
        self._remove_early_js('stylesheet')
        css = shared.get_user_stylesheet()
        js_code = javascript.wrap_global(
            'stylesheet',
            utils.read_file('javascript/stylesheet.js'),
            javascript.assemble('stylesheet', 'set_css', css),
        )
        self._inject_early_js('stylesheet', js_code, subframes=True)

    @pyqtSlot(QUrl)
    def _inject_greasemonkey_scripts_for_url(self, url):
        """Inject Greasemonkey scripts matching url (Qt < 5.8 fallback)."""
        matching_scripts = self._greasemonkey.scripts_for(url)
        self._inject_greasemonkey_scripts(
            matching_scripts.start, QWebEngineScript.DocumentCreation, True)
        self._inject_greasemonkey_scripts(
            matching_scripts.end, QWebEngineScript.DocumentReady, False)
        self._inject_greasemonkey_scripts(
            matching_scripts.idle, QWebEngineScript.Deferred, False)

    @pyqtSlot()
    def _inject_all_greasemonkey_scripts(self):
        """(Re-)inject all scripts known to the Greasemonkey manager."""
        scripts = self._greasemonkey.all_scripts()
        self._inject_greasemonkey_scripts(scripts)

    def _remove_all_greasemonkey_scripts(self):
        """Remove all GM- prefixed scripts from this tab's collection."""
        page_scripts = self._widget.page().scripts()
        for script in page_scripts.toList():
            if script.name().startswith("GM-"):
                log.greasemonkey.debug('Removing script: {}'
                                       .format(script.name()))
                removed = page_scripts.remove(script)
                assert removed, script.name()

    def _inject_greasemonkey_scripts(self, scripts=None, injection_point=None,
                                     remove_first=True):
        """Register user JavaScript files with the current tab.

        Args:
            scripts: A list of GreasemonkeyScripts, or None to add all
                     known by the Greasemonkey subsystem.
            injection_point: The QWebEngineScript::InjectionPoint stage
                             to inject the script into, None to use
                             auto-detection.
            remove_first: Whether to remove all previously injected
                          scripts before adding these ones.
        """
        if sip.isdeleted(self._widget):
            # The underlying widget is already gone; nothing to inject into.
            return

        # Since we are inserting scripts into a per-tab collection,
        # rather than just injecting scripts on page load, we need to
        # make sure we replace existing scripts, not just add new ones.
        # While, taking care not to remove any other scripts that might
        # have been added elsewhere, like the one for stylesheets.
        page_scripts = self._widget.page().scripts()
        if remove_first:
            self._remove_all_greasemonkey_scripts()

        if not scripts:
            return

        for script in scripts:
            new_script = QWebEngineScript()
            try:
                # '@qute-js-world' may be a numeric world ID...
                world = int(script.jsworld)
                if not 0 <= world <= qtutils.MAX_WORLD_ID:
                    log.greasemonkey.error(
                        "script {} has invalid value for '@qute-js-world'"
                        ": {}, should be between 0 and {}"
                        .format(
                            script.name,
                            script.jsworld,
                            qtutils.MAX_WORLD_ID))
                    continue
            except ValueError:
                # ...or a named JsWorld like "main"/"user".
                try:
                    world = _JS_WORLD_MAP[usertypes.JsWorld[
                        script.jsworld.lower()]]
                except KeyError:
                    log.greasemonkey.error(
                        "script {} has invalid value for '@qute-js-world'"
                        ": {}".format(script.name, script.jsworld))
                    continue
            new_script.setWorldId(world)
            new_script.setSourceCode(script.code())
            new_script.setName("GM-{}".format(script.name))
            new_script.setRunsOnSubFrames(script.runs_on_sub_frames)

            # Override the @run-at value parsed by QWebEngineScript if desired.
            if injection_point:
                new_script.setInjectionPoint(injection_point)
            elif script.needs_document_end_workaround():
                log.greasemonkey.debug("Forcing @run-at document-end for {}"
                                       .format(script.name))
                new_script.setInjectionPoint(QWebEngineScript.DocumentReady)

            log.greasemonkey.debug('adding script: {}'
                                   .format(new_script.name()))
            page_scripts.insert(new_script)
class WebEngineTabPrivate(browsertab.AbstractTabPrivate):

    """QtWebEngine-related methods which aren't part of the public API."""

    def shutdown(self):
        """Announce the shutdown, leave fullscreen and tear down the view."""
        self._tab.shutting_down.emit()
        self._tab.action.exit_fullscreen()
        self._widget.shutdown()

    def event_target(self):
        """Return the render widget which should receive input events."""
        return self._widget.render_widget()

    def networkaccessmanager(self):
        """There is no QNetworkAccessManager on this backend - always None."""
        return None

    def user_agent(self):
        """Not available on this backend - always None."""
        return None

    def clear_ssl_errors(self):
        """Not supported on the QtWebEngine backend."""
        raise browsertab.UnsupportedOperationError
class WebEngineTab(browsertab.AbstractTab):

    """A QtWebEngine tab in the browser.

    Signals:
        abort_questions: Emitted when a new load started or we're shutting
                         down.
    """

    abort_questions = pyqtSignal()

    def __init__(self, *, win_id, mode_manager, private, parent=None):
        """Initialize the tab and all its backend-specific helper objects."""
        super().__init__(win_id=win_id, private=private, parent=parent)
        widget = webview.WebEngineView(tabdata=self.data, win_id=win_id,
                                       private=private)
        self.history = WebEngineHistory(tab=self)
        self.scroller = WebEngineScroller(tab=self, parent=self)
        self.caret = WebEngineCaret(mode_manager=mode_manager,
                                    tab=self, parent=self)
        self.zoom = WebEngineZoom(tab=self, parent=self)
        self.search = WebEngineSearch(tab=self, parent=self)
        self.printing = WebEnginePrinting(tab=self)
        self.elements = WebEngineElements(tab=self)
        self.action = WebEngineAction(tab=self)
        self.audio = WebEngineAudio(tab=self, parent=self)
        self.private_api = WebEngineTabPrivate(mode_manager=mode_manager,
                                               tab=self)
        self._permissions = _WebEnginePermissions(tab=self, parent=self)
        self._scripts = _WebEngineScripts(tab=self, parent=self)
        # We're assigning settings in _set_widget
        self.settings = webenginesettings.WebEngineSettings(settings=None)
        self._set_widget(widget)
        self._connect_signals()
        self.backend = usertypes.Backend.QtWebEngine
        self._child_event_filter = None
        # Zoom factor saved across a load so it can be restored afterwards.
        self._saved_zoom = None
        # URL to load again after a config change (see _on_navigation_request).
        self._reload_url = None  # type: typing.Optional[QUrl]
        self._scripts.init()

    def _set_widget(self, widget):
        """Set the widget and share it with the helper objects needing it."""
        # pylint: disable=protected-access
        super()._set_widget(widget)
        self._permissions._widget = widget
        self._scripts._widget = widget

    def _install_event_filter(self):
        """Install event filters on the focus proxy and the widget itself."""
        fp = self._widget.focusProxy()
        if fp is not None:
            fp.installEventFilter(self._tab_event_filter)
        self._child_event_filter = eventfilter.ChildEventFilter(
            eventfilter=self._tab_event_filter, widget=self._widget,
            win_id=self.win_id, parent=self)
        self._widget.installEventFilter(self._child_event_filter)

    @pyqtSlot()
    def _restore_zoom(self):
        """Restore the zoom factor saved in load_url() after a load."""
        if sip.isdeleted(self._widget):
            # https://github.com/qutebrowser/qutebrowser/issues/3498
            return
        if self._saved_zoom is None:
            return
        self.zoom.set_factor(self._saved_zoom)
        self._saved_zoom = None

    def load_url(self, url, *, emit_before_load_started=True):
        """Load the given URL in this tab.

        Arguments:
            url: The QUrl to load.
            emit_before_load_started: If set to False, before_load_started is
                                      not emitted.
        """
        if sip.isdeleted(self._widget):
            # https://github.com/qutebrowser/qutebrowser/issues/3896
            return
        self._saved_zoom = self.zoom.factor()
        self._load_url_prepare(
            url, emit_before_load_started=emit_before_load_started)
        self._widget.load(url)

    def url(self, *, requested=False):
        """Get the current (or, with requested=True, originally requested) URL."""
        page = self._widget.page()
        if requested:
            return page.requestedUrl()
        else:
            return page.url()

    def dump_async(self, callback, *, plain=False):
        """Pass the page's HTML (or plain text) source to the callback."""
        if plain:
            self._widget.page().toPlainText(callback)
        else:
            self._widget.page().toHtml(callback)

    def run_js_async(self, code, callback=None, *, world=None):
        """Run the given JS asynchronously, optionally in a given JS world.

        Raises:
            browsertab.WebTabError: If a numeric world ID is out of range.
        """
        world_id_type = typing.Union[QWebEngineScript.ScriptWorldId, int]
        if world is None:
            world_id = QWebEngineScript.ApplicationWorld  # type: world_id_type
        elif isinstance(world, int):
            world_id = world
            if not 0 <= world_id <= qtutils.MAX_WORLD_ID:
                raise browsertab.WebTabError(
                    "World ID should be between 0 and {}"
                    .format(qtutils.MAX_WORLD_ID))
        else:
            world_id = _JS_WORLD_MAP[world]
        if callback is None:
            self._widget.page().runJavaScript(code, world_id)
        else:
            self._widget.page().runJavaScript(code, world_id, callback)

    def reload(self, *, force=False):
        """Reload the current page, bypassing the cache if force is given."""
        if force:
            action = QWebEnginePage.ReloadAndBypassCache
        else:
            action = QWebEnginePage.Reload
        self._widget.triggerPageAction(action)

    def stop(self):
        """Stop loading the current page."""
        self._widget.stop()

    def title(self):
        """Get the current page title."""
        return self._widget.title()

    def icon(self):
        """Get the current page's favicon."""
        return self._widget.icon()

    def set_html(self, html, base_url=QUrl()):
        """Set the displayed HTML of the page directly."""
        # FIXME:qtwebengine
        # check this and raise an exception if too big:
        # Warning: The content will be percent encoded before being sent to the
        # renderer via IPC. This may increase its size. The maximum size of the
        # percent encoded content is 2 megabytes minus 30 bytes.
        self._widget.setHtml(html, base_url)

    def _show_error_page(self, url, error):
        """Show an error page in the tab."""
        log.misc.debug("Showing error page for {}".format(error))
        url_string = url.toDisplayString()
        error_page = jinja.render(
            'error.html',
            title="Error loading page: {}".format(url_string),
            url=url_string, error=error)
        self.set_html(error_page)

    @pyqtSlot()
    def _on_history_trigger(self):
        """Emit history_item_triggered for the current page, if sensible."""
        try:
            self._widget.page()
        except RuntimeError:
            # Looks like this slot can be triggered on destroyed tabs:
            # https://crashes.qutebrowser.org/view/3abffbed (Qt 5.9.1)
            # wrapped C/C++ object of type WebEngineView has been deleted
            log.misc.debug("Ignoring history trigger for destroyed tab")
            return
        url = self.url()
        requested_url = self.url(requested=True)
        # Don't save the title if it's generated from the URL
        title = self.title()
        title_url = QUrl(url)
        title_url.setScheme('')
        title_url_str = title_url.toDisplayString(
            QUrl.RemoveScheme)  # type: ignore
        if title == title_url_str.strip('/'):
            title = ""
        # Don't add history entry if the URL is invalid anyways
        if not url.isValid():
            log.misc.debug("Ignoring invalid URL being added to history")
            return
        self.history_item_triggered.emit(url, requested_url, title)

    @pyqtSlot(QUrl, 'QAuthenticator*', 'QString')
    def _on_proxy_authentication_required(self, url, authenticator,
                                          proxy_host):
        """Called when a proxy needs authentication."""
        msg = "<b>{}</b> requires a username and password.".format(
            html_utils.escape(proxy_host))
        urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
        answer = message.ask(
            title="Proxy authentication required", text=msg,
            mode=usertypes.PromptMode.user_pwd,
            abort_on=[self.abort_questions], url=urlstr)
        if answer is not None:
            authenticator.setUser(answer.user)
            authenticator.setPassword(answer.password)
        else:
            try:
                # Cancel the request by handing back an empty authenticator.
                sip.assign(authenticator, QAuthenticator())  # type: ignore
            except AttributeError:
                # WORKAROUND for PyQt versions lacking sip.assign, see
                # https://www.riverbankcomputing.com/pipermail/pyqt/2016-December/038400.html
                self._show_error_page(url, "Proxy authentication required")

    @pyqtSlot(QUrl, 'QAuthenticator*')
    def _on_authentication_required(self, url, authenticator):
        """Ask the user for HTTP credentials, trying .netrc first."""
        log.network.debug("Authentication requested for {}, netrc_used {}"
                          .format(url.toDisplayString(), self.data.netrc_used))
        netrc_success = False
        # Only consult .netrc once per load (flag is reset in
        # _on_load_started) so a failing entry doesn't loop forever.
        if not self.data.netrc_used:
            self.data.netrc_used = True
            netrc_success = shared.netrc_authentication(url, authenticator)
        if not netrc_success:
            log.network.debug("Asking for credentials")
            answer = shared.authentication_required(
                url, authenticator, abort_on=[self.abort_questions])
        if not netrc_success and answer is None:
            log.network.debug("Aborting auth")
            try:
                sip.assign(authenticator, QAuthenticator())  # type: ignore
            except AttributeError:
                # WORKAROUND for
                # https://www.riverbankcomputing.com/pipermail/pyqt/2016-December/038400.html
                self._show_error_page(url, "Authentication required")

    @pyqtSlot()
    def _on_load_started(self):
        """Clear search when a new load is started if needed."""
        # WORKAROUND for
        # https://bugreports.qt.io/browse/QTBUG-61506
        # (seems to be back in later Qt versions as well)
        self.search.clear()
        super()._on_load_started()
        self.data.netrc_used = False

    @pyqtSlot(QWebEnginePage.RenderProcessTerminationStatus, int)
    def _on_render_process_terminated(self, status, exitcode):
        """Show an error when the renderer process terminated."""
        if (status == QWebEnginePage.AbnormalTerminationStatus and
                exitcode == 256):
            # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-58697
            status = QWebEnginePage.CrashedTerminationStatus
        status_map = {
            QWebEnginePage.NormalTerminationStatus:
                browsertab.TerminationStatus.normal,
            QWebEnginePage.AbnormalTerminationStatus:
                browsertab.TerminationStatus.abnormal,
            QWebEnginePage.CrashedTerminationStatus:
                browsertab.TerminationStatus.crashed,
            QWebEnginePage.KilledTerminationStatus:
                browsertab.TerminationStatus.killed,
            -1:
                browsertab.TerminationStatus.unknown,
        }
        self.renderer_process_terminated.emit(status_map[status], exitcode)

    def _error_page_workaround(self, js_enabled, html):
        """Check if we're displaying a Chromium error page.

        This gets called if we got a loadFinished(False), so we can display at
        least some error page in situations where Chromium's can't be
        displayed.

        WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66643
        WORKAROUND for https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=882805

        Needs to check the page content as a WORKAROUND for
        https://bugreports.qt.io/browse/QTBUG-66661
        """
        missing_jst = 'jstProcess(' in html and 'jstProcess=' not in html
        if js_enabled and not missing_jst:
            return
        match = re.search(r'"errorCode":"([^"]*)"', html)
        if match is None:
            return
        self._show_error_page(self.url(), error=match.group(1))

    @pyqtSlot(int)
    def _on_load_progress(self, perc: int) -> None:
        """QtWebEngine-specific loadProgress workarounds.

        WORKAROUND for https://bugreports.qt.io/browse/QTBUG-65223
        """
        super()._on_load_progress(perc)
        if (perc == 100 and
                qtutils.version_check('5.10', compiled=False) and
                self.load_status() != usertypes.LoadStatus.error):
            self._update_load_status(ok=True)

    @pyqtSlot(bool)
    def _on_load_finished(self, ok: bool) -> None:
        """QtWebEngine-specific loadFinished workarounds."""
        super()._on_load_finished(ok)
        # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-65223
        if qtutils.version_check('5.10', compiled=False):
            if not ok:
                self._update_load_status(ok)
        else:
            self._update_load_status(ok)
        if not ok:
            self.dump_async(functools.partial(
                self._error_page_workaround,
                self.settings.test_attribute('content.javascript.enabled')))
        if ok and self._reload_url is not None:
            # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66656
            log.config.debug(
                "Loading {} again because of config change".format(
                    self._reload_url.toDisplayString()))
            QTimer.singleShot(100, functools.partial(
                self.load_url, self._reload_url,
                emit_before_load_started=False))
            self._reload_url = None

    @pyqtSlot(certificateerror.CertificateErrorWrapper)
    def _on_ssl_errors(self, error):
        """Handle a certificate error, possibly showing an error page."""
        self._has_ssl_errors = True
        url = error.url()
        log.webview.debug("Certificate error: {}".format(error))
        if error.is_overridable():
            error.ignore = shared.ignore_certificate_errors(
                url, [error], abort_on=[self.abort_questions])
        else:
            log.webview.error("Non-overridable certificate error: "
                              "{}".format(error))
        log.webview.debug("ignore {}, URL {}, requested {}".format(
            error.ignore, url, self.url(requested=True)))
        # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-56207
        show_cert_error = (
            not qtutils.version_check('5.9') and
            not error.ignore
        )
        # WORKAROUND for https://codereview.qt-project.org/c/qt/qtwebengine/+/270556
        show_non_overr_cert_error = (
            not error.is_overridable() and (
                # Affected Qt versions:
                # 5.13 before 5.13.2
                # 5.12 before 5.12.6
                # < 5.12
                (qtutils.version_check('5.13') and
                 not qtutils.version_check('5.13.2')) or
                (qtutils.version_check('5.12') and
                 not qtutils.version_check('5.12.6')) or
                not qtutils.version_check('5.12')
            )
        )
        # We can't really know when to show an error page, as the error might
        # have happened when loading some resource.
        # However, self.url() is not available yet and the requested URL
        # might not match the URL we get from the error - so we just apply a
        # heuristic here.
        if ((show_cert_error or show_non_overr_cert_error) and
                url.matches(self.data.last_navigation.url, QUrl.RemoveScheme)):
            self._show_error_page(url, str(error))

    @pyqtSlot(QUrl)
    def _on_before_load_started(self, url):
        """If we know we're going to visit a URL soon, change the settings.

        This is a WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66656
        """
        super()._on_before_load_started(url)
        if not qtutils.version_check('5.11.1', compiled=False):
            self.settings.update_for_url(url)

    @pyqtSlot()
    def _on_print_requested(self):
        """Slot for window.print() in JS."""
        try:
            self.printing.show_dialog()
        except browsertab.WebTabError as e:
            message.error(str(e))

    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(self, navigation):
        """Handle a navigation request, updating per-URL settings."""
        super()._on_navigation_request(navigation)
        if navigation.url == QUrl('qute://print'):
            self._on_print_requested()
            navigation.accepted = False
        if not navigation.accepted or not navigation.is_main_frame:
            return
        settings_needing_reload = {
            'content.plugins',
            'content.javascript.enabled',
            'content.javascript.can_access_clipboard',
            'content.print_element_backgrounds',
            'input.spatial_navigation',
        }
        assert settings_needing_reload.issubset(configdata.DATA)
        changed = self.settings.update_for_url(navigation.url)
        reload_needed = bool(changed & settings_needing_reload)
        # On Qt < 5.11, we don't need a reload when type == link_clicked.
        # On Qt 5.11.0, we always need a reload.
        # On Qt > 5.11.0, we never need a reload:
        # https://codereview.qt-project.org/#/c/229525/1
        # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66656
        if qtutils.version_check('5.11.1', compiled=False):
            reload_needed = False
        elif not qtutils.version_check('5.11.0', exact=True, compiled=False):
            if navigation.navigation_type == navigation.Type.link_clicked:
                reload_needed = False
        if reload_needed:
            self._reload_url = navigation.url

    def _on_select_client_certificate(self, selection):
        """Handle client certificates.

        Currently, we simply pick the first available certificate and show an
        additional note if there are multiple matches.
        """
        certificate = selection.certificates()[0]
        text = ('<b>Subject:</b> {subj}<br/>'
                '<b>Issuer:</b> {issuer}<br/>'
                '<b>Serial:</b> {serial}'.format(
                    subj=html_utils.escape(certificate.subjectDisplayName()),
                    issuer=html_utils.escape(certificate.issuerDisplayName()),
                    serial=bytes(certificate.serialNumber()).decode('ascii')))
        if len(selection.certificates()) > 1:
            text += ('<br/><br/><b>Note:</b> Multiple matching certificates '
                     'were found, but certificate selection is not '
                     'implemented yet!')
        urlstr = selection.host().host()
        present = message.ask(
            title='Present client certificate to {}?'.format(urlstr),
            text=text,
            mode=usertypes.PromptMode.yesno,
            abort_on=[self.abort_questions],
            url=urlstr)
        if present:
            selection.select(certificate)
        else:
            selection.selectNone()

    def _connect_signals(self):
        """Connect all page/view signals to the tab's slots and signals."""
        view = self._widget
        page = view.page()
        page.windowCloseRequested.connect(self.window_close_requested)
        page.linkHovered.connect(self.link_hovered)
        page.loadProgress.connect(self._on_load_progress)
        page.loadStarted.connect(self._on_load_started)
        page.certificate_error.connect(self._on_ssl_errors)
        page.authenticationRequired.connect(self._on_authentication_required)
        page.proxyAuthenticationRequired.connect(
            self._on_proxy_authentication_required)
        page.contentsSizeChanged.connect(self.contents_size_changed)
        page.navigation_request.connect(self._on_navigation_request)
        if qtutils.version_check('5.12'):
            page.printRequested.connect(self._on_print_requested)
        try:
            # pylint: disable=unused-import
            from PyQt5.QtWebEngineWidgets import (  # type: ignore
                QWebEngineClientCertificateSelection)
        except ImportError:
            pass
        else:
            page.selectClientCertificate.connect(
                self._on_select_client_certificate)
        view.titleChanged.connect(self.title_changed)
        view.urlChanged.connect(self._on_url_changed)
        view.renderProcessTerminated.connect(
            self._on_render_process_terminated)
        view.iconChanged.connect(self.icon_changed)
        page.loadFinished.connect(self._on_history_trigger)
        page.loadFinished.connect(self._restore_zoom)
        page.loadFinished.connect(self._on_load_finished)
        self.before_load_started.connect(self._on_before_load_started)
        self.shutting_down.connect(self.abort_questions)  # type: ignore
        self.load_started.connect(self.abort_questions)  # type: ignore
        # pylint: disable=protected-access
        self.audio._connect_signals()
        self._permissions.connect_signals()
        self._scripts.connect_signals()
| ./CrossVul/dataset_final_sorted/CWE-684/py/bad_3920_1 |
crossvul-python_data_good_3922_2 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Wrapper over our (QtWebKit) WebView."""
import re
import functools
import xml.etree.ElementTree
from PyQt5.QtCore import pyqtSlot, Qt, QUrl, QPoint, QTimer, QSizeF, QSize
from PyQt5.QtGui import QIcon
from PyQt5.QtWebKitWidgets import QWebPage, QWebFrame
from PyQt5.QtWebKit import QWebSettings
from PyQt5.QtPrintSupport import QPrinter
from qutebrowser.browser import browsertab, shared
from qutebrowser.browser.webkit import (webview, tabhistory, webkitelem,
webkitsettings)
from qutebrowser.utils import qtutils, usertypes, utils, log, debug
from qutebrowser.qt import sip
class WebKitAction(browsertab.AbstractAction):

    """QtWebKit implementations related to web actions."""

    action_class = QWebPage
    action_base = QWebPage.WebAction

    def show_source(self, pygments=False):
        """Show the source of the current page.

        The pygments-based viewer is always used here, regardless of the
        *pygments* argument.
        """
        self._show_source_pygments()

    def save_page(self):
        """Save the current page - unsupported on the QtWebKit backend."""
        raise browsertab.UnsupportedOperationError

    def exit_fullscreen(self):
        """Exit fullscreen - unsupported on the QtWebKit backend."""
        raise browsertab.UnsupportedOperationError
class WebKitPrinting(browsertab.AbstractPrinting):

    """QtWebKit implementations related to printing."""

    def check_pdf_support(self):
        """Nothing to check - never raises."""

    def check_printer_support(self):
        """Nothing to check - never raises."""

    def check_preview_support(self):
        """Nothing to check - never raises."""

    def to_pdf(self, filename):
        """Print the current page to the PDF file *filename*."""
        pdf_printer = QPrinter()
        pdf_printer.setOutputFileName(filename)
        self.to_printer(pdf_printer)

    def to_printer(self, printer, callback=None):
        """Print the current page on *printer*.

        QWebView.print() gives us no error information, so *callback* (if
        given) is always invoked with True.
        """
        self._widget.print(printer)
        if callback is None:
            return
        callback(True)
class WebKitSearch(browsertab.AbstractSearch):

    """QtWebKit implementations related to searching on the page."""

    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        # Flags of the last search, reused by next_result/prev_result.
        self._flags = QWebPage.FindFlags(0)  # type: ignore

    def _call_cb(self, callback, found, text, flags, caller):
        """Call the given callback if it's non-None.

        Delays the call via a QTimer so the website is re-rendered in between.

        Args:
            callback: What to call
            found: If the text was found
            text: The text searched for
            flags: The flags searched with
            caller: Name of the caller.
        """
        found_text = 'found' if found else "didn't find"
        # Removing FindWrapsAroundDocument to get the same logging as with
        # QtWebEngine
        debug_flags = debug.qflags_key(
            QWebPage, flags & ~QWebPage.FindWrapsAroundDocument,
            klass=QWebPage.FindFlag)
        if debug_flags != '0x0000':
            flag_text = 'with flags {}'.format(debug_flags)
        else:
            flag_text = ''
        log.webview.debug(' '.join([caller, found_text, text, flag_text])
                          .strip())
        if callback is not None:
            QTimer.singleShot(0, functools.partial(callback, found))
        self.finished.emit(found)

    def clear(self):
        """Remove the current search mark and all highlights."""
        if self.search_displayed:
            self.cleared.emit()
        self.search_displayed = False
        # We first clear the marked text, then the highlights
        self._widget.findText('')
        self._widget.findText('', QWebPage.HighlightAllOccurrences)

    def search(self, text, *, ignore_case=usertypes.IgnoreCase.never,
               reverse=False, result_cb=None):
        """Search for *text* on the page, highlighting all occurrences."""
        # Don't go to next entry on duplicate search
        if self.text == text and self.search_displayed:
            log.webview.debug("Ignoring duplicate search request"
                              " for {}".format(text))
            return
        # Clear old search results, this is done automatically on QtWebEngine.
        self.clear()
        self.text = text
        self.search_displayed = True
        self._flags = QWebPage.FindWrapsAroundDocument
        if self._is_case_sensitive(ignore_case):
            self._flags |= QWebPage.FindCaseSensitively
        if reverse:
            self._flags |= QWebPage.FindBackward
        # We actually search *twice* - once to highlight everything, then again
        # to get a mark so we can navigate.
        found = self._widget.findText(text, self._flags)
        self._widget.findText(text,
                              self._flags | QWebPage.HighlightAllOccurrences)
        self._call_cb(result_cb, found, text, self._flags, 'search')

    def next_result(self, *, result_cb=None):
        """Jump to the next result of the last search."""
        self.search_displayed = True
        found = self._widget.findText(self.text, self._flags)
        self._call_cb(result_cb, found, self.text, self._flags, 'next_result')

    def prev_result(self, *, result_cb=None):
        """Jump to the previous result by inverting the search direction."""
        self.search_displayed = True
        # The int() here makes sure we get a copy of the flags.
        flags = QWebPage.FindFlags(int(self._flags))  # type: ignore
        if flags & QWebPage.FindBackward:
            flags &= ~QWebPage.FindBackward
        else:
            flags |= QWebPage.FindBackward
        found = self._widget.findText(self.text, flags)
        self._call_cb(result_cb, found, self.text, flags, 'prev_result')
class WebKitCaret(browsertab.AbstractCaret):

    """QtWebKit implementations related to moving the cursor/selection."""

    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_entered(self, mode):
        """Enable caret browsing when caret mode is entered."""
        if mode != usertypes.KeyMode.caret:
            return
        self.selection_enabled = self._widget.hasSelection()
        self.selection_toggled.emit(self.selection_enabled)
        settings = self._widget.settings()
        settings.setAttribute(QWebSettings.CaretBrowsingEnabled, True)
        if self._widget.isVisible():
            # Sometimes the caret isn't immediately visible, but unfocusing
            # and refocusing it fixes that.
            self._widget.clearFocus()
            self._widget.setFocus(Qt.OtherFocusReason)
            # Move the caret to the first element in the viewport if there
            # isn't any text which is already selected.
            #
            # Note: We can't use hasSelection() here, as that's always
            # true in caret mode.
            if not self.selection_enabled:
                self._widget.page().currentFrame().evaluateJavaScript(
                    utils.read_file('javascript/position_caret.js'))

    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_left(self, _mode):
        """Disable caret browsing and drop the selection when leaving."""
        settings = self._widget.settings()
        if settings.testAttribute(QWebSettings.CaretBrowsingEnabled):
            if self.selection_enabled and self._widget.hasSelection():
                # Remove selection if it exists
                self._widget.triggerPageAction(QWebPage.MoveToNextChar)
            settings.setAttribute(QWebSettings.CaretBrowsingEnabled, False)
            self.selection_enabled = False

    def move_to_next_line(self, count=1):
        """Move the caret (or extend the selection) down by count lines."""
        if not self.selection_enabled:
            act = QWebPage.MoveToNextLine
        else:
            act = QWebPage.SelectNextLine
        for _ in range(count):
            self._widget.triggerPageAction(act)

    def move_to_prev_line(self, count=1):
        """Move the caret (or extend the selection) up by count lines."""
        if not self.selection_enabled:
            act = QWebPage.MoveToPreviousLine
        else:
            act = QWebPage.SelectPreviousLine
        for _ in range(count):
            self._widget.triggerPageAction(act)

    def move_to_next_char(self, count=1):
        """Move the caret (or extend the selection) right by count chars."""
        if not self.selection_enabled:
            act = QWebPage.MoveToNextChar
        else:
            act = QWebPage.SelectNextChar
        for _ in range(count):
            self._widget.triggerPageAction(act)

    def move_to_prev_char(self, count=1):
        """Move the caret (or extend the selection) left by count chars."""
        if not self.selection_enabled:
            act = QWebPage.MoveToPreviousChar
        else:
            act = QWebPage.SelectPreviousChar
        for _ in range(count):
            self._widget.triggerPageAction(act)

    def move_to_end_of_word(self, count=1):
        """Move the caret (or extend the selection) to the end of the word.

        On Windows an extra char-step is added - presumably to compensate
        for differing word-boundary behavior there; confirm before changing.
        """
        if not self.selection_enabled:
            act = [QWebPage.MoveToNextWord]
            if utils.is_windows:  # pragma: no cover
                act.append(QWebPage.MoveToPreviousChar)
        else:
            act = [QWebPage.SelectNextWord]
            if utils.is_windows:  # pragma: no cover
                act.append(QWebPage.SelectPreviousChar)
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)

    def move_to_next_word(self, count=1):
        """Move the caret (or extend the selection) to the next word."""
        if not self.selection_enabled:
            act = [QWebPage.MoveToNextWord]
            if not utils.is_windows:  # pragma: no branch
                act.append(QWebPage.MoveToNextChar)
        else:
            act = [QWebPage.SelectNextWord]
            if not utils.is_windows:  # pragma: no branch
                act.append(QWebPage.SelectNextChar)
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)

    def move_to_prev_word(self, count=1):
        """Move the caret (or extend the selection) to the previous word."""
        if not self.selection_enabled:
            act = QWebPage.MoveToPreviousWord
        else:
            act = QWebPage.SelectPreviousWord
        for _ in range(count):
            self._widget.triggerPageAction(act)

    def move_to_start_of_line(self):
        """Move the caret (or extend the selection) to the start of the line."""
        if not self.selection_enabled:
            act = QWebPage.MoveToStartOfLine
        else:
            act = QWebPage.SelectStartOfLine
        self._widget.triggerPageAction(act)

    def move_to_end_of_line(self):
        """Move the caret (or extend the selection) to the end of the line."""
        if not self.selection_enabled:
            act = QWebPage.MoveToEndOfLine
        else:
            act = QWebPage.SelectEndOfLine
        self._widget.triggerPageAction(act)

    def move_to_start_of_next_block(self, count=1):
        """Move the caret (or selection) to the start of the next block."""
        if not self.selection_enabled:
            act = [QWebPage.MoveToNextLine,
                   QWebPage.MoveToStartOfBlock]
        else:
            act = [QWebPage.SelectNextLine,
                   QWebPage.SelectStartOfBlock]
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)

    def move_to_start_of_prev_block(self, count=1):
        """Move the caret (or selection) to the start of the previous block."""
        if not self.selection_enabled:
            act = [QWebPage.MoveToPreviousLine,
                   QWebPage.MoveToStartOfBlock]
        else:
            act = [QWebPage.SelectPreviousLine,
                   QWebPage.SelectStartOfBlock]
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)

    def move_to_end_of_next_block(self, count=1):
        """Move the caret (or selection) to the end of the next block."""
        if not self.selection_enabled:
            act = [QWebPage.MoveToNextLine,
                   QWebPage.MoveToEndOfBlock]
        else:
            act = [QWebPage.SelectNextLine,
                   QWebPage.SelectEndOfBlock]
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)

    def move_to_end_of_prev_block(self, count=1):
        """Move the caret (or selection) to the end of the previous block."""
        if not self.selection_enabled:
            act = [QWebPage.MoveToPreviousLine, QWebPage.MoveToEndOfBlock]
        else:
            act = [QWebPage.SelectPreviousLine, QWebPage.SelectEndOfBlock]
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)

    def move_to_start_of_document(self):
        """Move the caret (or selection) to the start of the document."""
        if not self.selection_enabled:
            act = QWebPage.MoveToStartOfDocument
        else:
            act = QWebPage.SelectStartOfDocument
        self._widget.triggerPageAction(act)

    def move_to_end_of_document(self):
        """Move the caret (or selection) to the end of the document."""
        if not self.selection_enabled:
            act = QWebPage.MoveToEndOfDocument
        else:
            act = QWebPage.SelectEndOfDocument
        self._widget.triggerPageAction(act)

    def toggle_selection(self):
        """Toggle between moving the caret and extending the selection."""
        self.selection_enabled = not self.selection_enabled
        self.selection_toggled.emit(self.selection_enabled)

    def drop_selection(self):
        """Drop the current selection (by moving the caret one char)."""
        self._widget.triggerPageAction(QWebPage.MoveToNextChar)

    def selection(self, callback):
        """Pass the currently selected text to the callback."""
        callback(self._widget.selectedText())

    def reverse_selection(self):
        """Swap the anchor and the focus end of the selection via JS."""
        self._tab.run_js_async("""{
            const sel = window.getSelection();
            sel.setBaseAndExtent(
                sel.extentNode, sel.extentOffset, sel.baseNode,
                sel.baseOffset
            );
        }""")

    def _follow_selected(self, *, tab=False):
        """Follow the selected link, via JS if enabled, else by DOM parsing.

        Raises:
            browsertab.WebTabError: If the selected HTML can't be parsed or
                                    the anchor has no href attribute.
        """
        if QWebSettings.globalSettings().testAttribute(
                QWebSettings.JavascriptEnabled):
            if tab:
                self._tab.data.override_target = usertypes.ClickTarget.tab
            self._tab.run_js_async("""
                const aElm = document.activeElement;
                if (window.getSelection().anchorNode) {
                    window.getSelection().anchorNode.parentNode.click();
                } else if (aElm && aElm !== document.body) {
                    aElm.click();
                }
            """)
        else:
            selection = self._widget.selectedHtml()
            if not selection:
                # Getting here may mean we crashed, but we can't do anything
                # about that until this commit is released:
                # https://github.com/annulen/webkit/commit/0e75f3272d149bc64899c161f150eb341a2417af
                # TODO find a way to check if something is focused
                self._follow_enter(tab)
                return
            try:
                selected_element = xml.etree.ElementTree.fromstring(
                    '<html>{}</html>'.format(selection)).find('a')
            except xml.etree.ElementTree.ParseError:
                raise browsertab.WebTabError('Could not parse selected '
                                             'element!')
            if selected_element is not None:
                try:
                    url = selected_element.attrib['href']
                except KeyError:
                    raise browsertab.WebTabError('Anchor element without '
                                                 'href!')
                url = self._tab.url().resolved(QUrl(url))
                if tab:
                    self._tab.new_tab_requested.emit(url)
                else:
                    self._tab.load_url(url)

    def follow_selected(self, *, tab=False):
        """Follow the selected link, always emitting follow_selected_done."""
        try:
            self._follow_selected(tab=tab)
        finally:
            self.follow_selected_done.emit()
class WebKitZoom(browsertab.AbstractZoom):

    """QtWebKit implementations related to zooming."""

    def _set_factor_internal(self, factor):
        """Apply the given zoom factor to the underlying QWebView."""
        self._widget.setZoomFactor(factor)
class WebKitScroller(browsertab.AbstractScroller):

    """QtWebKit implementations related to scrolling."""

    # FIXME:qtwebengine When to use the main frame, when the current one?

    def pos_px(self):
        """Return the scroll position of the main frame in pixels."""
        return self._widget.page().mainFrame().scrollPosition()

    def pos_perc(self):
        """Return the scroll position as percentages."""
        return self._widget.scroll_pos

    def to_point(self, point):
        """Scroll the main frame to the given pixel position."""
        self._widget.page().mainFrame().setScrollPosition(point)

    def to_anchor(self, name):
        """Scroll the main frame to the anchor with the given name."""
        self._widget.page().mainFrame().scrollToAnchor(name)

    def delta(self, x: int = 0, y: int = 0) -> None:
        """Scroll relatively by the given pixel amounts."""
        qtutils.check_overflow(x, 'int')
        qtutils.check_overflow(y, 'int')
        self._widget.page().mainFrame().scroll(x, y)

    def delta_page(self, x: float = 0.0, y: float = 0.0) -> None:
        """Scroll relatively by the given fractions of a page."""
        # Whole vertical pages are done via page_up/page_down (key presses),
        # only the fractional remainder goes through pixel scrolling.
        if y.is_integer():
            y = int(y)
            if y == 0:
                pass
            elif y < 0:
                self.page_up(count=-y)
            elif y > 0:
                self.page_down(count=y)
            y = 0
        if x == 0 and y == 0:
            return
        size = self._widget.page().mainFrame().geometry()
        self.delta(int(x * size.width()), int(y * size.height()))

    def to_perc(self, x=None, y=None):
        """Scroll to the given percentages; None leaves an axis unchanged."""
        if x is None and y == 0:
            self.top()
        elif x is None and y == 100:
            self.bottom()
        else:
            for val, orientation in [(x, Qt.Horizontal), (y, Qt.Vertical)]:
                if val is not None:
                    frame = self._widget.page().mainFrame()
                    maximum = frame.scrollBarMaximum(orientation)
                    if maximum == 0:
                        continue
                    pos = int(maximum * val / 100)
                    pos = qtutils.check_overflow(pos, 'int', fatal=False)
                    frame.setScrollBarValue(orientation, pos)

    def _key_press(self, key, count=1, getter_name=None, direction=None):
        """Scroll by faking key presses, count times.

        Args:
            key: The Qt.Key to fake.
            count: How many times to press it (capped at 5000).
            getter_name: Name of the QWebFrame scrollbar-limit getter used to
                         detect that the end was reached, or None.
            direction: The Qt orientation passed to the getter.
        """
        frame = self._widget.page().mainFrame()
        getter = None if getter_name is None else getattr(frame, getter_name)
        # FIXME:qtwebengine needed?
        # self._widget.setFocus()
        for _ in range(min(count, 5000)):
            # Abort scrolling if the minimum/maximum was reached.
            if (getter is not None and
                    frame.scrollBarValue(direction) == getter(direction)):
                return
            self._tab.fake_key_press(key)

    def up(self, count=1):
        """Scroll up by count steps."""
        self._key_press(Qt.Key_Up, count, 'scrollBarMinimum', Qt.Vertical)

    def down(self, count=1):
        """Scroll down by count steps."""
        self._key_press(Qt.Key_Down, count, 'scrollBarMaximum', Qt.Vertical)

    def left(self, count=1):
        """Scroll left by count steps."""
        self._key_press(Qt.Key_Left, count, 'scrollBarMinimum', Qt.Horizontal)

    def right(self, count=1):
        """Scroll right by count steps."""
        self._key_press(Qt.Key_Right, count, 'scrollBarMaximum', Qt.Horizontal)

    def top(self):
        """Scroll to the top of the page."""
        self._key_press(Qt.Key_Home)

    def bottom(self):
        """Scroll to the bottom of the page."""
        self._key_press(Qt.Key_End)

    def page_up(self, count=1):
        """Scroll up by count pages."""
        self._key_press(Qt.Key_PageUp, count, 'scrollBarMinimum', Qt.Vertical)

    def page_down(self, count=1):
        """Scroll down by count pages."""
        self._key_press(Qt.Key_PageDown, count, 'scrollBarMaximum',
                        Qt.Vertical)

    def at_top(self):
        """Return True if the page is scrolled to the top."""
        return self.pos_px().y() == 0

    def at_bottom(self):
        """Return True if the page is scrolled to the bottom."""
        frame = self._widget.page().currentFrame()
        return self.pos_px().y() >= frame.scrollBarMaximum(Qt.Vertical)
class WebKitHistoryPrivate(browsertab.AbstractHistoryPrivate):

    """History-related methods which are not part of the extension API."""

    def serialize(self):
        """Serialize the tab history into a binary blob."""
        return qtutils.serialize(self._history)

    def deserialize(self, data):
        """Restore the tab history from data produced by serialize()."""
        qtutils.deserialize(data, self._history)

    def load_items(self, items):
        """Load the given history items into this tab's history.

        Also restores the zoom factor and scroll position stored in the
        current item's user data, if any.
        """
        if items:
            self._tab.before_load_started.emit(items[-1].url)
        stream, _data, user_data = tabhistory.serialize(items)
        qtutils.deserialize_stream(stream, self._history)
        for i, data in enumerate(user_data):
            self._history.itemAt(i).setUserData(data)
        cur_data = self._history.currentItem().userData()
        if cur_data is not None:
            if 'zoom' in cur_data:
                self._tab.zoom.set_factor(cur_data['zoom'])
            # Only restore the scroll position if nothing scrolled yet.
            if ('scroll-pos' in cur_data and
                    self._tab.scroller.pos_px() == QPoint(0, 0)):
                QTimer.singleShot(0, functools.partial(
                    self._tab.scroller.to_point, cur_data['scroll-pos']))
class WebKitHistory(browsertab.AbstractHistory):

    """QtWebKit implementations related to page history."""

    def __init__(self, tab):
        super().__init__(tab)
        self.private_api = WebKitHistoryPrivate(tab)

    def __len__(self):
        """Return the number of history items."""
        return len(self._history)

    def __iter__(self):
        """Iterate over all history items."""
        return iter(self._history.items())

    def current_idx(self):
        """Return the index of the current history item."""
        return self._history.currentItemIndex()

    def can_go_back(self):
        """Return True if there is an item to go back to."""
        return self._history.canGoBack()

    def can_go_forward(self):
        """Return True if there is an item to go forward to."""
        return self._history.canGoForward()

    def _item_at(self, i):
        """Return the history item at the given index."""
        return self._history.itemAt(i)

    def _go_to_item(self, item):
        """Navigate to the given history item."""
        self._tab.before_load_started.emit(item.url())
        self._history.goToItem(item)
class WebKitElements(browsertab.AbstractElements):

    """QtWebKit implementations related to elements on the page."""

    def find_css(self, selector, callback, error_cb, *, only_visible=False):
        """Find all elements matching *selector* and pass them to *callback*.

        Args:
            selector: The CSS selector to search for.
            callback: Called with the list of WebKitElements found.
            error_cb: Unused with the QtWebKit backend.
            only_visible: Restrict the result to visible elements.
        """
        utils.unused(error_cb)
        mainframe = self._widget.page().mainFrame()
        if mainframe is None:
            raise browsertab.WebTabError("No frame focused!")

        matches = [
            webkitelem.WebKitElement(elem, tab=self._tab)
            for frame in webkitelem.get_child_frames(mainframe)
            for elem in frame.findAllElements(selector)
        ]

        if only_visible:
            # pylint: disable=protected-access
            matches = [e for e in matches if e._is_visible(mainframe)]
            # pylint: enable=protected-access

        callback(matches)

    def find_id(self, elem_id, callback):
        """Find the element with the given ID and pass it to *callback*."""
        def single_cb(elems):
            """Forward the first hit (or None) to the real callback."""
            callback(elems[0] if elems else None)

        # Escape non-alphanumeric characters in the selector
        # https://www.w3.org/TR/CSS2/syndata.html#value-def-identifier
        escaped = re.sub(r'[^a-zA-Z0-9_-]', r'\\\g<0>', elem_id)
        self.find_css('#' + escaped, single_cb, error_cb=lambda exc: None)

    def find_focused(self, callback):
        """Pass the currently focused element (or None) to *callback*."""
        frame = self._widget.page().currentFrame()
        if frame is None:
            callback(None)
            return

        focused = frame.findFirstElement('*:focus')
        if focused.isNull():
            callback(None)
            return
        callback(webkitelem.WebKitElement(focused, tab=self._tab))

    def find_at_pos(self, pos, callback):
        """Pass the element at position *pos* (or None) to *callback*."""
        assert pos.x() >= 0
        assert pos.y() >= 0
        frame = self._widget.page().frameAt(pos)
        if frame is None:
            # This happens when we click inside the webview, but not actually
            # on the QWebPage - for example when clicking the scrollbar
            # sometimes.
            log.webview.debug("Hit test at {} but frame is None!".format(pos))
            callback(None)
            return

        # You'd think we have to subtract frame.geometry().topLeft() from the
        # position, but it seems QWebFrame::hitTestContent wants a position
        # relative to the QWebView, not to the frame. This makes no sense to
        # me, but it works this way.
        hit = frame.hitTestContent(pos)
        if hit.isNull():
            # For some reason, the whole hit result can be null sometimes (e.g.
            # on doodle menu links).
            log.webview.debug("Hit test result is null!")
            callback(None)
            return

        try:
            found = webkitelem.WebKitElement(hit.element(), tab=self._tab)
        except webkitelem.IsNullError:
            # For some reason, the hit result element can be a null element
            # sometimes (e.g. when clicking the timetable fields on
            # http://www.sbb.ch/ ).
            log.webview.debug("Hit test result element is null!")
            callback(None)
            return

        callback(found)
class WebKitAudio(browsertab.AbstractAudio):

    """Dummy handling of audio status for QtWebKit.

    QtWebKit offers no mute/audible API here, so muting raises an error and
    the status queries always return False.
    """

    def set_muted(self, muted: bool, override: bool = False) -> None:
        raise browsertab.WebTabError('Muting is not supported on QtWebKit!')

    def is_muted(self) -> bool:
        return False

    def is_recently_audible(self) -> bool:
        return False
class WebKitTabPrivate(browsertab.AbstractTabPrivate):

    """QtWebKit-related methods which aren't part of the public API."""

    def networkaccessmanager(self):
        """Get the QNetworkAccessManager of this tab's page."""
        return self._widget.page().networkAccessManager()

    def clear_ssl_errors(self):
        """Clear all remembered SSL errors on the network access manager."""
        self.networkaccessmanager().clear_all_ssl_errors()

    def event_target(self):
        """The widget fake events should be sent to."""
        return self._widget

    def shutdown(self):
        """Shut down the underlying web view widget."""
        self._widget.shutdown()
class WebKitTab(browsertab.AbstractTab):

    """A QtWebKit tab in the browser."""

    def __init__(self, *, win_id, mode_manager, private, parent=None):
        super().__init__(win_id=win_id, private=private, parent=parent)
        widget = webview.WebView(win_id=win_id, tab_id=self.tab_id,
                                 private=private, tab=self)
        if private:
            self._make_private(widget)
        # Per-feature sub-objects making up the AbstractTab API.
        self.history = WebKitHistory(tab=self)
        self.scroller = WebKitScroller(tab=self, parent=self)
        self.caret = WebKitCaret(mode_manager=mode_manager,
                                 tab=self, parent=self)
        self.zoom = WebKitZoom(tab=self, parent=self)
        self.search = WebKitSearch(tab=self, parent=self)
        self.printing = WebKitPrinting(tab=self)
        self.elements = WebKitElements(tab=self)
        self.action = WebKitAction(tab=self)
        self.audio = WebKitAudio(tab=self, parent=self)
        self.private_api = WebKitTabPrivate(mode_manager=mode_manager,
                                           tab=self)
        # We're assigning settings in _set_widget
        self.settings = webkitsettings.WebKitSettings(settings=None)
        self._set_widget(widget)
        self._connect_signals()
        self.backend = usertypes.Backend.QtWebKit

    def _install_event_filter(self):
        """Install the tab's event filter on the underlying widget."""
        self._widget.installEventFilter(self._tab_event_filter)

    def _make_private(self, widget):
        """Enable QtWebKit private browsing for the given widget."""
        settings = widget.settings()
        settings.setAttribute(QWebSettings.PrivateBrowsingEnabled, True)

    def load_url(self, url, *, emit_before_load_started=True):
        """Load the given QUrl in this tab."""
        self._load_url_prepare(
            url, emit_before_load_started=emit_before_load_started)
        self._widget.load(url)

    def url(self, *, requested=False):
        """Get the current (or, with requested=True, originally requested) URL."""
        frame = self._widget.page().mainFrame()
        if requested:
            return frame.requestedUrl()
        else:
            return frame.url()

    def dump_async(self, callback, *, plain=False):
        """Call *callback* with the main frame's content as plain text or HTML."""
        frame = self._widget.page().mainFrame()
        if plain:
            callback(frame.toPlainText())
        else:
            callback(frame.toHtml())

    def run_js_async(self, code, callback=None, *, world=None):
        """Run *code* as JavaScript; QtWebKit has no isolated JS worlds."""
        if world is not None and world != usertypes.JsWorld.jseval:
            log.webview.warning("Ignoring world ID {}".format(world))
        document_element = self._widget.page().mainFrame().documentElement()
        result = document_element.evaluateJavaScript(code)
        if callback is not None:
            callback(result)

    def icon(self):
        """Get the current favicon as a QIcon."""
        return self._widget.icon()

    def reload(self, *, force=False):
        """Reload the page, bypassing the cache if *force* is set."""
        if force:
            action = QWebPage.ReloadAndBypassCache
        else:
            action = QWebPage.Reload
        self._widget.triggerPageAction(action)

    def stop(self):
        """Stop loading the current page."""
        self._widget.stop()

    def title(self):
        """Get the current page title."""
        return self._widget.title()

    @pyqtSlot()
    def _on_history_trigger(self):
        """Emit history_item_triggered with URL, requested URL and title."""
        url = self.url()
        requested_url = self.url(requested=True)
        self.history_item_triggered.emit(url, requested_url, self.title())

    def set_html(self, html, base_url=QUrl()):
        """Set the page content to the given HTML string."""
        self._widget.setHtml(html, base_url)

    @pyqtSlot()
    def _on_load_started(self):
        super()._on_load_started()
        nam = self._widget.page().networkAccessManager()
        # Reset the per-load netrc flag on the network access manager.
        nam.netrc_used = False
        # Make sure the icon is cleared when navigating to a page without one.
        self.icon_changed.emit(QIcon())

    @pyqtSlot(bool)
    def _on_load_finished(self, ok: bool) -> None:
        super()._on_load_finished(ok)
        self._update_load_status(ok)

    @pyqtSlot()
    def _on_frame_load_finished(self):
        """Make sure we emit an appropriate status when loading finished.

        While Qt has a bool "ok" attribute for loadFinished, it always is True
        when using error pages... See
        https://github.com/qutebrowser/qutebrowser/issues/84
        """
        self._on_load_finished(not self._widget.page().error_occurred)

    @pyqtSlot()
    def _on_webkit_icon_changed(self):
        """Emit iconChanged with a QIcon like QWebEngineView does."""
        if sip.isdeleted(self._widget):
            log.webview.debug("Got _on_webkit_icon_changed for deleted view!")
            return
        self.icon_changed.emit(self._widget.icon())

    @pyqtSlot(QWebFrame)
    def _on_frame_created(self, frame):
        """Connect the contentsSizeChanged signal of each frame."""
        # FIXME:qtwebengine those could theoretically regress:
        # https://github.com/qutebrowser/qutebrowser/issues/152
        # https://github.com/qutebrowser/qutebrowser/issues/263
        frame.contentsSizeChanged.connect(self._on_contents_size_changed)

    @pyqtSlot(QSize)
    def _on_contents_size_changed(self, size):
        # Re-emit as QSizeF to match the AbstractTab signal signature.
        self.contents_size_changed.emit(QSizeF(size))

    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(self, navigation):
        """Handle a navigation request, redirecting it to another tab if needed."""
        super()._on_navigation_request(navigation)
        if not navigation.accepted:
            return
        log.webview.debug("target {} override {}".format(
            self.data.open_target, self.data.override_target))
        # A one-shot override target takes precedence over the regular one.
        if self.data.override_target is not None:
            target = self.data.override_target
            self.data.override_target = None
        else:
            target = self.data.open_target
        if (navigation.navigation_type == navigation.Type.link_clicked and
                target != usertypes.ClickTarget.normal):
            # The link should be opened in another tab/window: load it there
            # and reject the navigation in this tab.
            tab = shared.get_tab(self.win_id, target)
            tab.load_url(navigation.url)
            self.data.open_target = usertypes.ClickTarget.normal
            navigation.accepted = False
        if navigation.is_main_frame:
            self.settings.update_for_url(navigation.url)

    @pyqtSlot('QNetworkReply*')
    def _on_ssl_errors(self, reply):
        # Remember hosts which had certificate errors.
        self._insecure_hosts.add(reply.url().host())

    def _connect_signals(self):
        """Connect all view/page/frame signals to this tab's slots."""
        view = self._widget
        page = view.page()
        frame = page.mainFrame()
        page.windowCloseRequested.connect(self.window_close_requested)
        page.linkHovered.connect(self.link_hovered)
        page.loadProgress.connect(self._on_load_progress)
        frame.loadStarted.connect(self._on_load_started)
        view.scroll_pos_changed.connect(self.scroller.perc_changed)
        view.titleChanged.connect(self.title_changed)
        view.urlChanged.connect(self._on_url_changed)
        view.shutting_down.connect(self.shutting_down)
        page.networkAccessManager().sslErrors.connect(self._on_ssl_errors)
        frame.loadFinished.connect(self._on_frame_load_finished)
        view.iconChanged.connect(self._on_webkit_icon_changed)
        page.frameCreated.connect(self._on_frame_created)
        frame.contentsSizeChanged.connect(self._on_contents_size_changed)
        frame.initialLayoutCompleted.connect(self._on_history_trigger)
        page.navigation_request.connect(self._on_navigation_request)
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Wrapper over a QWebEngineView."""
import math
import functools
import re
import html as html_utils
import typing
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, Qt, QPoint, QPointF, QUrl,
QTimer, QObject)
from PyQt5.QtNetwork import QAuthenticator
from PyQt5.QtWidgets import QApplication, QWidget
from PyQt5.QtWebEngineWidgets import QWebEnginePage, QWebEngineScript
from qutebrowser.config import configdata, config
from qutebrowser.browser import (browsertab, eventfilter, shared, webelem,
history, greasemonkey)
from qutebrowser.browser.webengine import (webview, webengineelem, tabhistory,
interceptor, webenginequtescheme,
cookies, webenginedownloads,
webenginesettings, certificateerror)
from qutebrowser.misc import miscwidgets, objects
from qutebrowser.utils import (usertypes, qtutils, log, javascript, utils,
message, objreg, jinja, debug)
from qutebrowser.qt import sip
# Module-level reference to the qute://* scheme handler, kept alive so it
# isn't garbage-collected - see the comment in init() below.
_qute_scheme_handler = None
def init():
    """Initialize QtWebEngine-specific modules.

    Installs the qute://* scheme handler, the request interceptor, the
    download manager and the cookie filter on the default profile (and on
    the private profile, if one exists), and hooks history-clearing signals
    up to the profiles' visited-link stores.
    """
    # For some reason we need to keep a reference, otherwise the scheme handler
    # won't work...
    # https://www.riverbankcomputing.com/pipermail/pyqt/2016-September/038075.html
    global _qute_scheme_handler
    app = QApplication.instance()
    log.init.debug("Initializing qute://* handler...")
    _qute_scheme_handler = webenginequtescheme.QuteSchemeHandler(parent=app)
    _qute_scheme_handler.install(webenginesettings.default_profile)
    if webenginesettings.private_profile:
        _qute_scheme_handler.install(webenginesettings.private_profile)
    log.init.debug("Initializing request interceptor...")
    req_interceptor = interceptor.RequestInterceptor(parent=app)
    req_interceptor.install(webenginesettings.default_profile)
    if webenginesettings.private_profile:
        req_interceptor.install(webenginesettings.private_profile)
    log.init.debug("Initializing QtWebEngine downloads...")
    download_manager = webenginedownloads.DownloadManager(parent=app)
    download_manager.install(webenginesettings.default_profile)
    if webenginesettings.private_profile:
        download_manager.install(webenginesettings.private_profile)
    objreg.register('webengine-download-manager', download_manager)
    log.init.debug("Initializing cookie filter...")
    cookies.install_filter(webenginesettings.default_profile)
    if webenginesettings.private_profile:
        cookies.install_filter(webenginesettings.private_profile)
    # Clear visited links on web history clear
    for p in [webenginesettings.default_profile,
              webenginesettings.private_profile]:
        if not p:
            continue
        history.web_history.history_cleared.connect(p.clearAllVisitedLinks)
        history.web_history.url_cleared.connect(
            lambda url, profile=p: profile.clearVisitedLinks([url]))
# Mapping worlds from usertypes.JsWorld to QWebEngineScript world IDs.
_JS_WORLD_MAP = {
    usertypes.JsWorld.main: QWebEngineScript.MainWorld,
    usertypes.JsWorld.application: QWebEngineScript.ApplicationWorld,
    usertypes.JsWorld.user: QWebEngineScript.UserWorld,
    # Extra world for jseval, one past the last predefined world ID.
    usertypes.JsWorld.jseval: QWebEngineScript.UserWorld + 1,
}
class WebEngineAction(browsertab.AbstractAction):

    """QtWebEngine implementations related to web actions."""

    # Used by the AbstractAction base class to resolve action names.
    action_class = QWebEnginePage
    action_base = QWebEnginePage.WebAction

    def exit_fullscreen(self):
        """Leave fullscreen mode."""
        self._widget.triggerPageAction(QWebEnginePage.ExitFullScreen)

    def save_page(self):
        """Save the current page."""
        self._widget.triggerPageAction(QWebEnginePage.SavePage)

    def show_source(self, pygments=False):
        """Show the source of the current page.

        Args:
            pygments: Render the source with pygments instead of the native
                      view-source support.
        """
        if pygments:
            self._show_source_pygments()
            return
        try:
            self._widget.triggerPageAction(QWebEnginePage.ViewSource)
        except AttributeError:
            # Qt < 5.8
            tb = objreg.get('tabbed-browser', scope='window',
                            window=self._tab.win_id)
            urlstr = self._tab.url().toString(
                QUrl.RemoveUserInfo)  # type: ignore
            # The original URL becomes the path of a view-source: URL
            # (without a host), but query/fragment should stay.
            url = QUrl('view-source:' + urlstr)
            tb.tabopen(url, background=False, related=True)
class WebEnginePrinting(browsertab.AbstractPrinting):

    """QtWebEngine implementations related to printing."""

    def check_pdf_support(self):
        """PDF export is always available with QtWebEngine."""
        pass

    def check_printer_support(self):
        """Raise WebTabError if printing is unavailable (Qt < 5.8)."""
        if not hasattr(self._widget.page(), 'print'):
            raise browsertab.WebTabError(
                "Printing is unsupported with QtWebEngine on Qt < 5.8")

    def check_preview_support(self):
        """Print previews are never supported with QtWebEngine."""
        raise browsertab.WebTabError(
            "Print previews are unsupported with QtWebEngine")

    def to_pdf(self, filename):
        """Print the current page to a PDF file at *filename*."""
        self._widget.page().printToPdf(filename)

    def to_printer(self, printer, callback=None):
        """Print the current page to the given printer.

        Args:
            printer: The QPrinter to print to.
            callback: Called with a bool once printing finished; defaults to
                      a no-op.
        """
        if callback is None:
            # PEP 8 (E731): use a named def instead of assigning a lambda,
            # so the default callback has a proper name in tracebacks.
            def callback(_ok):
                pass
        self._widget.page().print(printer, callback)
class WebEngineSearch(browsertab.AbstractSearch):

    """QtWebEngine implementations related to searching on the page.

    Attributes:
        _flags: The QWebEnginePage.FindFlags of the last search.
        _pending_searches: How many searches have been started but not called
                           back yet.
    """

    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        self._flags = QWebEnginePage.FindFlags(0)  # type: ignore
        self._pending_searches = 0

    def _find(self, text, flags, callback, caller):
        """Call findText on the widget."""
        self.search_displayed = True
        self._pending_searches += 1

        def wrapped_callback(found):
            """Wrap the callback to do debug logging."""
            self._pending_searches -= 1
            if self._pending_searches > 0:
                # See https://github.com/qutebrowser/qutebrowser/issues/2442
                # and https://github.com/qt/qtwebengine/blob/5.10/src/core/web_contents_adapter.cpp#L924-L934
                log.webview.debug("Ignoring cancelled search callback with "
                                  "{} pending searches".format(
                                      self._pending_searches))
                return
            if sip.isdeleted(self._widget):
                # This happens when starting a search, and closing the tab
                # before results arrive.
                log.webview.debug("Ignoring finished search for deleted "
                                  "widget")
                return
            found_text = 'found' if found else "didn't find"
            if flags:
                flag_text = 'with flags {}'.format(debug.qflags_key(
                    QWebEnginePage, flags, klass=QWebEnginePage.FindFlag))
            else:
                flag_text = ''
            log.webview.debug(' '.join([caller, found_text, text, flag_text])
                              .strip())
            if callback is not None:
                callback(found)
            self.finished.emit(found)

        self._widget.findText(text, flags, wrapped_callback)

    def search(self, text, *, ignore_case=usertypes.IgnoreCase.never,
               reverse=False, result_cb=None):
        """Start a new search, ignoring duplicate requests."""
        # Don't go to next entry on duplicate search
        if self.text == text and self.search_displayed:
            log.webview.debug("Ignoring duplicate search request"
                              " for {}".format(text))
            return
        self.text = text
        self._flags = QWebEnginePage.FindFlags(0)  # type: ignore
        if self._is_case_sensitive(ignore_case):
            self._flags |= QWebEnginePage.FindCaseSensitively
        if reverse:
            self._flags |= QWebEnginePage.FindBackward
        self._find(text, self._flags, result_cb, 'search')

    def clear(self):
        """Clear the current search and its highlighting."""
        if self.search_displayed:
            self.cleared.emit()
        self.search_displayed = False
        self._widget.findText('')

    def prev_result(self, *, result_cb=None):
        """Go to the previous result by searching with inverted direction."""
        # The int() here makes sure we get a copy of the flags.
        flags = QWebEnginePage.FindFlags(int(self._flags))  # type: ignore
        if flags & QWebEnginePage.FindBackward:
            flags &= ~QWebEnginePage.FindBackward
        else:
            flags |= QWebEnginePage.FindBackward
        self._find(self.text, flags, result_cb, 'prev_result')

    def next_result(self, *, result_cb=None):
        """Go to the next result of the current search."""
        self._find(self.text, self._flags, result_cb, 'next_result')
class WebEngineCaret(browsertab.AbstractCaret):

    """QtWebEngine implementations related to moving the cursor/selection."""

    def _flags(self):
        """Get flags to pass to JS."""
        flags = set()
        if qtutils.version_check('5.7.1', compiled=False):
            flags.add('filter-prefix')
        if utils.is_windows:
            flags.add('windows')
        return list(flags)

    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_entered(self, mode):
        """Set up the JS-side caret when caret mode is entered."""
        if mode != usertypes.KeyMode.caret:
            return
        if self._tab.search.search_displayed:
            # We are currently in search mode.
            # convert the search to a blue selection so we can operate on it
            # https://bugreports.qt.io/browse/QTBUG-60673
            self._tab.search.clear()
        self._tab.run_js_async(
            javascript.assemble('caret', 'setFlags', self._flags()))
        self._js_call('setInitialCursor', callback=self._selection_cb)

    def _selection_cb(self, enabled):
        """Emit selection_toggled based on setInitialCursor."""
        if self._mode_manager.mode != usertypes.KeyMode.caret:
            log.webview.debug("Ignoring selection cb due to mode change.")
            return
        if enabled is None:
            log.webview.debug("Ignoring selection status None")
            return
        self.selection_toggled.emit(enabled)

    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_left(self, mode):
        """Tear down the JS-side caret when caret mode is left."""
        if mode != usertypes.KeyMode.caret:
            return
        self.drop_selection()
        self._js_call('disableCaret')

    # The movement commands below all delegate to the matching function in
    # the 'caret' JS module.

    def move_to_next_line(self, count=1):
        self._js_call('moveDown', count)

    def move_to_prev_line(self, count=1):
        self._js_call('moveUp', count)

    def move_to_next_char(self, count=1):
        self._js_call('moveRight', count)

    def move_to_prev_char(self, count=1):
        self._js_call('moveLeft', count)

    def move_to_end_of_word(self, count=1):
        self._js_call('moveToEndOfWord', count)

    def move_to_next_word(self, count=1):
        self._js_call('moveToNextWord', count)

    def move_to_prev_word(self, count=1):
        self._js_call('moveToPreviousWord', count)

    def move_to_start_of_line(self):
        self._js_call('moveToStartOfLine')

    def move_to_end_of_line(self):
        self._js_call('moveToEndOfLine')

    def move_to_start_of_next_block(self, count=1):
        self._js_call('moveToStartOfNextBlock', count)

    def move_to_start_of_prev_block(self, count=1):
        self._js_call('moveToStartOfPrevBlock', count)

    def move_to_end_of_next_block(self, count=1):
        self._js_call('moveToEndOfNextBlock', count)

    def move_to_end_of_prev_block(self, count=1):
        self._js_call('moveToEndOfPrevBlock', count)

    def move_to_start_of_document(self):
        self._js_call('moveToStartOfDocument')

    def move_to_end_of_document(self):
        self._js_call('moveToEndOfDocument')

    def toggle_selection(self):
        self._js_call('toggleSelection', callback=self.selection_toggled.emit)

    def drop_selection(self):
        self._js_call('dropSelection')

    def selection(self, callback):
        # Not using selectedText() as WORKAROUND for
        # https://bugreports.qt.io/browse/QTBUG-53134
        # Even on Qt 5.10 selectedText() seems to work poorly, see
        # https://github.com/qutebrowser/qutebrowser/issues/3523
        self._tab.run_js_async(javascript.assemble('caret', 'getSelection'),
                               callback)

    def reverse_selection(self):
        self._js_call('reverseSelection')

    def _follow_selected_cb_wrapped(self, js_elem, tab):
        """Ensure follow_selected_done is emitted even if the callback raises."""
        try:
            self._follow_selected_cb(js_elem, tab)
        finally:
            self.follow_selected_done.emit()

    def _follow_selected_cb(self, js_elem, tab):
        """Callback for javascript which clicks the selected element.

        Args:
            js_elem: The element serialized from javascript.
            tab: Open in a new tab.
        """
        if js_elem is None:
            return
        if js_elem == "focused":
            # we had a focused element, not a selected one. Just send <enter>
            self._follow_enter(tab)
            return
        assert isinstance(js_elem, dict), js_elem
        elem = webengineelem.WebEngineElement(js_elem, tab=self._tab)
        if tab:
            click_type = usertypes.ClickTarget.tab
        else:
            click_type = usertypes.ClickTarget.normal
        # Only click if we see a link
        if elem.is_link():
            log.webview.debug("Found link in selection, clicking. ClickTarget "
                              "{}, elem {}".format(click_type, elem))
            try:
                elem.click(click_type)
            except webelem.Error as e:
                message.error(str(e))

    def follow_selected(self, *, tab=False):
        """Follow the currently selected (or focused) link."""
        if self._tab.search.search_displayed:
            # We are currently in search mode.
            # let's click the link via a fake-click
            # https://bugreports.qt.io/browse/QTBUG-60673
            self._tab.search.clear()
            log.webview.debug("Clicking a searched link via fake key press.")
            # send a fake enter, clicking the orange selection box
            self._follow_enter(tab)
        else:
            # click an existing blue selection
            js_code = javascript.assemble('webelem',
                                          'find_selected_focused_link')
            self._tab.run_js_async(
                js_code,
                lambda jsret: self._follow_selected_cb_wrapped(jsret, tab))

    def _js_call(self, command, *args, callback=None):
        """Assemble and run a 'caret' JS command, with an optional callback."""
        code = javascript.assemble('caret', command, *args)
        self._tab.run_js_async(code, callback)
class WebEngineScroller(browsertab.AbstractScroller):

    """QtWebEngine implementations related to scrolling."""

    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        # Cached scroll state, updated via scrollPositionChanged.
        self._pos_perc = (0, 0)
        self._pos_px = QPoint()
        self._at_bottom = False

    def _init_widget(self, widget):
        super()._init_widget(widget)
        page = widget.page()
        page.scrollPositionChanged.connect(self._update_pos)

    def _repeated_key_press(self, key, count=1, modifier=Qt.NoModifier):
        """Send count fake key presses to this scroller's WebEngineTab."""
        # Cap at 1000 presses to keep absurd counts from freezing the UI.
        for _ in range(min(count, 1000)):
            self._tab.fake_key_press(key, modifier)

    @pyqtSlot(QPointF)
    def _update_pos(self, pos):
        """Update the scroll position attributes when it changed."""
        self._pos_px = pos.toPoint()
        contents_size = self._widget.page().contentsSize()
        scrollable_x = contents_size.width() - self._widget.width()
        if scrollable_x == 0:
            perc_x = 0
        else:
            try:
                perc_x = min(100, round(100 / scrollable_x * pos.x()))
            except ValueError:
                # https://github.com/qutebrowser/qutebrowser/issues/3219
                log.misc.debug("Got ValueError for perc_x!")
                log.misc.debug("contents_size.width(): {}".format(
                    contents_size.width()))
                log.misc.debug("self._widget.width(): {}".format(
                    self._widget.width()))
                log.misc.debug("scrollable_x: {}".format(scrollable_x))
                log.misc.debug("pos.x(): {}".format(pos.x()))
                raise
        scrollable_y = contents_size.height() - self._widget.height()
        if scrollable_y == 0:
            perc_y = 0
        else:
            try:
                perc_y = min(100, round(100 / scrollable_y * pos.y()))
            except ValueError:
                # https://github.com/qutebrowser/qutebrowser/issues/3219
                log.misc.debug("Got ValueError for perc_y!")
                log.misc.debug("contents_size.height(): {}".format(
                    contents_size.height()))
                log.misc.debug("self._widget.height(): {}".format(
                    self._widget.height()))
                log.misc.debug("scrollable_y: {}".format(scrollable_y))
                log.misc.debug("pos.y(): {}".format(pos.y()))
                raise
        self._at_bottom = math.ceil(pos.y()) >= scrollable_y
        # Only emit perc_changed when the percentage actually changed, unless
        # scroll filtering is disabled via the debug flag.
        if (self._pos_perc != (perc_x, perc_y) or
                'no-scroll-filtering' in objects.debug_flags):
            self._pos_perc = perc_x, perc_y
            self.perc_changed.emit(*self._pos_perc)

    def pos_px(self):
        """Cached scroll position in pixels as QPoint."""
        return self._pos_px

    def pos_perc(self):
        """Cached scroll position as an (x%, y%) tuple."""
        return self._pos_perc

    def to_perc(self, x=None, y=None):
        """Scroll to the given percentage(s) via JS."""
        js_code = javascript.assemble('scroll', 'to_perc', x, y)
        self._tab.run_js_async(js_code)

    def to_point(self, point):
        """Scroll to an absolute pixel position via JS."""
        js_code = javascript.assemble('window', 'scroll', point.x(), point.y())
        self._tab.run_js_async(js_code)

    def to_anchor(self, name):
        """Navigate to the current URL with the given anchor as fragment."""
        url = self._tab.url()
        url.setFragment(name)
        self._tab.load_url(url)

    def delta(self, x=0, y=0):
        """Scroll by the given pixel deltas via JS."""
        self._tab.run_js_async(javascript.assemble('window', 'scrollBy', x, y))

    def delta_page(self, x=0, y=0):
        """Scroll by the given page-sized deltas via JS."""
        js_code = javascript.assemble('scroll', 'delta_page', x, y)
        self._tab.run_js_async(js_code)

    def up(self, count=1):
        self._repeated_key_press(Qt.Key_Up, count)

    def down(self, count=1):
        self._repeated_key_press(Qt.Key_Down, count)

    def left(self, count=1):
        self._repeated_key_press(Qt.Key_Left, count)

    def right(self, count=1):
        self._repeated_key_press(Qt.Key_Right, count)

    def top(self):
        self._tab.fake_key_press(Qt.Key_Home)

    def bottom(self):
        self._tab.fake_key_press(Qt.Key_End)

    def page_up(self, count=1):
        self._repeated_key_press(Qt.Key_PageUp, count)

    def page_down(self, count=1):
        self._repeated_key_press(Qt.Key_PageDown, count)

    def at_top(self):
        return self.pos_px().y() == 0

    def at_bottom(self):
        return self._at_bottom
class WebEngineHistoryPrivate(browsertab.AbstractHistoryPrivate):

    """History-related methods which are not part of the extension API."""

    def serialize(self):
        """Serialize the tab's history into a QByteArray."""
        if not qtutils.version_check('5.9', compiled=False):
            # WORKAROUND for
            # https://github.com/qutebrowser/qutebrowser/issues/2289
            # Don't use the history's currentItem here, because of
            # https://bugreports.qt.io/browse/QTBUG-59599 and because it doesn't
            # contain view-source.
            scheme = self._tab.url().scheme()
            if scheme in ['view-source', 'chrome']:
                raise browsertab.WebTabError("Can't serialize special URL!")
        return qtutils.serialize(self._history)

    def deserialize(self, data):
        """Restore the tab's history from previously serialized *data*."""
        qtutils.deserialize(data, self._history)

    def load_items(self, items):
        """Load the given history items and restore zoom/scroll state."""
        if items:
            self._tab.before_load_started.emit(items[-1].url)
        stream, _data, cur_data = tabhistory.serialize(items)
        qtutils.deserialize_stream(stream, self._history)

        @pyqtSlot()
        def _on_load_finished():
            # Restore the scroll position once the page finished loading, then
            # disconnect so later loads aren't affected.
            self._tab.scroller.to_point(cur_data['scroll-pos'])
            self._tab.load_finished.disconnect(_on_load_finished)

        if cur_data is not None:
            if 'zoom' in cur_data:
                self._tab.zoom.set_factor(cur_data['zoom'])
            # Only schedule the scroll restore if nothing scrolled yet.
            if ('scroll-pos' in cur_data and
                    self._tab.scroller.pos_px() == QPoint(0, 0)):
                self._tab.load_finished.connect(_on_load_finished)
class WebEngineHistory(browsertab.AbstractHistory):

    """QtWebEngine implementations related to page history."""

    def __init__(self, tab):
        super().__init__(tab)
        self.private_api = WebEngineHistoryPrivate(tab)

    def __len__(self):
        return len(self._history)

    def __iter__(self):
        return iter(self._history.items())

    def current_idx(self):
        """Index of the current item in the history."""
        return self._history.currentItemIndex()

    def can_go_back(self):
        """Whether there is an item before the current one."""
        return self._history.canGoBack()

    def can_go_forward(self):
        """Whether there is an item after the current one."""
        return self._history.canGoForward()

    def _item_at(self, i):
        """Get the history item at index *i*."""
        return self._history.itemAt(i)

    def _go_to_item(self, item):
        """Navigate to the given history item."""
        self._tab.before_load_started.emit(item.url())
        self._history.goToItem(item)
class WebEngineZoom(browsertab.AbstractZoom):

    """QtWebEngine implementations related to zooming."""

    def _set_factor_internal(self, factor):
        """Apply the given zoom *factor* to the underlying view."""
        self._widget.setZoomFactor(factor)
class WebEngineElements(browsertab.AbstractElements):

    """QtWebEngine implementations related to elements on the page."""

    def _js_cb_multiple(self, callback, error_cb, js_elems):
        """Dispatch a multi-element JS result to the right callback.

        Args:
            callback: Called with the list of found WebEngineElements.
            error_cb: Called with a webelem.Error when the JS side failed.
            js_elems: The elements serialized from javascript.
        """
        if js_elems is None:
            error_cb(webelem.Error("Unknown error while getting elements"))
            return
        if not js_elems['success']:
            error_cb(webelem.Error(js_elems['error']))
            return

        callback([webengineelem.WebEngineElement(data, tab=self._tab)
                  for data in js_elems['result']])

    def _js_cb_single(self, callback, js_elem):
        """Convert a single JS element result and call *callback* with it.

        Args:
            callback: Called with a WebEngineElement or None.
            js_elem: The element serialized from javascript.
        """
        if js_elem is None:
            debug_str = 'None'
        else:
            debug_str = utils.elide(repr(js_elem), 1000)
        log.webview.debug("Got element from JS: {}".format(debug_str))

        if js_elem is None:
            callback(None)
        else:
            callback(webengineelem.WebEngineElement(js_elem, tab=self._tab))

    def find_css(self, selector, callback, error_cb, *,
                 only_visible=False):
        """Find all elements matching *selector* via JS."""
        code = javascript.assemble('webelem', 'find_css', selector,
                                   only_visible)
        handler = functools.partial(self._js_cb_multiple, callback, error_cb)
        self._tab.run_js_async(code, handler)

    def find_id(self, elem_id, callback):
        """Find the element with the given ID via JS."""
        code = javascript.assemble('webelem', 'find_id', elem_id)
        handler = functools.partial(self._js_cb_single, callback)
        self._tab.run_js_async(code, handler)

    def find_focused(self, callback):
        """Find the currently focused element via JS."""
        code = javascript.assemble('webelem', 'find_focused')
        handler = functools.partial(self._js_cb_single, callback)
        self._tab.run_js_async(code, handler)

    def find_at_pos(self, pos, callback):
        """Find the element at *pos* (divided by the zoom factor) via JS."""
        assert pos.x() >= 0, pos
        assert pos.y() >= 0, pos
        pos /= self._tab.zoom.factor()
        code = javascript.assemble('webelem', 'find_at_pos',
                                   pos.x(), pos.y())
        handler = functools.partial(self._js_cb_single, callback)
        self._tab.run_js_async(code, handler)
class WebEngineAudio(browsertab.AbstractAudio):

    """QtWebEngine implementations related to audio/muting.

    Attributes:
        _overridden: Whether the user toggled muting manually.
                     If that's the case, we leave it alone.
    """

    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        self._overridden = False

    def _connect_signals(self):
        """Hook up page/config/URL signals for mute handling."""
        page = self._widget.page()
        page.audioMutedChanged.connect(self.muted_changed)
        page.recentlyAudibleChanged.connect(self.recently_audible_changed)
        self._tab.url_changed.connect(self._on_url_changed)
        config.instance.changed.connect(self._on_config_changed)

    def set_muted(self, muted: bool, override: bool = False) -> None:
        """Set the page's muted state.

        Args:
            muted: Whether to mute the page.
            override: Whether this is a manual override, which stops
                      automatic per-URL mute updates from changing it.
        """
        self._overridden = override
        assert self._widget is not None
        page = self._widget.page()
        page.setAudioMuted(muted)

    def is_muted(self) -> bool:
        """Whether the page is currently muted."""
        page = self._widget.page()
        return page.isAudioMuted()

    def is_recently_audible(self) -> bool:
        """Whether the page was recently audible."""
        page = self._widget.page()
        return page.recentlyAudible()

    @pyqtSlot(QUrl)
    def _on_url_changed(self, url):
        """Apply the per-URL content.mute setting, unless overridden."""
        if self._overridden:
            return
        mute = config.instance.get('content.mute', url=url)
        self.set_muted(mute)

    @config.change_filter('content.mute')
    def _on_config_changed(self):
        """Re-apply muting when the content.mute setting changed."""
        self._on_url_changed(self._tab.url())
class _WebEnginePermissions(QObject):
    """Handling of various permission-related signals."""
    # Config option consulted for each QWebEnginePage feature.
    # Using 0 as WORKAROUND for:
    # https://www.riverbankcomputing.com/pipermail/pyqt/2019-July/041903.html
    _options = {
        0: 'content.notifications',
        QWebEnginePage.Geolocation: 'content.geolocation',
        QWebEnginePage.MediaAudioCapture: 'content.media_capture',
        QWebEnginePage.MediaVideoCapture: 'content.media_capture',
        QWebEnginePage.MediaAudioVideoCapture: 'content.media_capture',
    }
    # Human-readable action shown in the permission prompt, keyed like
    # _options above.
    _messages = {
        0: 'show notifications',
        QWebEnginePage.Geolocation: 'access your location',
        QWebEnginePage.MediaAudioCapture: 'record audio',
        QWebEnginePage.MediaVideoCapture: 'record video',
        QWebEnginePage.MediaAudioVideoCapture: 'record audio/video',
    }
    def __init__(self, tab, parent=None):
        """Set up permission handling and register Qt-version-gated features.

        Args:
            tab: The WebEngineTab this object belongs to.
            parent: The parent QObject.
        """
        super().__init__(parent)
        self._tab = tab
        # Assigned the real view by WebEngineTab._set_widget() before
        # connect_signals() runs; cast so mypy sees a QWidget.
        self._widget = typing.cast(QWidget, None)
        try:
            self._options.update({
                QWebEnginePage.MouseLock:
                    'content.mouse_lock',
            })
            self._messages.update({
                QWebEnginePage.MouseLock:
                    'hide your mouse pointer',
            })
        except AttributeError:
            # Added in Qt 5.8
            pass
        try:
            self._options.update({
                QWebEnginePage.DesktopVideoCapture:
                    'content.desktop_capture',
                QWebEnginePage.DesktopAudioVideoCapture:
                    'content.desktop_capture',
            })
            self._messages.update({
                QWebEnginePage.DesktopVideoCapture:
                    'capture your desktop',
                QWebEnginePage.DesktopAudioVideoCapture:
                    'capture your desktop and audio',
            })
        except AttributeError:
            # Added in Qt 5.10
            pass
        # Every handled feature needs both a config option and a message.
        assert self._options.keys() == self._messages.keys()
    def connect_signals(self):
        """Connect related signals from the QWebEnginePage."""
        page = self._widget.page()
        page.fullScreenRequested.connect(
            self._on_fullscreen_requested)
        page.featurePermissionRequested.connect(
            self._on_feature_permission_requested)
        # quotaRequested/registerProtocolHandlerRequested only exist since
        # Qt 5.11.
        if qtutils.version_check('5.11'):
            page.quotaRequested.connect(
                self._on_quota_requested)
            page.registerProtocolHandlerRequested.connect(
                self._on_register_protocol_handler_requested)
    @pyqtSlot('QWebEngineFullScreenRequest')
    def _on_fullscreen_requested(self, request):
        """Accept a page's fullscreen request and show a hint notification."""
        request.accept()
        on = request.toggleOn()
        self._tab.data.fullscreen = on
        self._tab.fullscreen_requested.emit(on)
        if on:
            notification = miscwidgets.FullscreenNotification(self._widget)
            notification.show()
            notification.set_timeout(3000)
    @pyqtSlot(QUrl, 'QWebEnginePage::Feature')
    def _on_feature_permission_requested(self, url, feature):
        """Ask the user for approval for geolocation/media/etc.."""
        page = self._widget.page()
        grant_permission = functools.partial(
            page.setFeaturePermission, url, feature,
            QWebEnginePage.PermissionGrantedByUser)
        deny_permission = functools.partial(
            page.setFeaturePermission, url, feature,
            QWebEnginePage.PermissionDeniedByUser)
        if feature not in self._options:
            # Deny unknown features instead of leaving the request pending.
            log.webview.error("Unhandled feature permission {}".format(
                debug.qenum_key(QWebEnginePage, feature)))
            deny_permission()
            return
        if (
                hasattr(QWebEnginePage, 'DesktopVideoCapture') and
                feature in [QWebEnginePage.DesktopVideoCapture,
                            QWebEnginePage.DesktopAudioVideoCapture] and
                qtutils.version_check('5.13', compiled=False) and
                not qtutils.version_check('5.13.2', compiled=False)
        ):
            # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-78016
            log.webview.warning("Ignoring desktop sharing request due to "
                                "crashes in Qt < 5.13.2")
            deny_permission()
            return
        question = shared.feature_permission(
            url=url.adjusted(QUrl.RemovePath),
            option=self._options[feature], msg=self._messages[feature],
            yes_action=grant_permission, no_action=deny_permission,
            abort_on=[self._tab.abort_questions])
        if question is not None:
            # Abort the pending prompt if the page cancels its request.
            page.featurePermissionRequestCanceled.connect(
                functools.partial(self._on_feature_permission_cancelled,
                                  question, url, feature))
    def _on_feature_permission_cancelled(self, question, url, feature,
                                         cancelled_url, cancelled_feature):
        """Slot invoked when a feature permission request was cancelled.
        To be used with functools.partial.
        """
        # Only abort when the cancellation matches the prompt we created.
        if url == cancelled_url and feature == cancelled_feature:
            try:
                question.abort()
            except RuntimeError:
                # The question could already be deleted, e.g. because it was
                # aborted after a loadStarted signal.
                pass
    def _on_quota_requested(self, request):
        """Prompt before granting a persistent-storage quota request."""
        size = utils.format_size(request.requestedSize())
        shared.feature_permission(
            url=request.origin().adjusted(QUrl.RemovePath),
            option='content.persistent_storage',
            msg='use {} of persistent storage'.format(size),
            yes_action=request.accept, no_action=request.reject,
            abort_on=[self._tab.abort_questions],
            blocking=True)
    def _on_register_protocol_handler_requested(self, request):
        """Prompt before letting a site register as a protocol handler."""
        shared.feature_permission(
            url=request.origin().adjusted(QUrl.RemovePath),
            option='content.register_protocol_handler',
            msg='open all {} links'.format(request.scheme()),
            yes_action=request.accept, no_action=request.reject,
            abort_on=[self._tab.abort_questions],
            blocking=True)
class _WebEngineScripts(QObject):
    """Management of the QWebEngineScripts injected into a tab's page."""
    def __init__(self, tab, parent=None):
        super().__init__(parent)
        self._tab = tab
        # Assigned the real view by WebEngineTab._set_widget(); cast so
        # mypy sees a QWidget.
        self._widget = typing.cast(QWidget, None)
        self._greasemonkey = greasemonkey.gm_manager
    def connect_signals(self):
        """Connect signals to our private slots."""
        config.instance.changed.connect(self._on_config_changed)
        self._tab.search.cleared.connect(functools.partial(
            self._update_stylesheet, searching=False))
        self._tab.search.finished.connect(self._update_stylesheet)
    @pyqtSlot(str)
    def _on_config_changed(self, option):
        """Re-inject the stylesheet when a relevant option changed."""
        if option in ['scrolling.bar', 'content.user_stylesheets']:
            self._init_stylesheet()
            self._update_stylesheet()
    @pyqtSlot(bool)
    def _update_stylesheet(self, searching=False):
        """Update the custom stylesheet in existing tabs."""
        css = shared.get_user_stylesheet(searching=searching)
        code = javascript.assemble('stylesheet', 'set_css', css)
        self._tab.run_js_async(code)
    def _inject_early_js(self, name, js_code, *,
                         world=QWebEngineScript.ApplicationWorld,
                         subframes=False):
        """Inject the given script to run early on a page load.
        This runs the script both on DocumentCreation and DocumentReady as on
        some internal pages, DocumentCreation will not work.
        That is a WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66011
        """
        scripts = self._widget.page().scripts()
        for injection in ['creation', 'ready']:
            injection_points = {
                'creation': QWebEngineScript.DocumentCreation,
                'ready': QWebEngineScript.DocumentReady,
            }
            script = QWebEngineScript()
            script.setInjectionPoint(injection_points[injection])
            script.setSourceCode(js_code)
            script.setWorldId(world)
            script.setRunsOnSubFrames(subframes)
            # The name encodes the injection point so _remove_early_js can
            # find both copies again.
            script.setName('_qute_{}_{}'.format(name, injection))
            scripts.insert(script)
    def _remove_early_js(self, name):
        """Remove an early QWebEngineScript."""
        scripts = self._widget.page().scripts()
        for injection in ['creation', 'ready']:
            full_name = '_qute_{}_{}'.format(name, injection)
            script = scripts.findScript(full_name)
            if not script.isNull():
                scripts.remove(script)
    def init(self):
        """Initialize global qutebrowser JavaScript."""
        js_code = javascript.wrap_global(
            'scripts',
            utils.read_file('javascript/scroll.js'),
            utils.read_file('javascript/webelem.js'),
            utils.read_file('javascript/caret.js'),
        )
        if not qtutils.version_check('5.12'):
            # WORKAROUND for Qt versions < 5.12 not exposing window.print().
            # Qt 5.12 has a printRequested() signal so we don't need this hack
            # anymore.
            self._inject_early_js('js',
                                  utils.read_file('javascript/print.js'),
                                  subframes=True,
                                  world=QWebEngineScript.MainWorld)
        # FIXME:qtwebengine what about subframes=True?
        self._inject_early_js('js', js_code, subframes=True)
        self._init_stylesheet()
        # The Greasemonkey metadata block support in QtWebEngine only starts at
        # Qt 5.8. With 5.7.1, we need to inject the scripts ourselves in
        # response to urlChanged.
        if not qtutils.version_check('5.8'):
            self._tab.url_changed.connect(
                self._inject_greasemonkey_scripts_for_url)
        else:
            self._greasemonkey.scripts_reloaded.connect(
                self._inject_all_greasemonkey_scripts)
            self._inject_all_greasemonkey_scripts()
    def _init_stylesheet(self):
        """Initialize custom stylesheets.
        Partially inspired by QupZilla:
        https://github.com/QupZilla/qupzilla/blob/v2.0/src/lib/app/mainapplication.cpp#L1063-L1101
        """
        self._remove_early_js('stylesheet')
        css = shared.get_user_stylesheet()
        js_code = javascript.wrap_global(
            'stylesheet',
            utils.read_file('javascript/stylesheet.js'),
            javascript.assemble('stylesheet', 'set_css', css),
        )
        self._inject_early_js('stylesheet', js_code, subframes=True)
    @pyqtSlot(QUrl)
    def _inject_greasemonkey_scripts_for_url(self, url):
        # Pre-Qt-5.8 fallback: inject matching scripts per URL, split by
        # their @run-at stage.
        matching_scripts = self._greasemonkey.scripts_for(url)
        self._inject_greasemonkey_scripts(
            matching_scripts.start, QWebEngineScript.DocumentCreation, True)
        self._inject_greasemonkey_scripts(
            matching_scripts.end, QWebEngineScript.DocumentReady, False)
        self._inject_greasemonkey_scripts(
            matching_scripts.idle, QWebEngineScript.Deferred, False)
    @pyqtSlot()
    def _inject_all_greasemonkey_scripts(self):
        """Re-register every known Greasemonkey script with this tab."""
        scripts = self._greasemonkey.all_scripts()
        self._inject_greasemonkey_scripts(scripts)
    def _remove_all_greasemonkey_scripts(self):
        # Only scripts we named "GM-..." are removed; other injected scripts
        # (e.g. the stylesheet one) are left alone.
        page_scripts = self._widget.page().scripts()
        for script in page_scripts.toList():
            if script.name().startswith("GM-"):
                log.greasemonkey.debug('Removing script: {}'
                                       .format(script.name()))
                removed = page_scripts.remove(script)
                assert removed, script.name()
    def _inject_greasemonkey_scripts(self, scripts=None, injection_point=None,
                                     remove_first=True):
        """Register user JavaScript files with the current tab.
        Args:
            scripts: A list of GreasemonkeyScripts, or None to add all
                     known by the Greasemonkey subsystem.
            injection_point: The QWebEngineScript::InjectionPoint stage
                             to inject the script into, None to use
                             auto-detection.
            remove_first: Whether to remove all previously injected
                          scripts before adding these ones.
        """
        if sip.isdeleted(self._widget):
            # The underlying view is already gone (e.g. tab closed).
            return
        # Since we are inserting scripts into a per-tab collection,
        # rather than just injecting scripts on page load, we need to
        # make sure we replace existing scripts, not just add new ones.
        # While, taking care not to remove any other scripts that might
        # have been added elsewhere, like the one for stylesheets.
        page_scripts = self._widget.page().scripts()
        if remove_first:
            self._remove_all_greasemonkey_scripts()
        if not scripts:
            return
        for script in scripts:
            new_script = QWebEngineScript()
            try:
                # '@qute-js-world' may be numeric...
                world = int(script.jsworld)
                if not 0 <= world <= qtutils.MAX_WORLD_ID:
                    log.greasemonkey.error(
                        "script {} has invalid value for '@qute-js-world'"
                        ": {}, should be between 0 and {}"
                        .format(
                            script.name,
                            script.jsworld,
                            qtutils.MAX_WORLD_ID))
                    continue
            except ValueError:
                # ...or one of the symbolic JsWorld names.
                try:
                    world = _JS_WORLD_MAP[usertypes.JsWorld[
                        script.jsworld.lower()]]
                except KeyError:
                    log.greasemonkey.error(
                        "script {} has invalid value for '@qute-js-world'"
                        ": {}".format(script.name, script.jsworld))
                    continue
            new_script.setWorldId(world)
            new_script.setSourceCode(script.code())
            new_script.setName("GM-{}".format(script.name))
            new_script.setRunsOnSubFrames(script.runs_on_sub_frames)
            # Override the @run-at value parsed by QWebEngineScript if desired.
            if injection_point:
                new_script.setInjectionPoint(injection_point)
            elif script.needs_document_end_workaround():
                log.greasemonkey.debug("Forcing @run-at document-end for {}"
                                       .format(script.name))
                new_script.setInjectionPoint(QWebEngineScript.DocumentReady)
            log.greasemonkey.debug('adding script: {}'
                                   .format(new_script.name()))
            page_scripts.insert(new_script)
class WebEngineTabPrivate(browsertab.AbstractTabPrivate):
    """QtWebEngine-related methods which aren't part of the public API."""
    def networkaccessmanager(self):
        # QtWebEngine doesn't expose a QNetworkAccessManager per tab.
        return None
    def user_agent(self):
        # Not available on this backend via this API.
        return None
    def clear_ssl_errors(self):
        raise browsertab.UnsupportedOperationError
    def event_target(self):
        """Return the widget that should receive synthesized events."""
        return self._widget.render_widget()
    def shutdown(self):
        """Tear the tab down: notify listeners and shut the view down."""
        self._tab.shutting_down.emit()
        self._tab.action.exit_fullscreen()
        self._widget.shutdown()
class WebEngineTab(browsertab.AbstractTab):
    """A QtWebEngine tab in the browser.
    Signals:
        abort_questions: Emitted when a new load started or we're shutting
                         down.
    """
    abort_questions = pyqtSignal()
    def __init__(self, *, win_id, mode_manager, private, parent=None):
        super().__init__(win_id=win_id, private=private, parent=parent)
        widget = webview.WebEngineView(tabdata=self.data, win_id=win_id,
                                       private=private)
        # Backend-specific implementations of the AbstractTab sub-APIs.
        self.history = WebEngineHistory(tab=self)
        self.scroller = WebEngineScroller(tab=self, parent=self)
        self.caret = WebEngineCaret(mode_manager=mode_manager,
                                    tab=self, parent=self)
        self.zoom = WebEngineZoom(tab=self, parent=self)
        self.search = WebEngineSearch(tab=self, parent=self)
        self.printing = WebEnginePrinting(tab=self)
        self.elements = WebEngineElements(tab=self)
        self.action = WebEngineAction(tab=self)
        self.audio = WebEngineAudio(tab=self, parent=self)
        self.private_api = WebEngineTabPrivate(mode_manager=mode_manager,
                                               tab=self)
        self._permissions = _WebEnginePermissions(tab=self, parent=self)
        self._scripts = _WebEngineScripts(tab=self, parent=self)
        # We're assigning settings in _set_widget
        self.settings = webenginesettings.WebEngineSettings(settings=None)
        self._set_widget(widget)
        self._connect_signals()
        self.backend = usertypes.Backend.QtWebEngine
        self._child_event_filter = None
        # Zoom factor to restore after the next load finishes.
        self._saved_zoom = None
        # URL to reload once a settings change requires it (see
        # _on_navigation_request / _on_load_finished).
        self._reload_url = None # type: typing.Optional[QUrl]
        self._scripts.init()
    def _set_widget(self, widget):
        # pylint: disable=protected-access
        super()._set_widget(widget)
        # The helper objects need direct access to the view as well.
        self._permissions._widget = widget
        self._scripts._widget = widget
    def _install_event_filter(self):
        """Install event filters on the view and its focus proxy."""
        fp = self._widget.focusProxy()
        if fp is not None:
            fp.installEventFilter(self._tab_event_filter)
        self._child_event_filter = eventfilter.ChildEventFilter(
            eventfilter=self._tab_event_filter, widget=self._widget,
            win_id=self.win_id, parent=self)
        self._widget.installEventFilter(self._child_event_filter)
    @pyqtSlot()
    def _restore_zoom(self):
        """Restore the zoom factor saved before the last load_url()."""
        if sip.isdeleted(self._widget):
            # https://github.com/qutebrowser/qutebrowser/issues/3498
            return
        if self._saved_zoom is None:
            return
        self.zoom.set_factor(self._saved_zoom)
        self._saved_zoom = None
    def load_url(self, url, *, emit_before_load_started=True):
        """Load the given URL in this tab.
        Arguments:
            url: The QUrl to load.
            emit_before_load_started: If set to False, before_load_started is
                                      not emitted.
        """
        if sip.isdeleted(self._widget):
            # https://github.com/qutebrowser/qutebrowser/issues/3896
            return
        self._saved_zoom = self.zoom.factor()
        self._load_url_prepare(
            url, emit_before_load_started=emit_before_load_started)
        self._widget.load(url)
    def url(self, *, requested=False):
        """Return the current (or originally requested) page URL."""
        page = self._widget.page()
        if requested:
            return page.requestedUrl()
        else:
            return page.url()
    def dump_async(self, callback, *, plain=False):
        """Asynchronously pass the page's text (or HTML) to callback."""
        if plain:
            self._widget.page().toPlainText(callback)
        else:
            self._widget.page().toHtml(callback)
    def run_js_async(self, code, callback=None, *, world=None):
        """Run JavaScript in the given world, optionally with a callback."""
        world_id_type = typing.Union[QWebEngineScript.ScriptWorldId, int]
        if world is None:
            world_id = QWebEngineScript.ApplicationWorld  # type: world_id_type
        elif isinstance(world, int):
            world_id = world
            if not 0 <= world_id <= qtutils.MAX_WORLD_ID:
                raise browsertab.WebTabError(
                    "World ID should be between 0 and {}"
                    .format(qtutils.MAX_WORLD_ID))
        else:
            world_id = _JS_WORLD_MAP[world]
        if callback is None:
            self._widget.page().runJavaScript(code, world_id)
        else:
            self._widget.page().runJavaScript(code, world_id, callback)
    def reload(self, *, force=False):
        """Reload the page, bypassing the cache if force is given."""
        if force:
            action = QWebEnginePage.ReloadAndBypassCache
        else:
            action = QWebEnginePage.Reload
        self._widget.triggerPageAction(action)
    def stop(self):
        """Stop the current load."""
        self._widget.stop()
    def title(self):
        return self._widget.title()
    def icon(self):
        return self._widget.icon()
    def set_html(self, html, base_url=QUrl()):
        # FIXME:qtwebengine
        # check this and raise an exception if too big:
        # Warning: The content will be percent encoded before being sent to the
        # renderer via IPC. This may increase its size. The maximum size of the
        # percent encoded content is 2 megabytes minus 30 bytes.
        self._widget.setHtml(html, base_url)
    def _show_error_page(self, url, error):
        """Show an error page in the tab."""
        log.misc.debug("Showing error page for {}".format(error))
        url_string = url.toDisplayString()
        error_page = jinja.render(
            'error.html',
            title="Error loading page: {}".format(url_string),
            url=url_string, error=error)
        self.set_html(error_page)
    @pyqtSlot()
    def _on_history_trigger(self):
        """Add the current page to the history on loadFinished."""
        try:
            self._widget.page()
        except RuntimeError:
            # Looks like this slot can be triggered on destroyed tabs:
            # https://crashes.qutebrowser.org/view/3abffbed (Qt 5.9.1)
            # wrapped C/C++ object of type WebEngineView has been deleted
            log.misc.debug("Ignoring history trigger for destroyed tab")
            return
        url = self.url()
        requested_url = self.url(requested=True)
        # Don't save the title if it's generated from the URL
        title = self.title()
        title_url = QUrl(url)
        title_url.setScheme('')
        title_url_str = title_url.toDisplayString(
            QUrl.RemoveScheme) # type: ignore
        if title == title_url_str.strip('/'):
            title = ""
        # Don't add history entry if the URL is invalid anyways
        if not url.isValid():
            log.misc.debug("Ignoring invalid URL being added to history")
            return
        self.history_item_triggered.emit(url, requested_url, title)
    @pyqtSlot(QUrl, 'QAuthenticator*', 'QString')
    def _on_proxy_authentication_required(self, url, authenticator,
                                          proxy_host):
        """Called when a proxy needs authentication."""
        msg = "<b>{}</b> requires a username and password.".format(
            html_utils.escape(proxy_host))
        urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
        answer = message.ask(
            title="Proxy authentication required", text=msg,
            mode=usertypes.PromptMode.user_pwd,
            abort_on=[self.abort_questions], url=urlstr)
        if answer is not None:
            authenticator.setUser(answer.user)
            authenticator.setPassword(answer.password)
        else:
            try:
                # Blank out the authenticator to cancel authentication.
                sip.assign(authenticator, QAuthenticator())  # type: ignore
            except AttributeError:
                self._show_error_page(url, "Proxy authentication required")
    @pyqtSlot(QUrl, 'QAuthenticator*')
    def _on_authentication_required(self, url, authenticator):
        """Handle HTTP authentication, trying .netrc first once per load."""
        log.network.debug("Authentication requested for {}, netrc_used {}"
                          .format(url.toDisplayString(), self.data.netrc_used))
        netrc_success = False
        if not self.data.netrc_used:
            # Only try netrc once per load to avoid a retry loop on failure.
            self.data.netrc_used = True
            netrc_success = shared.netrc_authentication(url, authenticator)
        if not netrc_success:
            log.network.debug("Asking for credentials")
            answer = shared.authentication_required(
                url, authenticator, abort_on=[self.abort_questions])
        if not netrc_success and answer is None:
            log.network.debug("Aborting auth")
            try:
                sip.assign(authenticator, QAuthenticator())  # type: ignore
            except AttributeError:
                # WORKAROUND for
                # https://www.riverbankcomputing.com/pipermail/pyqt/2016-December/038400.html
                self._show_error_page(url, "Authentication required")
    @pyqtSlot()
    def _on_load_started(self):
        """Clear search when a new load is started if needed."""
        # WORKAROUND for
        # https://bugreports.qt.io/browse/QTBUG-61506
        # (seems to be back in later Qt versions as well)
        self.search.clear()
        super()._on_load_started()
        self.data.netrc_used = False
    @pyqtSlot(QWebEnginePage.RenderProcessTerminationStatus, int)
    def _on_render_process_terminated(self, status, exitcode):
        """Show an error when the renderer process terminated."""
        if (status == QWebEnginePage.AbnormalTerminationStatus and
                exitcode == 256):
            # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-58697
            status = QWebEnginePage.CrashedTerminationStatus
        # Translate Qt's status enum into the backend-agnostic one.
        status_map = {
            QWebEnginePage.NormalTerminationStatus:
                browsertab.TerminationStatus.normal,
            QWebEnginePage.AbnormalTerminationStatus:
                browsertab.TerminationStatus.abnormal,
            QWebEnginePage.CrashedTerminationStatus:
                browsertab.TerminationStatus.crashed,
            QWebEnginePage.KilledTerminationStatus:
                browsertab.TerminationStatus.killed,
            -1:
                browsertab.TerminationStatus.unknown,
        }
        self.renderer_process_terminated.emit(status_map[status], exitcode)
    def _error_page_workaround(self, js_enabled, html):
        """Check if we're displaying a Chromium error page.
        This gets called if we got a loadFinished(False), so we can display at
        least some error page in situations where Chromium's can't be
        displayed.
        WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66643
        WORKAROUND for https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=882805
        Needs to check the page content as a WORKAROUND for
        https://bugreports.qt.io/browse/QTBUG-66661
        """
        match = re.search(r'"errorCode":"([^"]*)"', html)
        if match is None:
            return
        self._show_error_page(self.url(), error=match.group(1))
    @pyqtSlot(int)
    def _on_load_progress(self, perc: int) -> None:
        """QtWebEngine-specific loadProgress workarounds.
        WORKAROUND for https://bugreports.qt.io/browse/QTBUG-65223
        """
        super()._on_load_progress(perc)
        if (perc == 100 and
                qtutils.version_check('5.10', compiled=False) and
                self.load_status() != usertypes.LoadStatus.error):
            self._update_load_status(ok=True)
    @pyqtSlot(bool)
    def _on_load_finished(self, ok: bool) -> None:
        """QtWebEngine-specific loadFinished workarounds."""
        super()._on_load_finished(ok)
        # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-65223
        if qtutils.version_check('5.10', compiled=False):
            # On Qt >= 5.10 the success status was already set via
            # _on_load_progress; only propagate failures here.
            if not ok:
                self._update_load_status(ok)
        else:
            self._update_load_status(ok)
        if not ok:
            self.dump_async(functools.partial(
                self._error_page_workaround,
                self.settings.test_attribute('content.javascript.enabled')))
        if ok and self._reload_url is not None:
            # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66656
            log.config.debug(
                "Loading {} again because of config change".format(
                    self._reload_url.toDisplayString()))
            QTimer.singleShot(100, functools.partial(
                self.load_url, self._reload_url,
                emit_before_load_started=False))
            self._reload_url = None
    @pyqtSlot(certificateerror.CertificateErrorWrapper)
    def _on_ssl_errors(self, error):
        """Decide how to handle a certificate error for this tab."""
        url = error.url()
        self._insecure_hosts.add(url.host())
        log.webview.debug("Certificate error: {}".format(error))
        if error.is_overridable():
            error.ignore = shared.ignore_certificate_errors(
                url, [error], abort_on=[self.abort_questions])
        else:
            log.webview.error("Non-overridable certificate error: "
                              "{}".format(error))
        log.webview.debug("ignore {}, URL {}, requested {}".format(
            error.ignore, url, self.url(requested=True)))
        # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-56207
        show_cert_error = (
            not qtutils.version_check('5.9') and
            not error.ignore
        )
        # WORKAROUND for https://codereview.qt-project.org/c/qt/qtwebengine/+/270556
        show_non_overr_cert_error = (
            not error.is_overridable() and (
                # Affected Qt versions:
                # 5.13 before 5.13.2
                # 5.12 before 5.12.6
                # < 5.12
                (qtutils.version_check('5.13') and
                 not qtutils.version_check('5.13.2')) or
                (qtutils.version_check('5.12') and
                 not qtutils.version_check('5.12.6')) or
                not qtutils.version_check('5.12')
            )
        )
        # We can't really know when to show an error page, as the error might
        # have happened when loading some resource.
        # However, self.url() is not available yet and the requested URL
        # might not match the URL we get from the error - so we just apply a
        # heuristic here.
        if ((show_cert_error or show_non_overr_cert_error) and
                url.matches(self.data.last_navigation.url, QUrl.RemoveScheme)):
            self._show_error_page(url, str(error))
    @pyqtSlot(QUrl)
    def _on_before_load_started(self, url):
        """If we know we're going to visit a URL soon, change the settings.
        This is a WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66656
        """
        super()._on_before_load_started(url)
        if not qtutils.version_check('5.11.1', compiled=False):
            self.settings.update_for_url(url)
    @pyqtSlot()
    def _on_print_requested(self):
        """Slot for window.print() in JS."""
        try:
            self.printing.show_dialog()
        except browsertab.WebTabError as e:
            message.error(str(e))
    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(self, navigation):
        """Adjust per-URL settings and decide whether a reload is needed."""
        super()._on_navigation_request(navigation)
        if navigation.url == QUrl('qute://print'):
            self._on_print_requested()
            navigation.accepted = False
        if not navigation.accepted or not navigation.is_main_frame:
            return
        # Options which take effect only after a reload on (some) Qt
        # versions.
        settings_needing_reload = {
            'content.plugins',
            'content.javascript.enabled',
            'content.javascript.can_access_clipboard',
            'content.print_element_backgrounds',
            'input.spatial_navigation',
        }
        assert settings_needing_reload.issubset(configdata.DATA)
        changed = self.settings.update_for_url(navigation.url)
        reload_needed = bool(changed & settings_needing_reload)
        # On Qt < 5.11, we don't need a reload when type == link_clicked.
        # On Qt 5.11.0, we always need a reload.
        # On Qt > 5.11.0, we never need a reload:
        # https://codereview.qt-project.org/#/c/229525/1
        # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66656
        if qtutils.version_check('5.11.1', compiled=False):
            reload_needed = False
        elif not qtutils.version_check('5.11.0', exact=True, compiled=False):
            if navigation.navigation_type == navigation.Type.link_clicked:
                reload_needed = False
        if reload_needed:
            # The actual reload is scheduled in _on_load_finished.
            self._reload_url = navigation.url
    def _on_select_client_certificate(self, selection):
        """Handle client certificates.
        Currently, we simply pick the first available certificate and show an
        additional note if there are multiple matches.
        """
        certificate = selection.certificates()[0]
        text = ('<b>Subject:</b> {subj}<br/>'
                '<b>Issuer:</b> {issuer}<br/>'
                '<b>Serial:</b> {serial}'.format(
                    subj=html_utils.escape(certificate.subjectDisplayName()),
                    issuer=html_utils.escape(certificate.issuerDisplayName()),
                    serial=bytes(certificate.serialNumber()).decode('ascii')))
        if len(selection.certificates()) > 1:
            text += ('<br/><br/><b>Note:</b> Multiple matching certificates '
                     'were found, but certificate selection is not '
                     'implemented yet!')
        urlstr = selection.host().host()
        present = message.ask(
            title='Present client certificate to {}?'.format(urlstr),
            text=text,
            mode=usertypes.PromptMode.yesno,
            abort_on=[self.abort_questions],
            url=urlstr)
        if present:
            selection.select(certificate)
        else:
            selection.selectNone()
    def _connect_signals(self):
        """Wire up all page/view signals to this tab's slots and signals."""
        view = self._widget
        page = view.page()
        page.windowCloseRequested.connect(self.window_close_requested)
        page.linkHovered.connect(self.link_hovered)
        page.loadProgress.connect(self._on_load_progress)
        page.loadStarted.connect(self._on_load_started)
        page.certificate_error.connect(self._on_ssl_errors)
        page.authenticationRequired.connect(self._on_authentication_required)
        page.proxyAuthenticationRequired.connect(
            self._on_proxy_authentication_required)
        page.contentsSizeChanged.connect(self.contents_size_changed)
        page.navigation_request.connect(self._on_navigation_request)
        if qtutils.version_check('5.12'):
            # printRequested was added in Qt 5.12.
            page.printRequested.connect(self._on_print_requested)
        try:
            # pylint: disable=unused-import
            from PyQt5.QtWebEngineWidgets import (  # type: ignore
                QWebEngineClientCertificateSelection)
        except ImportError:
            # Client certificate support is only available in newer PyQt5.
            pass
        else:
            page.selectClientCertificate.connect(
                self._on_select_client_certificate)
        view.titleChanged.connect(self.title_changed)
        view.urlChanged.connect(self._on_url_changed)
        view.renderProcessTerminated.connect(
            self._on_render_process_terminated)
        view.iconChanged.connect(self.icon_changed)
        page.loadFinished.connect(self._on_history_trigger)
        page.loadFinished.connect(self._restore_zoom)
        page.loadFinished.connect(self._on_load_finished)
        self.before_load_started.connect(self._on_before_load_started)
        self.shutting_down.connect(self.abort_questions) # type: ignore
        self.load_started.connect(self.abort_questions) # type: ignore
        # pylint: disable=protected-access
        self.audio._connect_signals()
        self._permissions.connect_signals()
        self._scripts.connect_signals()
| ./CrossVul/dataset_final_sorted/CWE-684/py/good_3920_1 |
crossvul-python_data_good_3915_0 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2019 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Base class for a wrapper over QWebView/QWebEngineView."""
import enum
import itertools
import typing
import attr
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, QUrl, QObject, QSizeF, Qt,
QEvent, QPoint)
from PyQt5.QtGui import QKeyEvent, QIcon
from PyQt5.QtWidgets import QWidget, QApplication, QDialog
from PyQt5.QtPrintSupport import QPrintDialog, QPrinter
from PyQt5.QtNetwork import QNetworkAccessManager
import pygments
import pygments.lexers
import pygments.formatters
from qutebrowser.keyinput import modeman
from qutebrowser.config import config
from qutebrowser.utils import (utils, objreg, usertypes, log, qtutils,
urlutils, message)
from qutebrowser.misc import miscwidgets, objects
from qutebrowser.browser import mouse, hints
from qutebrowser.qt import sip
# Poor-man's typing.TYPE_CHECKING: these imports are only needed for type
# comments and would otherwise create circular imports.
MYPY = False
if MYPY:
    # pylint can't interpret type comments with Python 3.7
    # pylint: disable=unused-import,useless-suppression
    from qutebrowser.browser import webelem
    from qutebrowser.browser.inspector import AbstractWebInspector
# Monotonic counter handing out a unique ID per created tab.
tab_id_gen = itertools.count(0)
def create(win_id: int,
           private: bool,
           parent: QWidget = None) -> 'AbstractTab':
    """Instantiate the tab implementation matching the active backend.

    Args:
        win_id: The window ID where the tab will be shown.
        private: Whether the tab is a private/off the record tab.
        parent: The Qt parent to set.
    """
    # Backend modules are imported lazily so we don't depend on QtWebEngine
    # unless it's actually selected, and to avoid circular imports.
    mode_manager = modeman.instance(win_id)
    use_webengine = objects.backend == usertypes.Backend.QtWebEngine
    if use_webengine:
        from qutebrowser.browser.webengine import webenginetab
        tab_class = webenginetab.WebEngineTab
    else:
        from qutebrowser.browser.webkit import webkittab
        tab_class = webkittab.WebKitTab
    return tab_class(win_id=win_id, mode_manager=mode_manager,
                     private=private, parent=parent)
def init() -> None:
    """Initialize backend-specific modules."""
    # Only the QtWebEngine backend has module-level initialization here.
    if objects.backend == usertypes.Backend.QtWebEngine:
        from qutebrowser.browser.webengine import webenginetab
        webenginetab.init()
class WebTabError(Exception):
    """Base class for various errors raised by tab implementations."""
class UnsupportedOperationError(WebTabError):
    """Raised when an operation is not supported with the given backend."""
class TerminationStatus(enum.Enum):

    """How a renderer process ended, backend-agnostic."""

    normal = 1
    abnormal = 2   # non-zero exit status
    crashed = 3    # e.g. segfault
    killed = 4
    unknown = 5
@attr.s
class TabData:
    """A simple namespace with a fixed set of attributes.
    Attributes:
        keep_icon: Whether the (e.g. cloned) icon should not be cleared on page
                   load.
        inspector: The QWebInspector used for this webview.
        viewing_source: Set if we're currently showing a source view.
                        Only used when sources are shown via pygments.
        open_target: Where to open the next link.
                     Only used for QtWebKit.
        override_target: Override for open_target for fake clicks (like hints).
                         Only used for QtWebKit.
        pinned: Flag to pin the tab.
        fullscreen: Whether the tab has a video shown fullscreen currently.
        netrc_used: Whether netrc authentication was performed.
        input_mode: current input mode for the tab.
    """
    keep_icon = attr.ib(False)  # type: bool
    viewing_source = attr.ib(False)  # type: bool
    inspector = attr.ib(None)  # type: typing.Optional[AbstractWebInspector]
    open_target = attr.ib(
        usertypes.ClickTarget.normal)  # type: usertypes.ClickTarget
    override_target = attr.ib(None)  # type: usertypes.ClickTarget
    pinned = attr.ib(False)  # type: bool
    fullscreen = attr.ib(False)  # type: bool
    netrc_used = attr.ib(False)  # type: bool
    input_mode = attr.ib(usertypes.KeyMode.normal)  # type: usertypes.KeyMode
    def should_show_icon(self) -> bool:
        """Whether the tab's favicon should be displayed.

        Note the operator precedence: 'pinned' mode only shows the icon for
        pinned tabs, while 'always' shows it unconditionally.
        """
        return (config.val.tabs.favicons.show == 'always' or
                config.val.tabs.favicons.show == 'pinned' and self.pinned)
class AbstractAction:
    """Attribute ``action`` of AbstractTab for Qt WebActions."""
    # The class actions are defined on (QWeb{Engine,}Page)
    action_class = None  # type: type
    # The type of the actions (QWeb{Engine,}Page.WebAction)
    action_base = None  # type: type
    def __init__(self, tab: 'AbstractTab') -> None:
        # Assigned the real widget by AbstractTab._set_widget(); cast so
        # mypy sees a QWidget.
        self._widget = typing.cast(QWidget, None)
        self._tab = tab
    def exit_fullscreen(self) -> None:
        """Exit the fullscreen mode."""
        raise NotImplementedError
    def save_page(self) -> None:
        """Save the current page."""
        raise NotImplementedError
    def run_string(self, name: str) -> None:
        """Run a webaction based on its name.

        Raises:
            WebTabError: If the name is not a valid WebAction member.
        """
        member = getattr(self.action_class, name, None)
        if not isinstance(member, self.action_base):
            raise WebTabError("{} is not a valid web action!".format(name))
        self._widget.triggerPageAction(member)
    def show_source(
            self,
            pygments: bool = False  # pylint: disable=redefined-outer-name
    ) -> None:
        """Show the source of the current page in a new tab."""
        raise NotImplementedError
    def _show_source_pygments(self) -> None:
        """Fallback source view: highlight the dumped HTML with pygments."""
        def show_source_cb(source: str) -> None:
            """Show source as soon as it's ready."""
            # WORKAROUND for https://github.com/PyCQA/pylint/issues/491
            # pylint: disable=no-member
            lexer = pygments.lexers.HtmlLexer()
            formatter = pygments.formatters.HtmlFormatter(
                full=True, linenos='table')
            # pylint: enable=no-member
            highlighted = pygments.highlight(source, lexer, formatter)
            tb = objreg.get('tabbed-browser', scope='window',
                            window=self._tab.win_id)
            new_tab = tb.tabopen(background=False, related=True)
            new_tab.set_html(highlighted, self._tab.url())
            new_tab.data.viewing_source = True
        self._tab.dump_async(show_source_cb)
class AbstractPrinting:

    """Attribute ``printing`` of AbstractTab for printing the page."""

    def __init__(self, tab: 'AbstractTab') -> None:
        # Assigned the real widget by AbstractTab._set_widget(); cast so mypy
        # sees a QWidget, consistent with AbstractAction.__init__.
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def check_pdf_support(self) -> None:
        """Check whether writing to PDFs is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def check_printer_support(self) -> None:
        """Check whether writing to a printer is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def check_preview_support(self) -> None:
        """Check whether showing a print preview is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def to_pdf(self, filename: str) -> bool:
        """Print the tab to a PDF with the given filename."""
        raise NotImplementedError

    def to_printer(self, printer: QPrinter,
                   callback: typing.Callable[[bool], None] = None) -> None:
        """Print the tab.

        Args:
            printer: The QPrinter to print to.
            callback: Called with a boolean
                      (True if printing succeeded, False otherwise)
        """
        raise NotImplementedError

    def show_dialog(self) -> None:
        """Print with a QPrintDialog.

        Raises:
            WebTabError: If printing is unsupported on this backend/Qt.
        """
        self.check_printer_support()

        def print_callback(ok: bool) -> None:
            """Called when printing finished."""
            if not ok:
                message.error("Printing failed!")
            diag.deleteLater()

        def do_print() -> None:
            """Called when the dialog was closed."""
            self.to_printer(diag.printer(), print_callback)

        diag = QPrintDialog(self._tab)
        if utils.is_mac:
            # For some reason we get a segfault when using open() on macOS
            ret = diag.exec_()
            if ret == QDialog.Accepted:
                do_print()
        else:
            diag.open(do_print)
class AbstractSearch(QObject):

    """Attribute ``search`` of AbstractTab for doing searches.

    Attributes:
        text: The last thing this view was searched for.
        search_displayed: Whether we're currently displaying search results in
                          this view.
        _flags: The flags of the last search (needs to be set by subclasses).
        _widget: The underlying WebView widget.
    """

    #: Signal emitted when a search was finished
    #: (True if the text was found, False otherwise)
    finished = pyqtSignal(bool)
    #: Signal emitted when an existing search was cleared.
    cleared = pyqtSignal()

    _Callback = typing.Callable[[bool], None]

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None):
        super().__init__(parent)
        self._tab = tab
        self._widget = None
        self.text = None  # type: typing.Optional[str]
        self.search_displayed = False

    def _is_case_sensitive(self, ignore_case: usertypes.IgnoreCase) -> bool:
        """Check if case-sensitivity should be used.

        This assumes self.text is already set properly.

        Arguments:
            ignore_case: The ignore_case value from the config.
        """
        assert self.text is not None
        # "smart" is only case-sensitive if the search text contains
        # any uppercase characters.
        sensitivity = {
            usertypes.IgnoreCase.smart: not self.text.islower(),
            usertypes.IgnoreCase.never: True,
            usertypes.IgnoreCase.always: False,
        }
        return sensitivity[ignore_case]

    def search(self, text: str, *,
               ignore_case: usertypes.IgnoreCase = usertypes.IgnoreCase.never,
               reverse: bool = False,
               result_cb: _Callback = None) -> None:
        """Find the given text on the page.

        Args:
            text: The text to search for.
            ignore_case: Search case-insensitively.
            reverse: Reverse search direction.
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def clear(self) -> None:
        """Clear the current search."""
        raise NotImplementedError

    def prev_result(self, *, result_cb: _Callback = None) -> None:
        """Go to the previous result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def next_result(self, *, result_cb: _Callback = None) -> None:
        """Go to the next result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError
class AbstractZoom(QObject):

    """Attribute ``zoom`` of AbstractTab for controlling zoom."""

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None) -> None:
        super().__init__(parent)
        self._tab = tab
        self._widget = None
        # Whether zoom was changed from the default.
        self._default_zoom_changed = False
        self._init_neighborlist()
        config.instance.changed.connect(self._on_config_changed)
        self._zoom_factor = float(config.val.zoom.default) / 100

    @pyqtSlot(str)
    def _on_config_changed(self, option: str) -> None:
        """Re-apply the default factor / level list when config changed."""
        if option in ['zoom.levels', 'zoom.default']:
            # Only follow the new default if the user didn't zoom manually.
            if not self._default_zoom_changed:
                factor = float(config.val.zoom.default) / 100
                self.set_factor(factor)
            self._init_neighborlist()

    def _init_neighborlist(self) -> None:
        """Initialize self._neighborlist.

        It is a NeighborList with the zoom levels."""
        levels = config.val.zoom.levels
        self._neighborlist = usertypes.NeighborList(
            levels, mode=usertypes.NeighborList.Modes.edge)
        self._neighborlist.fuzzyval = config.val.zoom.default

    def apply_offset(self, offset: int) -> int:
        """Increase/Decrease the zoom level by the given offset.

        Args:
            offset: The offset in the zoom level list.

        Return:
            The new zoom percentage.
        """
        # FIX: was annotated -> None, but this method documents and
        # returns the new zoom level.
        level = self._neighborlist.getitem(offset)
        self.set_factor(float(level) / 100, fuzzyval=False)
        return level

    def _set_factor_internal(self, factor: float) -> None:
        """Backend-specific zoom implementation."""
        raise NotImplementedError

    def set_factor(self, factor: float, *, fuzzyval: bool = True) -> None:
        """Zoom to a given zoom factor.

        Args:
            factor: The zoom factor as float.
            fuzzyval: Whether to set the NeighborLists fuzzyval.

        Raises:
            ValueError: If factor is negative.
        """
        if fuzzyval:
            self._neighborlist.fuzzyval = int(factor * 100)
        if factor < 0:
            raise ValueError("Can't zoom to factor {}!".format(factor))

        default_zoom_factor = float(config.val.zoom.default) / 100
        self._default_zoom_changed = (factor != default_zoom_factor)

        self._zoom_factor = factor
        self._set_factor_internal(factor)

    def factor(self) -> float:
        """Get the current zoom factor."""
        return self._zoom_factor

    def apply_default(self) -> None:
        """Reset the zoom to the configured default factor."""
        self._set_factor_internal(float(config.val.zoom.default) / 100)

    def reapply(self) -> None:
        """Re-apply the last zoom factor (e.g. after a new page loaded)."""
        self._set_factor_internal(self._zoom_factor)
class AbstractCaret(QObject):

    """Attribute ``caret`` of AbstractTab for caret browsing."""

    #: Signal emitted when the selection was toggled.
    #: (argument - whether the selection is now active)
    selection_toggled = pyqtSignal(bool)
    #: Emitted when a ``follow_selection`` action is done.
    follow_selected_done = pyqtSignal()

    def __init__(self,
                 tab: 'AbstractTab',
                 mode_manager: modeman.ModeManager,
                 parent: QWidget = None) -> None:
        super().__init__(parent)
        self._tab = tab
        # Set later once a browser widget exists (AbstractTab._set_widget).
        self._widget = None
        # Whether a visual selection is currently active.
        self.selection_enabled = False
        self._mode_manager = mode_manager
        # React to entering/leaving caret mode via the mode manager signals.
        mode_manager.entered.connect(self._on_mode_entered)
        mode_manager.left.connect(self._on_mode_left)

    def _on_mode_entered(self, mode: usertypes.KeyMode) -> None:
        """Called when a key mode was entered (backend-specific)."""
        raise NotImplementedError

    def _on_mode_left(self, mode: usertypes.KeyMode) -> None:
        """Called when a key mode was left (backend-specific)."""
        raise NotImplementedError

    # All caret movement primitives below are implemented by the
    # backend subclasses.

    def move_to_next_line(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_line(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_next_char(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_char(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_next_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_line(self) -> None:
        raise NotImplementedError

    def move_to_end_of_line(self) -> None:
        raise NotImplementedError

    def move_to_start_of_next_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_prev_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_next_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_prev_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_document(self) -> None:
        raise NotImplementedError

    def move_to_end_of_document(self) -> None:
        raise NotImplementedError

    def toggle_selection(self) -> None:
        raise NotImplementedError

    def drop_selection(self) -> None:
        raise NotImplementedError

    def selection(self, callback: typing.Callable[[str], None]) -> None:
        """Get the currently selected text async via callback."""
        raise NotImplementedError

    def reverse_selection(self) -> None:
        raise NotImplementedError

    def _follow_enter(self, tab: bool) -> None:
        """Follow a link by faking an enter press."""
        if tab:
            self._tab.fake_key_press(Qt.Key_Enter, modifier=Qt.ControlModifier)
        else:
            self._tab.fake_key_press(Qt.Key_Enter)

    def follow_selected(self, *, tab: bool = False) -> None:
        raise NotImplementedError
class AbstractScroller(QObject):

    """Attribute ``scroller`` of AbstractTab to manage scroll position."""

    #: Signal emitted when the scroll position changed (int, int)
    perc_changed = pyqtSignal(int, int)
    #: Signal emitted before the user requested a jump.
    #: Used to set the special ' mark so the user can return.
    before_jump_requested = pyqtSignal()

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None):
        super().__init__(parent)
        self._tab = tab
        self._widget = None  # type: typing.Optional[QWidget]
        # Log every scroll position change (verbose-debug only).
        self.perc_changed.connect(self._log_scroll_pos_change)

    @pyqtSlot()
    def _log_scroll_pos_change(self) -> None:
        log.webview.vdebug(  # type: ignore
            "Scroll position changed to {}".format(self.pos_px()))

    def _init_widget(self, widget: QWidget) -> None:
        """Set the widget to operate on (called by AbstractTab._set_widget)."""
        self._widget = widget

    # The methods below are implemented by the backend subclasses.

    def pos_px(self) -> int:
        raise NotImplementedError

    def pos_perc(self) -> int:
        raise NotImplementedError

    def to_perc(self, x: int = None, y: int = None) -> None:
        raise NotImplementedError

    def to_point(self, point: QPoint) -> None:
        raise NotImplementedError

    def to_anchor(self, name: str) -> None:
        raise NotImplementedError

    def delta(self, x: int = 0, y: int = 0) -> None:
        raise NotImplementedError

    def delta_page(self, x: float = 0, y: float = 0) -> None:
        raise NotImplementedError

    def up(self, count: int = 1) -> None:
        raise NotImplementedError

    def down(self, count: int = 1) -> None:
        raise NotImplementedError

    def left(self, count: int = 1) -> None:
        raise NotImplementedError

    def right(self, count: int = 1) -> None:
        raise NotImplementedError

    def top(self) -> None:
        raise NotImplementedError

    def bottom(self) -> None:
        raise NotImplementedError

    def page_up(self, count: int = 1) -> None:
        raise NotImplementedError

    def page_down(self, count: int = 1) -> None:
        raise NotImplementedError

    def at_top(self) -> bool:
        raise NotImplementedError

    def at_bottom(self) -> bool:
        raise NotImplementedError
class AbstractHistoryPrivate:

    """Private API related to the history."""

    def __init__(self, tab: 'AbstractTab'):
        self._tab = tab
        # Set to the backend history object by AbstractTab._set_widget().
        self._history = None

    def serialize(self) -> bytes:
        """Serialize into an opaque format understood by self.deserialize."""
        raise NotImplementedError

    def deserialize(self, data: bytes) -> None:
        """Deserialize from a format produced by self.serialize."""
        raise NotImplementedError

    def load_items(self, items: typing.Sequence) -> None:
        """Deserialize from a list of WebHistoryItems."""
        raise NotImplementedError
class AbstractHistory:

    """The history attribute of a AbstractTab."""

    def __init__(self, tab: 'AbstractTab') -> None:
        self._tab = tab
        self._history = None
        self.private_api = AbstractHistoryPrivate(tab)

    def __len__(self) -> int:
        raise NotImplementedError

    def __iter__(self) -> typing.Iterable:
        raise NotImplementedError

    def _check_count(self, count: int) -> None:
        """Check whether the count is positive."""
        if count < 0:
            raise WebTabError("count needs to be positive!")

    def current_idx(self) -> int:
        raise NotImplementedError

    def back(self, count: int = 1) -> None:
        """Go back in the tab's history."""
        self._check_count(count)
        target = self.current_idx() - count
        if target < 0:
            # Clamp to the first entry, but still inform the user.
            self._go_to_item(self._item_at(0))
            raise WebTabError("At beginning of history.")
        self._go_to_item(self._item_at(target))

    def forward(self, count: int = 1) -> None:
        """Go forward in the tab's history."""
        self._check_count(count)
        target = self.current_idx() + count
        last = len(self) - 1
        if target > last:
            # Clamp to the last entry, but still inform the user.
            self._go_to_item(self._item_at(last))
            raise WebTabError("At end of history.")
        self._go_to_item(self._item_at(target))

    def can_go_back(self) -> bool:
        raise NotImplementedError

    def can_go_forward(self) -> bool:
        raise NotImplementedError

    def _item_at(self, i: int) -> typing.Any:
        raise NotImplementedError

    def _go_to_item(self, item: typing.Any) -> None:
        raise NotImplementedError
class AbstractElements:

    """Finding and handling of elements on the page."""

    # Callback signatures for the asynchronous find_* methods below.
    _MultiCallback = typing.Callable[
        [typing.Sequence['webelem.AbstractWebElement']], None]
    _SingleCallback = typing.Callable[
        [typing.Optional['webelem.AbstractWebElement']], None]
    _ErrorCallback = typing.Callable[[Exception], None]

    def __init__(self, tab: 'AbstractTab') -> None:
        self._widget = None
        self._tab = tab

    def find_css(self, selector: str,
                 callback: _MultiCallback,
                 error_cb: _ErrorCallback, *,
                 only_visible: bool = False) -> None:
        """Find all HTML elements matching a given selector async.

        If there's an error, the callback is called with a webelem.Error
        instance.

        Args:
            callback: The callback to be called when the search finished.
            error_cb: The callback to be called when an error occurred.
            selector: The CSS selector to search for.
            only_visible: Only show elements which are visible on screen.
        """
        raise NotImplementedError

    def find_id(self, elem_id: str, callback: _SingleCallback) -> None:
        """Find the HTML element with the given ID async.

        Args:
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
            elem_id: The ID to search for.
        """
        raise NotImplementedError

    def find_focused(self, callback: _SingleCallback) -> None:
        """Find the focused element on the page async.

        Args:
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
        """
        raise NotImplementedError

    def find_at_pos(self, pos: QPoint, callback: _SingleCallback) -> None:
        """Find the element at the given position async.

        This is also called "hit test" elsewhere.

        Args:
            pos: The QPoint to get the element for.
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
        """
        raise NotImplementedError
class AbstractAudio(QObject):

    """Handling of audio/muting for this tab."""

    #: Signal emitted when the mute state changed (new state as bool).
    muted_changed = pyqtSignal(bool)
    #: Signal emitted when the recently-audible state changed (bool).
    recently_audible_changed = pyqtSignal(bool)

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None) -> None:
        super().__init__(parent)
        self._widget = None  # type: typing.Optional[QWidget]
        self._tab = tab

    def set_muted(self, muted: bool, override: bool = False) -> None:
        """Set this tab as muted or not.

        Arguments:
            override: If set to True, muting/unmuting was done manually and
                      overrides future automatic mute/unmute changes based on
                      the URL.
        """
        raise NotImplementedError

    def is_muted(self) -> bool:
        raise NotImplementedError

    def is_recently_audible(self) -> bool:
        """Whether this tab has had audio playing recently."""
        raise NotImplementedError
class AbstractTabPrivate:

    """Tab-related methods which are only needed in the core.

    Those methods are not part of the API which is exposed to extensions, and
    should ideally be removed at some point in the future.
    """

    def __init__(self, mode_manager: modeman.ModeManager,
                 tab: 'AbstractTab') -> None:
        self._widget = None  # type: typing.Optional[QWidget]
        self._tab = tab
        self._mode_manager = mode_manager

    def event_target(self) -> QWidget:
        """Return the widget events should be sent to."""
        raise NotImplementedError

    def handle_auto_insert_mode(self, ok: bool) -> None:
        """Handle `input.insert_mode.auto_load` after loading finished."""
        if not config.val.input.insert_mode.auto_load or not ok:
            return
        if self._mode_manager.mode == usertypes.KeyMode.insert:
            # Already in insert mode - nothing to do.
            return

        def _auto_insert_mode_cb(elem: 'webelem.AbstractWebElement') -> None:
            """Called from JS after finding the focused element."""
            if elem is None:
                log.webview.debug("No focused element!")
            elif elem.is_editable():
                modeman.enter(self._tab.win_id, usertypes.KeyMode.insert,
                              'load finished', only_if_normal=True)

        self._tab.elements.find_focused(_auto_insert_mode_cb)

    def clear_ssl_errors(self) -> None:
        raise NotImplementedError

    def networkaccessmanager(self) -> typing.Optional[QNetworkAccessManager]:
        """Get the QNetworkAccessManager for this tab.

        This is only implemented for QtWebKit.
        For QtWebEngine, always returns None.
        """
        raise NotImplementedError

    def user_agent(self) -> typing.Optional[str]:
        """Get the user agent for this tab.

        This is only implemented for QtWebKit.
        For QtWebEngine, always returns None.
        """
        raise NotImplementedError

    def shutdown(self) -> None:
        raise NotImplementedError
class AbstractTab(QWidget):

    """An adapter for QWebView/QWebEngineView representing a single tab."""

    #: Signal emitted when a website requests to close this tab.
    window_close_requested = pyqtSignal()
    #: Signal emitted when a link is hovered (the hover text)
    link_hovered = pyqtSignal(str)
    #: Signal emitted when a page started loading
    load_started = pyqtSignal()
    #: Signal emitted when a page is loading (progress percentage)
    load_progress = pyqtSignal(int)
    #: Signal emitted when a page finished loading (success as bool)
    load_finished = pyqtSignal(bool)
    #: Signal emitted when a page's favicon changed (icon as QIcon)
    icon_changed = pyqtSignal(QIcon)
    #: Signal emitted when a page's title changed (new title as str)
    title_changed = pyqtSignal(str)
    #: Signal emitted when a new tab should be opened (url as QUrl)
    new_tab_requested = pyqtSignal(QUrl)
    #: Signal emitted when a page's URL changed (url as QUrl)
    url_changed = pyqtSignal(QUrl)
    #: Signal emitted when a tab's content size changed
    #: (new size as QSizeF)
    contents_size_changed = pyqtSignal(QSizeF)
    #: Signal emitted when a page requested full-screen (bool)
    fullscreen_requested = pyqtSignal(bool)
    #: Signal emitted before load starts (URL as QUrl)
    before_load_started = pyqtSignal(QUrl)

    # Signal emitted when a page's load status changed
    # (argument: usertypes.LoadStatus)
    load_status_changed = pyqtSignal(usertypes.LoadStatus)
    # Signal emitted before shutting down
    shutting_down = pyqtSignal()
    # Signal emitted when a history item should be added
    history_item_triggered = pyqtSignal(QUrl, QUrl, str)
    # Signal emitted when the underlying renderer process terminated.
    # arg 0: A TerminationStatus member.
    # arg 1: The exit code.
    renderer_process_terminated = pyqtSignal(TerminationStatus, int)

    # Hosts for which a certificate error happened. Shared between all tabs.
    #
    # Note that we remember hosts here, without scheme/port:
    # QtWebEngine/Chromium also only remembers hostnames, and certificates are
    # for a given hostname anyways.
    _insecure_hosts = set()  # type: typing.Set[str]
def __init__(self, *, win_id: int, private: bool,
             parent: QWidget = None) -> None:
    self.is_private = private
    self.win_id = win_id
    # Unique ID for this tab, used as key in the per-window tab registry.
    self.tab_id = next(tab_id_gen)
    super().__init__(parent)

    # Register this tab in the per-window registry and set up a
    # tab-scoped registry for objects belonging to this tab.
    self.registry = objreg.ObjectRegistry()
    tab_registry = objreg.get('tab-registry', scope='window',
                              window=win_id)
    tab_registry[self.tab_id] = self
    objreg.register('tab', self, registry=self.registry)

    self.data = TabData()
    self._layout = miscwidgets.WrapperLayout(self)
    # The actual backend widget; set later via _set_widget().
    self._widget = None  # type: typing.Optional[QWidget]
    self._progress = 0
    self._load_status = usertypes.LoadStatus.none
    self._mouse_event_filter = mouse.MouseEventFilter(
        self, parent=self)
    self.backend = None

    # FIXME:qtwebengine  Should this be public api via self.hints?
    # Also, should we get it out of objreg?
    hintmanager = hints.HintManager(win_id, self.tab_id, parent=self)
    objreg.register('hintmanager', hintmanager, scope='tab',
                    window=self.win_id, tab=self.tab_id)

    self.before_load_started.connect(self._on_before_load_started)

def _set_widget(self, widget: QWidget) -> None:
    """Wire up the backend widget with all tab sub-objects."""
    # pylint: disable=protected-access
    self._widget = widget
    self._layout.wrap(self, widget)
    self.history._history = widget.history()
    self.history.private_api._history = widget.history()
    self.scroller._init_widget(widget)
    self.caret._widget = widget
    self.zoom._widget = widget
    self.search._widget = widget
    self.printing._widget = widget
    self.action._widget = widget
    self.elements._widget = widget
    self.audio._widget = widget
    self.private_api._widget = widget
    self.settings._settings = widget.settings()

    self._install_event_filter()
    self.zoom.apply_default()

def _install_event_filter(self) -> None:
    """Install a backend-specific event filter on the widget."""
    raise NotImplementedError
def _set_load_status(self, val: usertypes.LoadStatus) -> None:
    """Setter for load_status."""
    if isinstance(val, usertypes.LoadStatus):
        log.webview.debug("load status for {}: {}".format(repr(self), val))
        self._load_status = val
        self.load_status_changed.emit(val)
    else:
        raise TypeError("Type {} is no LoadStatus member!".format(val))
def send_event(self, evt: QEvent) -> None:
    """Send the given event to the underlying widget.

    The event will be sent via QApplication.postEvent.
    Note that a posted event must not be re-used in any way!
    """
    # This only gives us some mild protection against re-using events, but
    # it's certainly better than a segfault.
    if getattr(evt, 'posted', False):
        raise utils.Unreachable("Can't re-use an event which was already "
                                "posted!")

    target = self.private_api.event_target()
    if target is None:
        # https://github.com/qutebrowser/qutebrowser/issues/3888
        log.webview.warning("Unable to find event target!")
        return

    evt.posted = True
    QApplication.postEvent(target, evt)
def navigation_blocked(self) -> bool:
    """Test if navigation is allowed on the current tab."""
    if not self.data.pinned:
        return False
    return config.val.tabs.pinned.frozen
@pyqtSlot(QUrl)
def _on_before_load_started(self, url: QUrl) -> None:
    """Adjust the title if we are going to visit a URL soon."""
    qtutils.ensure_valid(url)
    url_string = url.toDisplayString()
    log.webview.debug("Going to start loading: {}".format(url_string))
    # Show the URL as a placeholder title until a real title arrives.
    self.title_changed.emit(url_string)

@pyqtSlot(QUrl)
def _on_url_changed(self, url: QUrl) -> None:
    """Update title when URL has changed and no title is available."""
    if url.isValid() and not self.title():
        self.title_changed.emit(url.toDisplayString())
    self.url_changed.emit(url)
@pyqtSlot()
def _on_load_started(self) -> None:
    """Reset per-load state and propagate the load_started signal."""
    self._progress = 0
    self.data.viewing_source = False
    self._set_load_status(usertypes.LoadStatus.loading)
    self.load_started.emit()

@pyqtSlot(usertypes.NavigationRequest)
def _on_navigation_request(
        self,
        navigation: usertypes.NavigationRequest
) -> None:
    """Handle common acceptNavigationRequest code."""
    url = utils.elide(navigation.url.toDisplayString(), 100)
    log.webview.debug("navigation request: url {}, type {}, is_main_frame "
                      "{}".format(url,
                                  navigation.navigation_type,
                                  navigation.is_main_frame))

    if not navigation.url.isValid():
        # Also a WORKAROUND for missing IDNA 2008 support in QUrl, see
        # https://bugreports.qt.io/browse/QTBUG-60364
        if navigation.navigation_type == navigation.Type.link_clicked:
            msg = urlutils.get_errstring(navigation.url,
                                         "Invalid link clicked")
            message.error(msg)
            self.data.open_target = usertypes.ClickTarget.normal

        log.webview.debug("Ignoring invalid URL {} in "
                          "acceptNavigationRequest: {}".format(
                              navigation.url.toDisplayString(),
                              navigation.url.errorString()))
        # Reject the navigation; the backend checks this flag.
        navigation.accepted = False
@pyqtSlot(bool)
def _on_load_finished(self, ok: bool) -> None:
    """Common bookkeeping after a page finished loading."""
    assert self._widget is not None
    if sip.isdeleted(self._widget):
        # https://github.com/qutebrowser/qutebrowser/issues/3498
        return

    try:
        sess_manager = objreg.get('session-manager')
    except KeyError:
        # https://github.com/qutebrowser/qutebrowser/issues/4311
        return

    sess_manager.save_autosave()
    self.load_finished.emit(ok)

    # Fall back to the URL if the page provided no title.
    if not self.title():
        self.title_changed.emit(self.url().toDisplayString())

    self.zoom.reapply()
def _update_load_status(self, ok: bool) -> None:
    """Update the load status after a page finished loading.

    Needs to be called by subclasses to trigger a load status update, e.g.
    as a response to a loadFinished signal.

    Args:
        ok: Whether the load was successful.
    """
    # FIX: removed the unreachable "elif ok:" branch - the first branch
    # already covers every ok=True case, so it was dead code.
    if ok:
        if self.url().scheme() == 'https':
            if self.url().host() in self._insecure_hosts:
                # A certificate error was overridden for this host, so
                # the https connection isn't trustworthy - only warn.
                self._set_load_status(usertypes.LoadStatus.warn)
            else:
                self._set_load_status(usertypes.LoadStatus.success_https)
        else:
            self._set_load_status(usertypes.LoadStatus.success)
    else:
        self._set_load_status(usertypes.LoadStatus.error)
@pyqtSlot()
def _on_history_trigger(self) -> None:
    """Emit history_item_triggered based on backend-specific signal."""
    raise NotImplementedError

@pyqtSlot(int)
def _on_load_progress(self, perc: int) -> None:
    """Store and propagate the current load progress percentage."""
    self._progress = perc
    self.load_progress.emit(perc)

def url(self, *, requested: bool = False) -> QUrl:
    """Get the current (or requested) URL of this tab."""
    raise NotImplementedError

def progress(self) -> int:
    """Get the current load progress percentage."""
    return self._progress

def load_status(self) -> usertypes.LoadStatus:
    """Get the current load status."""
    return self._load_status

def _load_url_prepare(self, url: QUrl, *,
                      emit_before_load_started: bool = True) -> None:
    """Shared preparation before a backend loads the given URL."""
    qtutils.ensure_valid(url)
    if emit_before_load_started:
        self.before_load_started.emit(url)

def load_url(self, url: QUrl, *,
             emit_before_load_started: bool = True) -> None:
    """Load the given URL in this tab (backend-specific)."""
    raise NotImplementedError

def reload(self, *, force: bool = False) -> None:
    raise NotImplementedError

def stop(self) -> None:
    raise NotImplementedError

def fake_key_press(self,
                   key: Qt.Key,
                   modifier: Qt.KeyboardModifier = Qt.NoModifier) -> None:
    """Send a fake key event to this tab."""
    press_evt = QKeyEvent(QEvent.KeyPress, key, modifier, 0, 0, 0)
    release_evt = QKeyEvent(QEvent.KeyRelease, key, modifier,
                            0, 0, 0)
    self.send_event(press_evt)
    self.send_event(release_evt)

def dump_async(self,
               callback: typing.Callable[[str], None], *,
               plain: bool = False) -> None:
    """Dump the current page's html asynchronously.

    The given callback will be called with the result when dumping is
    complete.
    """
    raise NotImplementedError
def run_js_async(
        self,
        code: str,
        callback: typing.Callable[[typing.Any], None] = None, *,
        world: typing.Union[usertypes.JsWorld, int] = None
) -> None:
    """Run javascript async.

    The given callback will be called with the result when running JS is
    complete.

    Args:
        code: The javascript code to run.
        callback: The callback to call with the result, or None.
        world: A world ID (int or usertypes.JsWorld member) to run the JS
               in the main world or in another isolated world.
    """
    raise NotImplementedError

def title(self) -> str:
    """Get the current page title (backend-specific)."""
    raise NotImplementedError

def icon(self) -> None:
    """Get the current page favicon (backend-specific)."""
    raise NotImplementedError

def set_html(self, html: str, base_url: QUrl = QUrl()) -> None:
    """Set the tab's content to the given HTML string.

    NOTE(review): base_url presumably resolves relative links in the
    HTML - confirm against the backend implementations.
    """
    raise NotImplementedError
def __repr__(self) -> str:
    """Debug representation including tab ID and (elided) URL."""
    try:
        display = self.url().toDisplayString(
            QUrl.EncodeUnicode)  # type: ignore
    except (AttributeError, RuntimeError) as exc:
        # The widget might be gone or not set up yet.
        url = '<{}>'.format(exc.__class__.__name__)
    else:
        url = utils.elide(display, 100)
    return utils.get_repr(self, tab_id=self.tab_id, url=url)
def is_deleted(self) -> bool:
    """Check if the underlying C++ widget object has been deleted."""
    assert self._widget is not None
    return sip.isdeleted(self._widget)
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Wrapper over a QWebEngineView."""
import math
import functools
import re
import html as html_utils
import typing
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, Qt, QPoint, QPointF, QUrl,
QTimer, QObject)
from PyQt5.QtNetwork import QAuthenticator
from PyQt5.QtWidgets import QApplication, QWidget
from PyQt5.QtWebEngineWidgets import QWebEnginePage, QWebEngineScript
from qutebrowser.config import configdata, config
from qutebrowser.browser import (browsertab, eventfilter, shared, webelem,
history, greasemonkey)
from qutebrowser.browser.webengine import (webview, webengineelem, tabhistory,
interceptor, webenginequtescheme,
cookies, webenginedownloads,
webenginesettings, certificateerror)
from qutebrowser.misc import miscwidgets, objects
from qutebrowser.utils import (usertypes, qtutils, log, javascript, utils,
message, objreg, jinja, debug)
from qutebrowser.qt import sip
_qute_scheme_handler = None
def init():
    """Initialize QtWebEngine-specific modules."""
    # For some reason we need to keep a reference, otherwise the scheme handler
    # won't work...
    # https://www.riverbankcomputing.com/pipermail/pyqt/2016-September/038075.html
    global _qute_scheme_handler

    app = QApplication.instance()

    log.init.debug("Initializing qute://* handler...")
    _qute_scheme_handler = webenginequtescheme.QuteSchemeHandler(parent=app)
    _qute_scheme_handler.install(webenginesettings.default_profile)
    # NOTE(review): private_profile can apparently be falsy/absent - every
    # install below guards on it; confirm when it is created.
    if webenginesettings.private_profile:
        _qute_scheme_handler.install(webenginesettings.private_profile)

    log.init.debug("Initializing request interceptor...")
    req_interceptor = interceptor.RequestInterceptor(parent=app)
    req_interceptor.install(webenginesettings.default_profile)
    if webenginesettings.private_profile:
        req_interceptor.install(webenginesettings.private_profile)

    log.init.debug("Initializing QtWebEngine downloads...")
    download_manager = webenginedownloads.DownloadManager(parent=app)
    download_manager.install(webenginesettings.default_profile)
    if webenginesettings.private_profile:
        download_manager.install(webenginesettings.private_profile)
    objreg.register('webengine-download-manager', download_manager)

    log.init.debug("Initializing cookie filter...")
    cookies.install_filter(webenginesettings.default_profile)
    if webenginesettings.private_profile:
        cookies.install_filter(webenginesettings.private_profile)

    # Clear visited links on web history clear
    for p in [webenginesettings.default_profile,
              webenginesettings.private_profile]:
        if not p:
            continue
        history.web_history.history_cleared.connect(p.clearAllVisitedLinks)
        # Bind the profile via a default argument so each lambda keeps
        # its own profile (avoids the late-binding closure pitfall).
        history.web_history.url_cleared.connect(
            lambda url, profile=p: profile.clearVisitedLinks([url]))
# Mapping worlds from usertypes.JsWorld to QWebEngineScript world IDs.
_JS_WORLD_MAP = {
    usertypes.JsWorld.main: QWebEngineScript.MainWorld,
    usertypes.JsWorld.application: QWebEngineScript.ApplicationWorld,
    usertypes.JsWorld.user: QWebEngineScript.UserWorld,
    # No predefined Qt world for jseval - use the first free ID after
    # UserWorld.
    usertypes.JsWorld.jseval: QWebEngineScript.UserWorld + 1,
}
class WebEngineAction(browsertab.AbstractAction):

    """QtWebEngine implementations related to web actions."""

    action_class = QWebEnginePage
    action_base = QWebEnginePage.WebAction

    def exit_fullscreen(self):
        """Leave fullscreen via the corresponding page action."""
        self._widget.triggerPageAction(QWebEnginePage.ExitFullScreen)

    def save_page(self):
        """Save the current page."""
        self._widget.triggerPageAction(QWebEnginePage.SavePage)

    def show_source(self, pygments=False):
        """Show the page source, natively or pygments-highlighted."""
        if pygments:
            self._show_source_pygments()
            return

        if hasattr(QWebEnginePage, 'ViewSource'):
            self._widget.triggerPageAction(QWebEnginePage.ViewSource)
            return

        # Qt < 5.8 has no ViewSource action - open a view-source: URL.
        tb = objreg.get('tabbed-browser', scope='window',
                        window=self._tab.win_id)
        urlstr = self._tab.url().toString(
            QUrl.RemoveUserInfo)  # type: ignore
        # The original URL becomes the path of a view-source: URL
        # (without a host), but query/fragment should stay.
        tb.tabopen(QUrl('view-source:' + urlstr),
                   background=False, related=True)
class WebEnginePrinting(browsertab.AbstractPrinting):

    """QtWebEngine implementations related to printing."""

    def check_pdf_support(self):
        """PDF export is always available with QtWebEngine."""
        pass

    def check_printer_support(self):
        """Raise WebTabError if the page can't print to a real printer."""
        if not hasattr(self._widget.page(), 'print'):
            raise browsertab.WebTabError(
                "Printing is unsupported with QtWebEngine on Qt < 5.8")

    def check_preview_support(self):
        """Print previews are never available with QtWebEngine."""
        raise browsertab.WebTabError(
            "Print previews are unsupported with QtWebEngine")

    def to_pdf(self, filename):
        """Export the page as a PDF file."""
        self._widget.page().printToPdf(filename)

    def to_printer(self, printer, callback=None):
        """Print the page, calling callback(ok) when done."""
        if callback is None:
            def callback(_ok):
                pass
        self._widget.page().print(printer, callback)
class _WebEngineSearchWrapHandler:

    """QtWebEngine implementations related to wrapping when searching.

    Attributes:
        flag_wrap: An additional flag indicating whether the last search
                   used wrapping.
        _active_match: The 1-based index of the currently active match
                       on the page.
        _total_matches: The total number of search matches on the page.
        _nowrap_available: Whether the functionality to prevent wrapping
                           is available.
    """

    def __init__(self):
        self.flag_wrap = True
        self._nowrap_available = False
        self.reset_match_data()

    def connect_signal(self, page):
        """Connect to the findTextFinished signal of the page.

        Args:
            page: The QtWebEnginePage to connect to this handler.
        """
        if qtutils.version_check("5.14"):
            page.findTextFinished.connect(self._store_match_data)
            self._nowrap_available = True

    def _store_match_data(self, result):
        """Store information on the last match.

        The information will be checked against when wrapping is turned off.

        Args:
            result: A FindTextResult passed by the findTextFinished signal.
        """
        self._active_match = result.activeMatch()
        self._total_matches = result.numberOfMatches()
        log.webview.debug("Active search match: {}/{}"
                          .format(self._active_match, self._total_matches))

    def reset_match_data(self):
        """Reset match information.

        Stale information could lead to next_result or prev_result
        misbehaving.
        """
        self._active_match = 0
        self._total_matches = 0

    def prevent_wrapping(self, *, going_up):
        """Prevent wrapping if possible and required.

        Returns True if a wrap was prevented and False if not.

        Args:
            going_up: Whether the search would scroll the page up or down.
        """
        if not self._nowrap_available or self.flag_wrap:
            return False
        if self._total_matches == 0:
            return False
        if going_up and self._active_match == 1:
            message.info("Search hit TOP")
            return True
        if not going_up and self._active_match == self._total_matches:
            message.info("Search hit BOTTOM")
            return True
        return False
class WebEngineSearch(browsertab.AbstractSearch):

    """QtWebEngine implementations related to searching on the page.

    Attributes:
        _flags: The QWebEnginePage.FindFlags of the last search.
        _pending_searches: How many searches have been started but not called
                           back yet.
        _wrap_handler: Used to prevent wrapping if requested (Qt >= 5.14).
    """

    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        self._flags = QWebEnginePage.FindFlags(0)  # type: ignore
        self._pending_searches = 0
        # The API necessary to stop wrapping was added in Qt 5.14.
        self._wrap_handler = _WebEngineSearchWrapHandler()

    def connect_signals(self):
        """Connect the wrap handler to the current page."""
        self._wrap_handler.connect_signal(self._widget.page())

    def _compute_flags(self, ignore_case, reverse):
        """Compute the QWebEnginePage.FindFlags for the given options."""
        flags = QWebEnginePage.FindFlags(0)  # type: ignore
        if self._is_case_sensitive(ignore_case):
            flags |= QWebEnginePage.FindCaseSensitively
        if reverse:
            flags |= QWebEnginePage.FindBackward
        return flags

    def _find(self, text, flags, callback, caller):
        """Call findText on the widget.

        Args:
            text: The text to search for.
            flags: The FindFlags to search with.
            callback: Called with the result (bool), or None.
            caller: Name of the calling method, for debug logging.
        """
        self.search_displayed = True
        self._pending_searches += 1

        def wrapped_callback(found):
            """Wrap the callback to do debug logging."""
            self._pending_searches -= 1
            if self._pending_searches > 0:
                # See https://github.com/qutebrowser/qutebrowser/issues/2442
                # and https://github.com/qt/qtwebengine/blob/5.10/src/core/web_contents_adapter.cpp#L924-L934
                log.webview.debug("Ignoring cancelled search callback with "
                                  "{} pending searches".format(
                                      self._pending_searches))
                return
            if sip.isdeleted(self._widget):
                # This happens when starting a search, and closing the tab
                # before results arrive.
                log.webview.debug("Ignoring finished search for deleted "
                                  "widget")
                return
            found_text = 'found' if found else "didn't find"
            if flags:
                flag_text = 'with flags {}'.format(debug.qflags_key(
                    QWebEnginePage, flags, klass=QWebEnginePage.FindFlag))
            else:
                flag_text = ''
            log.webview.debug(' '.join([caller, found_text, text, flag_text])
                              .strip())
            if callback is not None:
                callback(found)
            self.finished.emit(found)

        self._widget.page().findText(text, flags, wrapped_callback)

    def search(self, text, *, ignore_case=usertypes.IgnoreCase.never,
               reverse=False, wrap=True, result_cb=None):
        """Start a new search.

        Args:
            text: The text to search for.
            ignore_case: A usertypes.IgnoreCase member.
            reverse: Search backwards.
            wrap: Allow wrapping at the top/bottom of the page.
            result_cb: Called with whether a match was found.
        """
        # Compute the flags first: searching for the same text with
        # different case-sensitivity or direction must NOT be treated as a
        # duplicate search (previously only the text was compared).
        flags = self._compute_flags(ignore_case, reverse)
        # Don't go to next entry on duplicate search
        if (self.text == text and self._flags == flags and
                self.search_displayed):
            log.webview.debug("Ignoring duplicate search request"
                              " for {}".format(text))
            return

        self.text = text
        self._flags = flags
        self._wrap_handler.reset_match_data()
        self._wrap_handler.flag_wrap = wrap
        self._find(text, self._flags, result_cb, 'search')

    def clear(self):
        """Clear the current search and its highlights."""
        if self.search_displayed:
            self.cleared.emit()
        self.search_displayed = False
        self._wrap_handler.reset_match_data()
        self._widget.page().findText('')

    def prev_result(self, *, result_cb=None):
        """Go to the previous search result."""
        # The int() here makes sure we get a copy of the flags.
        flags = QWebEnginePage.FindFlags(int(self._flags))  # type: ignore
        if flags & QWebEnginePage.FindBackward:
            # The search was started backwards, so "previous" moves down.
            if self._wrap_handler.prevent_wrapping(going_up=False):
                return
            flags &= ~QWebEnginePage.FindBackward
        else:
            if self._wrap_handler.prevent_wrapping(going_up=True):
                return
            flags |= QWebEnginePage.FindBackward
        self._find(self.text, flags, result_cb, 'prev_result')

    def next_result(self, *, result_cb=None):
        """Go to the next search result."""
        # bool() instead of passing the raw FindFlags value as going_up.
        going_up = bool(self._flags & QWebEnginePage.FindBackward)
        if self._wrap_handler.prevent_wrapping(going_up=going_up):
            return
        self._find(self.text, self._flags, result_cb, 'next_result')
class WebEngineCaret(browsertab.AbstractCaret):
    """QtWebEngine implementations related to moving the cursor/selection."""
    def _flags(self):
        """Get flags to pass to JS."""
        flags = set()
        if qtutils.version_check('5.7.1', compiled=False):
            flags.add('filter-prefix')
        if utils.is_windows:
            flags.add('windows')
        return list(flags)
    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_entered(self, mode):
        """Set up the JS-side caret when caret mode is entered."""
        if mode != usertypes.KeyMode.caret:
            return
        if self._tab.search.search_displayed:
            # We are currently in search mode.
            # convert the search to a blue selection so we can operate on it
            # https://bugreports.qt.io/browse/QTBUG-60673
            self._tab.search.clear()
        self._tab.run_js_async(
            javascript.assemble('caret', 'setFlags', self._flags()))
        self._js_call('setInitialCursor', callback=self._selection_cb)
    def _selection_cb(self, enabled):
        """Emit selection_toggled based on setInitialCursor."""
        if self._mode_manager.mode != usertypes.KeyMode.caret:
            log.webview.debug("Ignoring selection cb due to mode change.")
            return
        if enabled is None:
            # JS can return None/null e.g. on internal pages.
            log.webview.debug("Ignoring selection status None")
            return
        self.selection_toggled.emit(enabled)
    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_left(self, mode):
        """Tear down the JS-side caret when caret mode is left."""
        if mode != usertypes.KeyMode.caret:
            return
        self.drop_selection()
        self._js_call('disableCaret')
    # The movement methods below all delegate to the correspondingly named
    # function in javascript/caret.js.
    def move_to_next_line(self, count=1):
        self._js_call('moveDown', count)
    def move_to_prev_line(self, count=1):
        self._js_call('moveUp', count)
    def move_to_next_char(self, count=1):
        self._js_call('moveRight', count)
    def move_to_prev_char(self, count=1):
        self._js_call('moveLeft', count)
    def move_to_end_of_word(self, count=1):
        self._js_call('moveToEndOfWord', count)
    def move_to_next_word(self, count=1):
        self._js_call('moveToNextWord', count)
    def move_to_prev_word(self, count=1):
        self._js_call('moveToPreviousWord', count)
    def move_to_start_of_line(self):
        self._js_call('moveToStartOfLine')
    def move_to_end_of_line(self):
        self._js_call('moveToEndOfLine')
    def move_to_start_of_next_block(self, count=1):
        self._js_call('moveToStartOfNextBlock', count)
    def move_to_start_of_prev_block(self, count=1):
        self._js_call('moveToStartOfPrevBlock', count)
    def move_to_end_of_next_block(self, count=1):
        self._js_call('moveToEndOfNextBlock', count)
    def move_to_end_of_prev_block(self, count=1):
        self._js_call('moveToEndOfPrevBlock', count)
    def move_to_start_of_document(self):
        self._js_call('moveToStartOfDocument')
    def move_to_end_of_document(self):
        self._js_call('moveToEndOfDocument')
    def toggle_selection(self):
        """Toggle selection mode; emits selection_toggled with the state."""
        self._js_call('toggleSelection', callback=self.selection_toggled.emit)
    def drop_selection(self):
        self._js_call('dropSelection')
    def selection(self, callback):
        """Get the currently selected text asynchronously.

        Args:
            callback: Called with the selected text.
        """
        # Not using selectedText() as WORKAROUND for
        # https://bugreports.qt.io/browse/QTBUG-53134
        # Even on Qt 5.10 selectedText() seems to work poorly, see
        # https://github.com/qutebrowser/qutebrowser/issues/3523
        self._tab.run_js_async(javascript.assemble('caret', 'getSelection'),
                               callback)
    def reverse_selection(self):
        self._js_call('reverseSelection')
    def _follow_selected_cb_wrapped(self, js_elem, tab):
        # Make sure follow_selected_done is emitted even if the callback
        # raises.
        try:
            self._follow_selected_cb(js_elem, tab)
        finally:
            self.follow_selected_done.emit()
    def _follow_selected_cb(self, js_elem, tab):
        """Callback for javascript which clicks the selected element.

        Args:
            js_elem: The element serialized from javascript, the string
                     "focused", or None if nothing was found.
            tab: Open in a new tab.
        """
        if js_elem is None:
            return
        if js_elem == "focused":
            # we had a focused element, not a selected one. Just send <enter>
            self._follow_enter(tab)
            return
        assert isinstance(js_elem, dict), js_elem
        elem = webengineelem.WebEngineElement(js_elem, tab=self._tab)
        if tab:
            click_type = usertypes.ClickTarget.tab
        else:
            click_type = usertypes.ClickTarget.normal
        # Only click if we see a link
        if elem.is_link():
            log.webview.debug("Found link in selection, clicking. ClickTarget "
                              "{}, elem {}".format(click_type, elem))
            try:
                elem.click(click_type)
            except webelem.Error as e:
                message.error(str(e))
    def follow_selected(self, *, tab=False):
        """Follow the currently selected (or focused) link.

        Args:
            tab: Open the link in a new tab.
        """
        if self._tab.search.search_displayed:
            # We are currently in search mode.
            # let's click the link via a fake-click
            # https://bugreports.qt.io/browse/QTBUG-60673
            self._tab.search.clear()
            log.webview.debug("Clicking a searched link via fake key press.")
            # send a fake enter, clicking the orange selection box
            self._follow_enter(tab)
        else:
            # click an existing blue selection
            js_code = javascript.assemble('webelem',
                                          'find_selected_focused_link')
            self._tab.run_js_async(
                js_code,
                lambda jsret: self._follow_selected_cb_wrapped(jsret, tab))
    def _js_call(self, command, *args, callback=None):
        """Assemble and run a caret.js command.

        Args:
            command: The caret.js function to call.
            *args: Arguments passed to the JS function.
            callback: Called with the JS return value, if given.
        """
        code = javascript.assemble('caret', command, *args)
        self._tab.run_js_async(code, callback)
class WebEngineScroller(browsertab.AbstractScroller):

    """QtWebEngine implementations related to scrolling."""

    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        self._pos_perc = (0, 0)   # Cached (x%, y%) scroll position.
        self._pos_px = QPoint()   # Cached scroll position in pixels.
        self._at_bottom = False

    def _init_widget(self, widget):
        super()._init_widget(widget)
        page = widget.page()
        page.scrollPositionChanged.connect(self._update_pos)

    def _repeated_key_press(self, key, count=1, modifier=Qt.NoModifier):
        """Send count fake key presses to this scroller's WebEngineTab.

        The count is capped at 1000 to avoid flooding the page.
        """
        for _ in range(min(count, 1000)):
            self._tab.fake_key_press(key, modifier)

    @staticmethod
    def _axis_perc(scrollable, pos_val, *, axis, content_extent,
                   widget_extent):
        """Convert an absolute scroll offset on one axis to a percentage.

        Extracted to avoid duplicating the computation and its error
        logging for the x- and y-axes.

        Args:
            scrollable: Scrollable range (content extent - widget extent).
            pos_val: The current scroll offset on this axis.
            axis: 'x' or 'y', for debug logging only.
            content_extent: The content width/height, for debug logging.
            widget_extent: The widget width/height, for debug logging.

        Return: An int percentage clamped to [0, 100].
        """
        if scrollable == 0:
            return 0
        try:
            return min(100, round(100 / scrollable * pos_val))
        except ValueError:
            # https://github.com/qutebrowser/qutebrowser/issues/3219
            log.misc.debug("Got ValueError for perc_{}!".format(axis))
            log.misc.debug("content extent ({}): {}".format(
                axis, content_extent))
            log.misc.debug("widget extent ({}): {}".format(
                axis, widget_extent))
            log.misc.debug("scrollable_{}: {}".format(axis, scrollable))
            log.misc.debug("pos.{}(): {}".format(axis, pos_val))
            raise

    @pyqtSlot(QPointF)
    def _update_pos(self, pos):
        """Update the scroll position attributes when it changed."""
        self._pos_px = pos.toPoint()
        contents_size = self._widget.page().contentsSize()

        scrollable_x = contents_size.width() - self._widget.width()
        perc_x = self._axis_perc(scrollable_x, pos.x(), axis='x',
                                 content_extent=contents_size.width(),
                                 widget_extent=self._widget.width())

        scrollable_y = contents_size.height() - self._widget.height()
        perc_y = self._axis_perc(scrollable_y, pos.y(), axis='y',
                                 content_extent=contents_size.height(),
                                 widget_extent=self._widget.height())

        self._at_bottom = math.ceil(pos.y()) >= scrollable_y

        if (self._pos_perc != (perc_x, perc_y) or
                'no-scroll-filtering' in objects.debug_flags):
            self._pos_perc = perc_x, perc_y
            self.perc_changed.emit(*self._pos_perc)

    def pos_px(self):
        """Return the current scroll position in pixels (QPoint)."""
        return self._pos_px

    def pos_perc(self):
        """Return the current scroll position as an (x%, y%) tuple."""
        return self._pos_perc

    def to_perc(self, x=None, y=None):
        """Scroll to the given percentage on either/both axes."""
        js_code = javascript.assemble('scroll', 'to_perc', x, y)
        self._tab.run_js_async(js_code)

    def to_point(self, point):
        """Scroll to the given pixel position."""
        js_code = javascript.assemble('window', 'scroll', point.x(), point.y())
        self._tab.run_js_async(js_code)

    def to_anchor(self, name):
        """Scroll to the given anchor by loading the URL with a fragment."""
        url = self._tab.url()
        url.setFragment(name)
        self._tab.load_url(url)

    def delta(self, x=0, y=0):
        """Scroll by the given pixel deltas."""
        self._tab.run_js_async(javascript.assemble('window', 'scrollBy', x, y))

    def delta_page(self, x=0, y=0):
        """Scroll by the given page (viewport) multiples."""
        js_code = javascript.assemble('scroll', 'delta_page', x, y)
        self._tab.run_js_async(js_code)

    def up(self, count=1):
        self._repeated_key_press(Qt.Key_Up, count)

    def down(self, count=1):
        self._repeated_key_press(Qt.Key_Down, count)

    def left(self, count=1):
        self._repeated_key_press(Qt.Key_Left, count)

    def right(self, count=1):
        self._repeated_key_press(Qt.Key_Right, count)

    def top(self):
        self._tab.fake_key_press(Qt.Key_Home)

    def bottom(self):
        self._tab.fake_key_press(Qt.Key_End)

    def page_up(self, count=1):
        self._repeated_key_press(Qt.Key_PageUp, count)

    def page_down(self, count=1):
        self._repeated_key_press(Qt.Key_PageDown, count)

    def at_top(self):
        return self.pos_px().y() == 0

    def at_bottom(self):
        return self._at_bottom
class WebEngineHistoryPrivate(browsertab.AbstractHistoryPrivate):
    """History-related methods which are not part of the extension API."""
    def serialize(self):
        """Serialize the tab history.

        Raises WebTabError for special URLs on Qt < 5.9, where serializing
        them is problematic.
        """
        if not qtutils.version_check('5.9', compiled=False):
            # WORKAROUND for
            # https://github.com/qutebrowser/qutebrowser/issues/2289
            # Don't use the history's currentItem here, because of
            # https://bugreports.qt.io/browse/QTBUG-59599 and because it doesn't
            # contain view-source.
            scheme = self._tab.url().scheme()
            if scheme in ['view-source', 'chrome']:
                raise browsertab.WebTabError("Can't serialize special URL!")
        return qtutils.serialize(self._history)
    def deserialize(self, data):
        """Restore the tab history from data produced by serialize()."""
        qtutils.deserialize(data, self._history)
    def load_items(self, items):
        """Load the given history items into this tab.

        On Qt >= 5.15 only the most recent item is loaded (WORKAROUND, see
        below); otherwise the full history stream is deserialized and the
        current item's zoom/scroll position restored.
        """
        if qtutils.version_check('5.15', compiled=False):
            # WORKAROUND for https://github.com/qutebrowser/qutebrowser/issues/5359
            if items:
                self._tab.load_url(items[-1].url)
            return
        if items:
            self._tab.before_load_started.emit(items[-1].url)
        stream, _data, cur_data = tabhistory.serialize(items)
        qtutils.deserialize_stream(stream, self._history)
        @pyqtSlot()
        def _on_load_finished():
            # Restore the scroll position once, then disconnect again.
            self._tab.scroller.to_point(cur_data['scroll-pos'])
            self._tab.load_finished.disconnect(_on_load_finished)
        if cur_data is not None:
            if 'zoom' in cur_data:
                self._tab.zoom.set_factor(cur_data['zoom'])
            # Only restore the scroll position if nothing scrolled yet.
            if ('scroll-pos' in cur_data and
                    self._tab.scroller.pos_px() == QPoint(0, 0)):
                self._tab.load_finished.connect(_on_load_finished)
class WebEngineHistory(browsertab.AbstractHistory):
    """QtWebEngine implementations related to page history."""
    def __init__(self, tab):
        super().__init__(tab)
        # Methods which aren't part of the extension API live here.
        self.private_api = WebEngineHistoryPrivate(tab)
    def __len__(self):
        return len(self._history)
    def __iter__(self):
        return iter(self._history.items())
    def current_idx(self):
        """Return the index of the current history item."""
        return self._history.currentItemIndex()
    def can_go_back(self):
        """Return whether there's a previous history entry."""
        return self._history.canGoBack()
    def can_go_forward(self):
        """Return whether there's a next history entry."""
        return self._history.canGoForward()
    def _item_at(self, i):
        return self._history.itemAt(i)
    def _go_to_item(self, item):
        # Announce the navigation before Qt performs it.
        self._tab.before_load_started.emit(item.url())
        self._history.goToItem(item)
class WebEngineZoom(browsertab.AbstractZoom):
    """QtWebEngine implementations related to zooming."""
    def _set_factor_internal(self, factor):
        # Delegate to Qt; the base class tracks the current factor.
        self._widget.setZoomFactor(factor)
class WebEngineElements(browsertab.AbstractElements):

    """QtWebEngine implemementations related to elements on the page."""

    def _js_cb_multiple(self, callback, error_cb, js_elems):
        """Handle found elements coming from JS and call the real callback.

        Args:
            callback: The callback to call with the found elements.
            error_cb: The callback to call in case of an error.
            js_elems: The elements serialized from javascript.
        """
        if js_elems is None:
            error_cb(webelem.Error("Unknown error while getting elements"))
            return
        if not js_elems['success']:
            error_cb(webelem.Error(js_elems['error']))
            return
        callback([webengineelem.WebEngineElement(data, tab=self._tab)
                  for data in js_elems['result']])

    def _js_cb_single(self, callback, js_elem):
        """Handle a found focus elem coming from JS and call the real callback.

        Args:
            callback: The callback to call with the found element.
                      Called with a WebEngineElement or None.
            js_elem: The element serialized from javascript.
        """
        if js_elem is None:
            log.webview.debug("Got element from JS: None")
            callback(None)
            return
        log.webview.debug("Got element from JS: {}".format(
            utils.elide(repr(js_elem), 1000)))
        callback(webengineelem.WebEngineElement(js_elem, tab=self._tab))

    def find_css(self, selector, callback, error_cb, *,
                 only_visible=False):
        """Find all elements matching the given CSS selector."""
        code = javascript.assemble('webelem', 'find_css', selector,
                                   only_visible)
        self._tab.run_js_async(
            code, functools.partial(self._js_cb_multiple, callback, error_cb))

    def find_id(self, elem_id, callback):
        """Find the element with the given ID."""
        code = javascript.assemble('webelem', 'find_id', elem_id)
        self._tab.run_js_async(
            code, functools.partial(self._js_cb_single, callback))

    def find_focused(self, callback):
        """Find the currently focused element."""
        code = javascript.assemble('webelem', 'find_focused')
        self._tab.run_js_async(
            code, functools.partial(self._js_cb_single, callback))

    def find_at_pos(self, pos, callback):
        """Find the element at the given position."""
        assert pos.x() >= 0, pos
        assert pos.y() >= 0, pos
        # JS works in CSS pixels, so undo the current zoom factor.
        pos /= self._tab.zoom.factor()
        code = javascript.assemble('webelem', 'find_at_pos',
                                   pos.x(), pos.y())
        self._tab.run_js_async(
            code, functools.partial(self._js_cb_single, callback))
class WebEngineAudio(browsertab.AbstractAudio):
    """QtWebEngine implementations related to audio/muting.

    Attributes:
        _overridden: Whether the user toggled muting manually.
                     If that's the case, we leave it alone.
    """
    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        # Set when the user mutes/unmutes explicitly; config-based muting
        # then stops touching the page.
        self._overridden = False
    def _connect_signals(self):
        """Connect mute/audible signals and config change notifications."""
        page = self._widget.page()
        page.audioMutedChanged.connect(self.muted_changed)
        page.recentlyAudibleChanged.connect(self.recently_audible_changed)
        self._tab.url_changed.connect(self._on_url_changed)
        config.instance.changed.connect(self._on_config_changed)
    def set_muted(self, muted: bool, override: bool = False) -> None:
        """Set the page's muted state.

        Args:
            muted: Whether to mute the page.
            override: Whether this counts as a manual user override.
        """
        self._overridden = override
        assert self._widget is not None
        page = self._widget.page()
        page.setAudioMuted(muted)
    def is_muted(self):
        """Return whether the page is currently muted."""
        page = self._widget.page()
        return page.isAudioMuted()
    def is_recently_audible(self):
        """Return whether the page was recently playing audio."""
        page = self._widget.page()
        return page.recentlyAudible()
    @pyqtSlot(QUrl)
    def _on_url_changed(self, url):
        """Apply the content.mute setting for the new URL."""
        if self._overridden:
            # The user muted/unmuted manually; don't interfere.
            return
        mute = config.instance.get('content.mute', url=url)
        self.set_muted(mute)
    @config.change_filter('content.mute')
    def _on_config_changed(self):
        """Re-apply muting when the content.mute setting changes."""
        self._on_url_changed(self._tab.url())
class _WebEnginePermissions(QObject):
    """Handling of various permission-related signals."""
    # Using 0 as WORKAROUND for:
    # https://www.riverbankcomputing.com/pipermail/pyqt/2019-July/041903.html
    # Maps a QWebEnginePage feature to the config option controlling it.
    _options = {
        0: 'content.notifications',
        QWebEnginePage.Geolocation: 'content.geolocation',
        QWebEnginePage.MediaAudioCapture: 'content.media_capture',
        QWebEnginePage.MediaVideoCapture: 'content.media_capture',
        QWebEnginePage.MediaAudioVideoCapture: 'content.media_capture',
    }
    # Maps a feature to the text shown in the permission prompt.
    _messages = {
        0: 'show notifications',
        QWebEnginePage.Geolocation: 'access your location',
        QWebEnginePage.MediaAudioCapture: 'record audio',
        QWebEnginePage.MediaVideoCapture: 'record video',
        QWebEnginePage.MediaAudioVideoCapture: 'record audio/video',
    }
    def __init__(self, tab, parent=None):
        super().__init__(parent)
        self._tab = tab
        # Assigned later, see WebEngineTab._set_widget.
        self._widget = typing.cast(QWidget, None)
        # NOTE(review): the .update() calls below mutate the *class-level*
        # dicts through self; this is idempotent, but the state is shared
        # between all instances.
        try:
            self._options.update({
                QWebEnginePage.MouseLock:
                    'content.mouse_lock',
            })
            self._messages.update({
                QWebEnginePage.MouseLock:
                    'hide your mouse pointer',
            })
        except AttributeError:
            # Added in Qt 5.8
            pass
        try:
            self._options.update({
                QWebEnginePage.DesktopVideoCapture:
                    'content.desktop_capture',
                QWebEnginePage.DesktopAudioVideoCapture:
                    'content.desktop_capture',
            })
            self._messages.update({
                QWebEnginePage.DesktopVideoCapture:
                    'capture your desktop',
                QWebEnginePage.DesktopAudioVideoCapture:
                    'capture your desktop and audio',
            })
        except AttributeError:
            # Added in Qt 5.10
            pass
        # Every feature needs both a config option and a prompt message.
        assert self._options.keys() == self._messages.keys()
    def connect_signals(self):
        """Connect related signals from the QWebEnginePage."""
        page = self._widget.page()
        page.fullScreenRequested.connect(
            self._on_fullscreen_requested)
        page.featurePermissionRequested.connect(
            self._on_feature_permission_requested)
        if qtutils.version_check('5.11'):
            # These signals were added in Qt 5.11.
            page.quotaRequested.connect(
                self._on_quota_requested)
            page.registerProtocolHandlerRequested.connect(
                self._on_register_protocol_handler_requested)
    @pyqtSlot('QWebEngineFullScreenRequest')
    def _on_fullscreen_requested(self, request):
        """Accept fullscreen requests and show an exit notification."""
        request.accept()
        on = request.toggleOn()
        self._tab.data.fullscreen = on
        self._tab.fullscreen_requested.emit(on)
        if on:
            timeout = config.val.content.fullscreen.overlay_timeout
            if timeout != 0:
                notification = miscwidgets.FullscreenNotification(self._widget)
                notification.set_timeout(timeout)
                notification.show()
    @pyqtSlot(QUrl, 'QWebEnginePage::Feature')
    def _on_feature_permission_requested(self, url, feature):
        """Ask the user for approval for geolocation/media/etc.."""
        page = self._widget.page()
        grant_permission = functools.partial(
            page.setFeaturePermission, url, feature,
            QWebEnginePage.PermissionGrantedByUser)
        deny_permission = functools.partial(
            page.setFeaturePermission, url, feature,
            QWebEnginePage.PermissionDeniedByUser)
        if feature not in self._options:
            # Unknown features get denied outright.
            log.webview.error("Unhandled feature permission {}".format(
                debug.qenum_key(QWebEnginePage, feature)))
            deny_permission()
            return
        if (
                hasattr(QWebEnginePage, 'DesktopVideoCapture') and
                feature in [QWebEnginePage.DesktopVideoCapture,
                            QWebEnginePage.DesktopAudioVideoCapture] and
                qtutils.version_check('5.13', compiled=False) and
                not qtutils.version_check('5.13.2', compiled=False)
        ):
            # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-78016
            log.webview.warning("Ignoring desktop sharing request due to "
                                "crashes in Qt < 5.13.2")
            deny_permission()
            return
        question = shared.feature_permission(
            url=url.adjusted(QUrl.RemovePath),
            option=self._options[feature], msg=self._messages[feature],
            yes_action=grant_permission, no_action=deny_permission,
            abort_on=[self._tab.abort_questions])
        if question is not None:
            # Abort the question if the page cancels the request meanwhile.
            page.featurePermissionRequestCanceled.connect(
                functools.partial(self._on_feature_permission_cancelled,
                                  question, url, feature))
    def _on_feature_permission_cancelled(self, question, url, feature,
                                         cancelled_url, cancelled_feature):
        """Slot invoked when a feature permission request was cancelled.

        To be used with functools.partial.
        """
        if url == cancelled_url and feature == cancelled_feature:
            try:
                question.abort()
            except RuntimeError:
                # The question could already be deleted, e.g. because it was
                # aborted after a loadStarted signal.
                pass
    def _on_quota_requested(self, request):
        """Ask the user when a page requests persistent storage quota."""
        size = utils.format_size(request.requestedSize())
        shared.feature_permission(
            url=request.origin().adjusted(QUrl.RemovePath),
            option='content.persistent_storage',
            msg='use {} of persistent storage'.format(size),
            yes_action=request.accept, no_action=request.reject,
            abort_on=[self._tab.abort_questions],
            blocking=True)
    def _on_register_protocol_handler_requested(self, request):
        """Ask the user when a page wants to register a protocol handler."""
        shared.feature_permission(
            url=request.origin().adjusted(QUrl.RemovePath),
            option='content.register_protocol_handler',
            msg='open all {} links'.format(request.scheme()),
            yes_action=request.accept, no_action=request.reject,
            abort_on=[self._tab.abort_questions],
            blocking=True)
class _WebEngineScripts(QObject):
    """Management of qutebrowser's injected JavaScript for a tab.

    Handles the global qutebrowser scripts, user stylesheets, Greasemonkey
    user scripts and site-specific quirk scripts.
    """
    def __init__(self, tab, parent=None):
        super().__init__(parent)
        self._tab = tab
        # Assigned later, see WebEngineTab._set_widget.
        self._widget = typing.cast(QWidget, None)
        self._greasemonkey = greasemonkey.gm_manager
    def connect_signals(self):
        """Connect signals to our private slots."""
        config.instance.changed.connect(self._on_config_changed)
        self._tab.search.cleared.connect(functools.partial(
            self._update_stylesheet, searching=False))
        self._tab.search.finished.connect(self._update_stylesheet)
    @pyqtSlot(str)
    def _on_config_changed(self, option):
        """Re-initialize stylesheets when relevant settings change."""
        if option in ['scrolling.bar', 'content.user_stylesheets']:
            self._init_stylesheet()
            self._update_stylesheet()
    @pyqtSlot(bool)
    def _update_stylesheet(self, searching=False):
        """Update the custom stylesheet in existing tabs."""
        css = shared.get_user_stylesheet(searching=searching)
        code = javascript.assemble('stylesheet', 'set_css', css)
        self._tab.run_js_async(code)
    def _inject_early_js(self, name, js_code, *,
                         world=QWebEngineScript.ApplicationWorld,
                         subframes=False):
        """Inject the given script to run early on a page load.

        This runs the script both on DocumentCreation and DocumentReady as on
        some internal pages, DocumentCreation will not work.

        That is a WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66011
        """
        scripts = self._widget.page().scripts()
        for injection in ['creation', 'ready']:
            injection_points = {
                'creation': QWebEngineScript.DocumentCreation,
                'ready': QWebEngineScript.DocumentReady,
            }
            script = QWebEngineScript()
            script.setInjectionPoint(injection_points[injection])
            script.setSourceCode(js_code)
            script.setWorldId(world)
            script.setRunsOnSubFrames(subframes)
            # Name encodes both the logical name and the injection point so
            # _remove_early_js can find both copies again.
            script.setName('_qute_{}_{}'.format(name, injection))
            scripts.insert(script)
    def _remove_early_js(self, name):
        """Remove an early QWebEngineScript."""
        scripts = self._widget.page().scripts()
        for injection in ['creation', 'ready']:
            full_name = '_qute_{}_{}'.format(name, injection)
            script = scripts.findScript(full_name)
            if not script.isNull():
                scripts.remove(script)
    def init(self):
        """Initialize global qutebrowser JavaScript."""
        js_code = javascript.wrap_global(
            'scripts',
            utils.read_file('javascript/scroll.js'),
            utils.read_file('javascript/webelem.js'),
            utils.read_file('javascript/caret.js'),
        )
        if not qtutils.version_check('5.12'):
            # WORKAROUND for Qt versions < 5.12 not exposing window.print().
            # Qt 5.12 has a printRequested() signal so we don't need this hack
            # anymore.
            self._inject_early_js('js',
                                  utils.read_file('javascript/print.js'),
                                  subframes=True,
                                  world=QWebEngineScript.MainWorld)
        # FIXME:qtwebengine what about subframes=True?
        self._inject_early_js('js', js_code, subframes=True)
        self._init_stylesheet()
        # The Greasemonkey metadata block support in QtWebEngine only starts at
        # Qt 5.8. With 5.7.1, we need to inject the scripts ourselves in
        # response to urlChanged.
        if not qtutils.version_check('5.8'):
            self._tab.url_changed.connect(
                self._inject_greasemonkey_scripts_for_url)
        else:
            self._greasemonkey.scripts_reloaded.connect(
                self._inject_all_greasemonkey_scripts)
            self._inject_all_greasemonkey_scripts()
        self._inject_site_specific_quirks()
    def _init_stylesheet(self):
        """Initialize custom stylesheets.

        Partially inspired by QupZilla:
        https://github.com/QupZilla/qupzilla/blob/v2.0/src/lib/app/mainapplication.cpp#L1063-L1101
        """
        self._remove_early_js('stylesheet')
        css = shared.get_user_stylesheet()
        js_code = javascript.wrap_global(
            'stylesheet',
            utils.read_file('javascript/stylesheet.js'),
            javascript.assemble('stylesheet', 'set_css', css),
        )
        self._inject_early_js('stylesheet', js_code, subframes=True)
    @pyqtSlot(QUrl)
    def _inject_greasemonkey_scripts_for_url(self, url):
        """Inject scripts matching the given URL (pre-Qt-5.8 code path)."""
        matching_scripts = self._greasemonkey.scripts_for(url)
        self._inject_greasemonkey_scripts(
            matching_scripts.start, QWebEngineScript.DocumentCreation, True)
        self._inject_greasemonkey_scripts(
            matching_scripts.end, QWebEngineScript.DocumentReady, False)
        self._inject_greasemonkey_scripts(
            matching_scripts.idle, QWebEngineScript.Deferred, False)
    @pyqtSlot()
    def _inject_all_greasemonkey_scripts(self):
        """(Re-)inject all scripts known to the Greasemonkey manager."""
        scripts = self._greasemonkey.all_scripts()
        self._inject_greasemonkey_scripts(scripts)
    def _remove_all_greasemonkey_scripts(self):
        """Remove all scripts we previously registered (named GM-*)."""
        page_scripts = self._widget.page().scripts()
        for script in page_scripts.toList():
            if script.name().startswith("GM-"):
                log.greasemonkey.debug('Removing script: {}'
                                       .format(script.name()))
                removed = page_scripts.remove(script)
                assert removed, script.name()
    def _inject_greasemonkey_scripts(self, scripts=None, injection_point=None,
                                     remove_first=True):
        """Register user JavaScript files with the current tab.

        Args:
            scripts: A list of GreasemonkeyScripts, or None to add all
                     known by the Greasemonkey subsystem.
            injection_point: The QWebEngineScript::InjectionPoint stage
                             to inject the script into, None to use
                             auto-detection.
            remove_first: Whether to remove all previously injected
                          scripts before adding these ones.
        """
        if sip.isdeleted(self._widget):
            return
        # Since we are inserting scripts into a per-tab collection,
        # rather than just injecting scripts on page load, we need to
        # make sure we replace existing scripts, not just add new ones.
        # While, taking care not to remove any other scripts that might
        # have been added elsewhere, like the one for stylesheets.
        page_scripts = self._widget.page().scripts()
        if remove_first:
            self._remove_all_greasemonkey_scripts()
        if not scripts:
            return
        for script in scripts:
            new_script = QWebEngineScript()
            # Resolve the world: either a numeric ID or a JsWorld name.
            try:
                world = int(script.jsworld)
                if not 0 <= world <= qtutils.MAX_WORLD_ID:
                    log.greasemonkey.error(
                        "script {} has invalid value for '@qute-js-world'"
                        ": {}, should be between 0 and {}"
                        .format(
                            script.name,
                            script.jsworld,
                            qtutils.MAX_WORLD_ID))
                    continue
            except ValueError:
                try:
                    world = _JS_WORLD_MAP[usertypes.JsWorld[
                        script.jsworld.lower()]]
                except KeyError:
                    log.greasemonkey.error(
                        "script {} has invalid value for '@qute-js-world'"
                        ": {}".format(script.name, script.jsworld))
                    continue
            new_script.setWorldId(world)
            new_script.setSourceCode(script.code())
            new_script.setName("GM-{}".format(script.name))
            new_script.setRunsOnSubFrames(script.runs_on_sub_frames)
            # Override the @run-at value parsed by QWebEngineScript if desired.
            if injection_point:
                new_script.setInjectionPoint(injection_point)
            elif script.needs_document_end_workaround():
                log.greasemonkey.debug("Forcing @run-at document-end for {}"
                                       .format(script.name))
                new_script.setInjectionPoint(QWebEngineScript.DocumentReady)
            log.greasemonkey.debug('adding script: {}'
                                   .format(new_script.name()))
            page_scripts.insert(new_script)
    def _inject_site_specific_quirks(self):
        """Add site-specific quirk scripts.

        NOTE: This isn't implemented for Qt 5.7 because of different UserScript
        semantics there. We only have a quirk for WhatsApp Web right now. It
        looks like that quirk isn't needed for Qt < 5.13.
        """
        if not config.val.content.site_specific_quirks:
            return
        page_scripts = self._widget.page().scripts()
        for filename in ['whatsapp_web_quirk']:
            script = QWebEngineScript()
            script.setName(filename)
            script.setWorldId(QWebEngineScript.ApplicationWorld)
            script.setInjectionPoint(QWebEngineScript.DocumentReady)
            src = utils.read_file("javascript/{}.user.js".format(filename))
            script.setSourceCode(src)
            page_scripts.insert(script)
class WebEngineTabPrivate(browsertab.AbstractTabPrivate):
    """QtWebEngine-related methods which aren't part of the public API."""
    def networkaccessmanager(self):
        # There is no QNetworkAccessManager with QtWebEngine.
        return None
    def user_agent(self):
        # Getting the per-tab user agent isn't supported with QtWebEngine.
        return None
    def clear_ssl_errors(self):
        raise browsertab.UnsupportedOperationError
    def event_target(self):
        """Return the widget which receives input events for this tab."""
        return self._widget.render_widget()
    def shutdown(self):
        """Shut down the tab cleanly before it gets closed."""
        self._tab.shutting_down.emit()
        self._tab.action.exit_fullscreen()
        self._widget.shutdown()
class WebEngineTab(browsertab.AbstractTab):
"""A QtWebEngine tab in the browser.
Signals:
abort_questions: Emitted when a new load started or we're shutting
down.
"""
abort_questions = pyqtSignal()
    def __init__(self, *, win_id, mode_manager, private, parent=None):
        """Initialize all tab sub-components and the underlying view.

        Args:
            win_id: The ID of the window this tab belongs to.
            mode_manager: The mode manager used by caret/private sub-APIs.
            private: Whether this is a private-browsing tab.
            parent: The parent QObject.
        """
        super().__init__(win_id=win_id, private=private, parent=parent)
        widget = webview.WebEngineView(tabdata=self.data, win_id=win_id,
                                       private=private)
        self.history = WebEngineHistory(tab=self)
        self.scroller = WebEngineScroller(tab=self, parent=self)
        self.caret = WebEngineCaret(mode_manager=mode_manager,
                                    tab=self, parent=self)
        self.zoom = WebEngineZoom(tab=self, parent=self)
        self.search = WebEngineSearch(tab=self, parent=self)
        self.printing = WebEnginePrinting(tab=self)
        self.elements = WebEngineElements(tab=self)
        self.action = WebEngineAction(tab=self)
        self.audio = WebEngineAudio(tab=self, parent=self)
        self.private_api = WebEngineTabPrivate(mode_manager=mode_manager,
                                               tab=self)
        self._permissions = _WebEnginePermissions(tab=self, parent=self)
        self._scripts = _WebEngineScripts(tab=self, parent=self)
        # We're assigning settings in _set_widget
        self.settings = webenginesettings.WebEngineSettings(settings=None)
        self._set_widget(widget)
        self._connect_signals()
        self.backend = usertypes.Backend.QtWebEngine
        self._child_event_filter = None
        # Zoom factor saved in load_url(), re-applied by _restore_zoom().
        self._saved_zoom = None
        # NOTE(review): used outside this chunk; appears to hold a URL to
        # load again later — confirm against the rest of the class.
        self._reload_url = None  # type: typing.Optional[QUrl]
        self._scripts.init()
def _set_widget(self, widget):
# pylint: disable=protected-access
super()._set_widget(widget)
self._permissions._widget = widget
self._scripts._widget = widget
def _install_event_filter(self):
fp = self._widget.focusProxy()
if fp is not None:
fp.installEventFilter(self._tab_event_filter)
self._child_event_filter = eventfilter.ChildEventFilter(
eventfilter=self._tab_event_filter, widget=self._widget,
win_id=self.win_id, parent=self)
self._widget.installEventFilter(self._child_event_filter)
@pyqtSlot()
def _restore_zoom(self):
if sip.isdeleted(self._widget):
# https://github.com/qutebrowser/qutebrowser/issues/3498
return
if self._saved_zoom is None:
return
self.zoom.set_factor(self._saved_zoom)
self._saved_zoom = None
def load_url(self, url, *, emit_before_load_started=True):
"""Load the given URL in this tab.
Arguments:
url: The QUrl to load.
emit_before_load_started: If set to False, before_load_started is
not emitted.
"""
if sip.isdeleted(self._widget):
# https://github.com/qutebrowser/qutebrowser/issues/3896
return
self._saved_zoom = self.zoom.factor()
self._load_url_prepare(
url, emit_before_load_started=emit_before_load_started)
self._widget.load(url)
def url(self, *, requested=False):
page = self._widget.page()
if requested:
return page.requestedUrl()
else:
return page.url()
def dump_async(self, callback, *, plain=False):
if plain:
self._widget.page().toPlainText(callback)
else:
self._widget.page().toHtml(callback)
def run_js_async(self, code, callback=None, *, world=None):
world_id_type = typing.Union[QWebEngineScript.ScriptWorldId, int]
if world is None:
world_id = QWebEngineScript.ApplicationWorld # type: world_id_type
elif isinstance(world, int):
world_id = world
if not 0 <= world_id <= qtutils.MAX_WORLD_ID:
raise browsertab.WebTabError(
"World ID should be between 0 and {}"
.format(qtutils.MAX_WORLD_ID))
else:
world_id = _JS_WORLD_MAP[world]
if callback is None:
self._widget.page().runJavaScript(code, world_id)
else:
self._widget.page().runJavaScript(code, world_id, callback)
def reload(self, *, force=False):
if force:
action = QWebEnginePage.ReloadAndBypassCache
else:
action = QWebEnginePage.Reload
self._widget.triggerPageAction(action)
def stop(self):
self._widget.stop()
def title(self):
return self._widget.title()
def icon(self):
return self._widget.icon()
def set_html(self, html, base_url=QUrl()):
# FIXME:qtwebengine
# check this and raise an exception if too big:
# Warning: The content will be percent encoded before being sent to the
# renderer via IPC. This may increase its size. The maximum size of the
# percent encoded content is 2 megabytes minus 30 bytes.
self._widget.setHtml(html, base_url)
def _show_error_page(self, url, error):
"""Show an error page in the tab."""
log.misc.debug("Showing error page for {}".format(error))
url_string = url.toDisplayString()
error_page = jinja.render(
'error.html',
title="Error loading page: {}".format(url_string),
url=url_string, error=error)
self.set_html(error_page)
@pyqtSlot()
def _on_history_trigger(self):
try:
self._widget.page()
except RuntimeError:
# Looks like this slot can be triggered on destroyed tabs:
# https://crashes.qutebrowser.org/view/3abffbed (Qt 5.9.1)
# wrapped C/C++ object of type WebEngineView has been deleted
log.misc.debug("Ignoring history trigger for destroyed tab")
return
url = self.url()
requested_url = self.url(requested=True)
# Don't save the title if it's generated from the URL
title = self.title()
title_url = QUrl(url)
title_url.setScheme('')
title_url_str = title_url.toDisplayString(
QUrl.RemoveScheme) # type: ignore
if title == title_url_str.strip('/'):
title = ""
# Don't add history entry if the URL is invalid anyways
if not url.isValid():
log.misc.debug("Ignoring invalid URL being added to history")
return
self.history_item_triggered.emit(url, requested_url, title)
@pyqtSlot(QUrl, 'QAuthenticator*', 'QString')
def _on_proxy_authentication_required(self, url, authenticator,
proxy_host):
"""Called when a proxy needs authentication."""
msg = "<b>{}</b> requires a username and password.".format(
html_utils.escape(proxy_host))
urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
answer = message.ask(
title="Proxy authentication required", text=msg,
mode=usertypes.PromptMode.user_pwd,
abort_on=[self.abort_questions], url=urlstr)
if answer is not None:
authenticator.setUser(answer.user)
authenticator.setPassword(answer.password)
else:
try:
sip.assign(authenticator, QAuthenticator()) # type: ignore
except AttributeError:
self._show_error_page(url, "Proxy authentication required")
@pyqtSlot(QUrl, 'QAuthenticator*')
def _on_authentication_required(self, url, authenticator):
log.network.debug("Authentication requested for {}, netrc_used {}"
.format(url.toDisplayString(), self.data.netrc_used))
netrc_success = False
if not self.data.netrc_used:
self.data.netrc_used = True
netrc_success = shared.netrc_authentication(url, authenticator)
if not netrc_success:
log.network.debug("Asking for credentials")
answer = shared.authentication_required(
url, authenticator, abort_on=[self.abort_questions])
if not netrc_success and answer is None:
log.network.debug("Aborting auth")
try:
sip.assign(authenticator, QAuthenticator()) # type: ignore
except AttributeError:
# WORKAROUND for
# https://www.riverbankcomputing.com/pipermail/pyqt/2016-December/038400.html
self._show_error_page(url, "Authentication required")
@pyqtSlot()
def _on_load_started(self):
"""Clear search when a new load is started if needed."""
# WORKAROUND for
# https://bugreports.qt.io/browse/QTBUG-61506
# (seems to be back in later Qt versions as well)
self.search.clear()
super()._on_load_started()
self.data.netrc_used = False
@pyqtSlot(QWebEnginePage.RenderProcessTerminationStatus, int)
def _on_render_process_terminated(self, status, exitcode):
"""Show an error when the renderer process terminated."""
if (status == QWebEnginePage.AbnormalTerminationStatus and
exitcode == 256):
# WORKAROUND for https://bugreports.qt.io/browse/QTBUG-58697
status = QWebEnginePage.CrashedTerminationStatus
status_map = {
QWebEnginePage.NormalTerminationStatus:
browsertab.TerminationStatus.normal,
QWebEnginePage.AbnormalTerminationStatus:
browsertab.TerminationStatus.abnormal,
QWebEnginePage.CrashedTerminationStatus:
browsertab.TerminationStatus.crashed,
QWebEnginePage.KilledTerminationStatus:
browsertab.TerminationStatus.killed,
-1:
browsertab.TerminationStatus.unknown,
}
self.renderer_process_terminated.emit(status_map[status], exitcode)
def _error_page_workaround(self, js_enabled, html):
"""Check if we're displaying a Chromium error page.
This gets called if we got a loadFinished(False), so we can display at
least some error page in situations where Chromium's can't be
displayed.
WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66643
WORKAROUND for https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=882805
Needs to check the page content as a WORKAROUND for
https://bugreports.qt.io/browse/QTBUG-66661
"""
match = re.search(r'"errorCode":"([^"]*)"', html)
if match is None:
return
error = match.group(1)
log.webview.error("Load error: {}".format(error))
missing_jst = 'jstProcess(' in html and 'jstProcess=' not in html
if js_enabled and not missing_jst:
return
self._show_error_page(self.url(), error=error)
@pyqtSlot(int)
def _on_load_progress(self, perc: int) -> None:
"""QtWebEngine-specific loadProgress workarounds.
WORKAROUND for https://bugreports.qt.io/browse/QTBUG-65223
"""
super()._on_load_progress(perc)
if (perc == 100 and
qtutils.version_check('5.10', compiled=False) and
self.load_status() != usertypes.LoadStatus.error):
self._update_load_status(ok=True)
@pyqtSlot(bool)
def _on_load_finished(self, ok: bool) -> None:
"""QtWebEngine-specific loadFinished workarounds."""
super()._on_load_finished(ok)
# WORKAROUND for https://bugreports.qt.io/browse/QTBUG-65223
if qtutils.version_check('5.10', compiled=False):
if not ok:
self._update_load_status(ok)
else:
self._update_load_status(ok)
if not ok:
self.dump_async(functools.partial(
self._error_page_workaround,
self.settings.test_attribute('content.javascript.enabled')))
if ok and self._reload_url is not None:
# WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66656
log.config.debug(
"Loading {} again because of config change".format(
self._reload_url.toDisplayString()))
QTimer.singleShot(100, functools.partial(
self.load_url, self._reload_url,
emit_before_load_started=False))
self._reload_url = None
@pyqtSlot(certificateerror.CertificateErrorWrapper)
def _on_ssl_errors(self, error):
self._has_ssl_errors = True
url = error.url()
log.webview.debug("Certificate error: {}".format(error))
if error.is_overridable():
error.ignore = shared.ignore_certificate_errors(
url, [error], abort_on=[self.abort_questions])
else:
log.webview.error("Non-overridable certificate error: "
"{}".format(error))
log.webview.debug("ignore {}, URL {}, requested {}".format(
error.ignore, url, self.url(requested=True)))
# WORKAROUND for https://bugreports.qt.io/browse/QTBUG-56207
show_cert_error = (
not qtutils.version_check('5.9') and
not error.ignore
)
# WORKAROUND for https://codereview.qt-project.org/c/qt/qtwebengine/+/270556
show_non_overr_cert_error = (
not error.is_overridable() and (
# Affected Qt versions:
# 5.13 before 5.13.2
# 5.12 before 5.12.6
# < 5.12
(qtutils.version_check('5.13') and
not qtutils.version_check('5.13.2')) or
(qtutils.version_check('5.12') and
not qtutils.version_check('5.12.6')) or
not qtutils.version_check('5.12')
)
)
# We can't really know when to show an error page, as the error might
# have happened when loading some resource.
# However, self.url() is not available yet and the requested URL
# might not match the URL we get from the error - so we just apply a
# heuristic here.
if ((show_cert_error or show_non_overr_cert_error) and
url.matches(self.data.last_navigation.url, QUrl.RemoveScheme)):
self._show_error_page(url, str(error))
@pyqtSlot(QUrl)
def _on_before_load_started(self, url):
"""If we know we're going to visit a URL soon, change the settings.
This is a WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66656
"""
super()._on_before_load_started(url)
if not qtutils.version_check('5.11.1', compiled=False):
self.settings.update_for_url(url)
@pyqtSlot()
def _on_print_requested(self):
"""Slot for window.print() in JS."""
try:
self.printing.show_dialog()
except browsertab.WebTabError as e:
message.error(str(e))
@pyqtSlot(QUrl)
def _on_url_changed(self, url: QUrl) -> None:
"""Update settings for the current URL.
Normally this is done below in _on_navigation_request, but we also need
to do it here as WORKAROUND for
https://bugreports.qt.io/browse/QTBUG-77137
Since update_for_url() is idempotent, it doesn't matter much if we end
up doing it twice.
"""
super()._on_url_changed(url)
if url.isValid() and qtutils.version_check('5.13'):
self.settings.update_for_url(url)
@pyqtSlot(usertypes.NavigationRequest)
def _on_navigation_request(self, navigation):
super()._on_navigation_request(navigation)
if navigation.url == QUrl('qute://print'):
self._on_print_requested()
navigation.accepted = False
if not navigation.accepted or not navigation.is_main_frame:
return
settings_needing_reload = {
'content.plugins',
'content.javascript.enabled',
'content.javascript.can_access_clipboard',
'content.print_element_backgrounds',
'input.spatial_navigation',
}
assert settings_needing_reload.issubset(configdata.DATA)
changed = self.settings.update_for_url(navigation.url)
reload_needed = bool(changed & settings_needing_reload)
# On Qt < 5.11, we don't don't need a reload when type == link_clicked.
# On Qt 5.11.0, we always need a reload.
# On Qt > 5.11.0, we never need a reload:
# https://codereview.qt-project.org/#/c/229525/1
# WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66656
if qtutils.version_check('5.11.1', compiled=False):
reload_needed = False
elif not qtutils.version_check('5.11.0', exact=True, compiled=False):
if navigation.navigation_type == navigation.Type.link_clicked:
reload_needed = False
if reload_needed:
self._reload_url = navigation.url
def _on_select_client_certificate(self, selection):
"""Handle client certificates.
Currently, we simply pick the first available certificate and show an
additional note if there are multiple matches.
"""
certificate = selection.certificates()[0]
text = ('<b>Subject:</b> {subj}<br/>'
'<b>Issuer:</b> {issuer}<br/>'
'<b>Serial:</b> {serial}'.format(
subj=html_utils.escape(certificate.subjectDisplayName()),
issuer=html_utils.escape(certificate.issuerDisplayName()),
serial=bytes(certificate.serialNumber()).decode('ascii')))
if len(selection.certificates()) > 1:
text += ('<br/><br/><b>Note:</b> Multiple matching certificates '
'were found, but certificate selection is not '
'implemented yet!')
urlstr = selection.host().host()
present = message.ask(
title='Present client certificate to {}?'.format(urlstr),
text=text,
mode=usertypes.PromptMode.yesno,
abort_on=[self.abort_questions],
url=urlstr)
if present:
selection.select(certificate)
else:
selection.selectNone()
def _connect_signals(self):
view = self._widget
page = view.page()
page.windowCloseRequested.connect(self.window_close_requested)
page.linkHovered.connect(self.link_hovered)
page.loadProgress.connect(self._on_load_progress)
page.loadStarted.connect(self._on_load_started)
page.certificate_error.connect(self._on_ssl_errors)
page.authenticationRequired.connect(self._on_authentication_required)
page.proxyAuthenticationRequired.connect(
self._on_proxy_authentication_required)
page.contentsSizeChanged.connect(self.contents_size_changed)
page.navigation_request.connect(self._on_navigation_request)
if qtutils.version_check('5.12'):
page.printRequested.connect(self._on_print_requested)
try:
# pylint: disable=unused-import
from PyQt5.QtWebEngineWidgets import ( # type: ignore
QWebEngineClientCertificateSelection)
except ImportError:
pass
else:
page.selectClientCertificate.connect(
self._on_select_client_certificate)
view.titleChanged.connect(self.title_changed)
view.urlChanged.connect(self._on_url_changed)
view.renderProcessTerminated.connect(
self._on_render_process_terminated)
view.iconChanged.connect(self.icon_changed)
page.loadFinished.connect(self._on_history_trigger)
page.loadFinished.connect(self._restore_zoom)
page.loadFinished.connect(self._on_load_finished)
self.before_load_started.connect(self._on_before_load_started)
self.shutting_down.connect(self.abort_questions) # type: ignore
self.load_started.connect(self.abort_questions) # type: ignore
# pylint: disable=protected-access
self.audio._connect_signals()
self.search.connect_signals()
self._permissions.connect_signals()
self._scripts.connect_signals()
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Base class for a wrapper over QWebView/QWebEngineView."""
import enum
import itertools
import attr
from PyQt5.QtCore import pyqtSignal, pyqtSlot, QUrl, QObject, QSizeF, Qt
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QWidget, QApplication, QDialog
from PyQt5.QtPrintSupport import QPrintDialog
import pygments
import pygments.lexers
import pygments.formatters
from qutebrowser.keyinput import modeman
from qutebrowser.config import config
from qutebrowser.utils import (utils, objreg, usertypes, log, qtutils,
urlutils, message)
from qutebrowser.misc import miscwidgets, objects
from qutebrowser.browser import mouse, hints
from qutebrowser.qt import sip
tab_id_gen = itertools.count(0)
def create(win_id, private, parent=None):
    """Instantiate the tab class matching the active backend.

    Args:
        win_id: The window ID where the tab will be shown.
        private: Whether the tab is a private/off the record tab.
        parent: The Qt parent to set.

    Return:
        A WebEngineTab or WebKitTab, depending on objects.backend.
    """
    # Backend modules are imported lazily, both to avoid a hard QtWebEngine
    # dependency and to prevent circular imports.
    mode_manager = modeman.instance(win_id)
    if objects.backend == usertypes.Backend.QtWebEngine:
        from qutebrowser.browser.webengine import webenginetab
        tab_cls = webenginetab.WebEngineTab
    else:
        from qutebrowser.browser.webkit import webkittab
        tab_cls = webkittab.WebKitTab
    return tab_cls(win_id=win_id, mode_manager=mode_manager,
                   private=private, parent=parent)
def init():
    """Initialize backend-specific modules."""
    if objects.backend != usertypes.Backend.QtWebEngine:
        # Nothing backend-specific to set up for QtWebKit.
        return
    from qutebrowser.browser.webengine import webenginetab
    webenginetab.init()
class WebTabError(Exception):

    """Root of the exception hierarchy for tab-related errors."""
class UnsupportedOperationError(WebTabError):

    """Raised when the active backend can't perform the requested operation."""
class TerminationStatus(enum.Enum):

    """How a tab's renderer process ended."""

    normal = 1
    abnormal = 2  # non-zero exit status
    crashed = 3   # e.g. segfault
    killed = 4
    unknown = 5
@attr.s
class TabData:

    """A simple namespace with a fixed set of attributes.

    Attributes:
        keep_icon: Whether the (e.g. cloned) icon should not be cleared on page
                   load.
        inspector: The QWebInspector used for this webview.
        viewing_source: Set if we're currently showing a source view.
                        Only used when sources are shown via pygments.
        open_target: Where to open the next link.
                     Only used for QtWebKit.
        override_target: Override for open_target for fake clicks (like hints).
                         Only used for QtWebKit.
        pinned: Flag to pin the tab.
        fullscreen: Whether the tab has a video shown fullscreen currently.
        netrc_used: Whether netrc authentication was performed.
        input_mode: current input mode for the tab.
    """

    keep_icon = attr.ib(False)
    viewing_source = attr.ib(False)
    inspector = attr.ib(None)
    open_target = attr.ib(usertypes.ClickTarget.normal)
    override_target = attr.ib(None)
    pinned = attr.ib(False)
    fullscreen = attr.ib(False)
    netrc_used = attr.ib(False)
    input_mode = attr.ib(usertypes.KeyMode.normal)

    def should_show_icon(self):
        """Whether a favicon should be shown for this tab."""
        show = config.val.tabs.favicons.show
        return show == 'always' or (show == 'pinned' and self.pinned)
class AbstractAction:

    """Attribute of AbstractTab for Qt WebActions.

    Class attributes (overridden by subclasses):
        action_class: The class actions are defined on (QWeb{Engine,}Page)
        action_base: The type of the actions (QWeb{Engine,}Page.WebAction)
    """

    action_class = None
    action_base = None

    def __init__(self, tab):
        self._widget = None
        self._tab = tab

    def exit_fullscreen(self):
        """Exit the fullscreen mode."""
        raise NotImplementedError

    def save_page(self):
        """Save the current page."""
        raise NotImplementedError

    def run_string(self, name):
        """Run a webaction based on its name."""
        # getattr with a None default: an unknown name then fails the
        # isinstance check below and raises a proper WebTabError.
        member = getattr(self.action_class, name, None)
        if not isinstance(member, self.action_base):
            raise WebTabError("{} is not a valid web action!".format(name))
        self._widget.triggerPageAction(member)

    def show_source(self,
                    pygments=False):  # pylint: disable=redefined-outer-name
        """Show the source of the current page in a new tab."""
        raise NotImplementedError

    def _show_source_pygments(self):
        """Show the page source, highlighted via pygments, in a new tab."""

        def show_source_cb(source):
            """Show source as soon as it's ready."""
            # WORKAROUND for https://github.com/PyCQA/pylint/issues/491
            # pylint: disable=no-member
            lexer = pygments.lexers.HtmlLexer()
            formatter = pygments.formatters.HtmlFormatter(
                full=True, linenos='table')
            # pylint: enable=no-member
            highlighted = pygments.highlight(source, lexer, formatter)

            tb = objreg.get('tabbed-browser', scope='window',
                            window=self._tab.win_id)
            new_tab = tb.tabopen(background=False, related=True)
            new_tab.set_html(highlighted, self._tab.url())
            new_tab.data.viewing_source = True

        self._tab.dump_async(show_source_cb)
class AbstractPrinting:

    """Attribute of AbstractTab for printing the page."""

    def __init__(self, tab):
        self._widget = None
        self._tab = tab

    def check_pdf_support(self):
        """Raise if printing to PDF isn't supported with this backend."""
        raise NotImplementedError

    def check_printer_support(self):
        """Raise if printing to a printer isn't supported."""
        raise NotImplementedError

    def check_preview_support(self):
        """Raise if a print preview isn't supported."""
        raise NotImplementedError

    def to_pdf(self, filename):
        """Print the tab to a PDF file with the given filename."""
        raise NotImplementedError

    def to_printer(self, printer, callback=None):
        """Print the tab.

        Args:
            printer: The QPrinter to print to.
            callback: Called with a boolean
                      (True if printing succeeded, False otherwise)
        """
        raise NotImplementedError

    def show_dialog(self):
        """Print with a QPrintDialog."""
        self.check_printer_support()

        def print_callback(ok):
            """Called when printing finished."""
            if not ok:
                message.error("Printing failed!")
            # The dialog must outlive the async print job, so it's only
            # deleted once printing finished.
            diag.deleteLater()

        def do_print():
            """Called when the dialog was closed."""
            self.to_printer(diag.printer(), print_callback)

        diag = QPrintDialog(self._tab)
        if utils.is_mac:
            # For some reason we get a segfault when using open() on macOS
            ret = diag.exec_()
            if ret == QDialog.Accepted:
                do_print()
        else:
            diag.open(do_print)
class AbstractSearch(QObject):

    """Attribute of AbstractTab for doing searches.

    Attributes:
        text: The last thing this view was searched for.
        search_displayed: Whether we're currently displaying search results in
                          this view.
        _flags: The flags of the last search (needs to be set by subclasses).
        _widget: The underlying WebView widget.

    Signals:
        finished: Emitted when a search was finished.
                  arg: True if the text was found, False otherwise.
        cleared: Emitted when an existing search was cleared.
    """

    finished = pyqtSignal(bool)
    cleared = pyqtSignal()

    def __init__(self, tab, parent=None):
        super().__init__(parent)
        self._tab = tab
        self._widget = None
        self.text = None
        self.search_displayed = False

    def _is_case_sensitive(self, ignore_case):
        """Check if case-sensitivity should be used.

        This assumes self.text is already set properly.

        Arguments:
            ignore_case: The ignore_case value from the config.
        """
        # Note all dict values are evaluated eagerly, so self.text must be a
        # string here regardless of which mode is configured.
        mapping = {
            'smart': not self.text.islower(),
            'never': True,
            'always': False,
        }
        return mapping[ignore_case]

    def search(self, text, *, ignore_case='never', reverse=False,
               result_cb=None):
        """Find the given text on the page.

        Args:
            text: The text to search for.
            ignore_case: Search case-insensitively. ('always'/'never/'smart')
            reverse: Reverse search direction.
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def clear(self):
        """Clear the current search."""
        raise NotImplementedError

    def prev_result(self, *, result_cb=None):
        """Go to the previous result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def next_result(self, *, result_cb=None):
        """Go to the next result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError
class AbstractZoom(QObject):

    """Attribute of AbstractTab for controlling zoom.

    Attributes:
        _neighborlist: A NeighborList with the zoom levels.
        _default_zoom_changed: Whether the zoom was changed from the default.
    """

    def __init__(self, tab, parent=None):
        super().__init__(parent)
        self._tab = tab
        self._widget = None
        self._default_zoom_changed = False
        self._init_neighborlist()
        config.instance.changed.connect(self._on_config_changed)
        # Cached zoom factor (1.0 == 100%), mirroring the widget's state.
        self._zoom_factor = float(config.val.zoom.default) / 100

        # # FIXME:qtwebengine is this needed?
        # # For some reason, this signal doesn't get disconnected automatically
        # # when the WebView is destroyed on older PyQt versions.
        # # See https://github.com/qutebrowser/qutebrowser/issues/390
        # self.destroyed.connect(functools.partial(
        #     cfg.changed.disconnect, self.init_neighborlist))

    @pyqtSlot(str)
    def _on_config_changed(self, option):
        """Reset the zoom to the (new) default unless manually changed."""
        if option in ['zoom.levels', 'zoom.default']:
            if not self._default_zoom_changed:
                factor = float(config.val.zoom.default) / 100
                self.set_factor(factor)
            self._init_neighborlist()

    def _init_neighborlist(self):
        """Initialize self._neighborlist."""
        levels = config.val.zoom.levels
        self._neighborlist = usertypes.NeighborList(
            levels, mode=usertypes.NeighborList.Modes.edge)
        self._neighborlist.fuzzyval = config.val.zoom.default

    def offset(self, offset):
        """Increase/Decrease the zoom level by the given offset.

        Args:
            offset: The offset in the zoom level list.

        Return:
            The new zoom percentage.
        """
        level = self._neighborlist.getitem(offset)
        # fuzzyval=False: the neighborlist position was just updated by
        # getitem() above.
        self.set_factor(float(level) / 100, fuzzyval=False)
        return level

    def _set_factor_internal(self, factor):
        """Set the zoom factor on the underlying widget (backend-specific)."""
        raise NotImplementedError

    def set_factor(self, factor, *, fuzzyval=True):
        """Zoom to a given zoom factor.

        Args:
            factor: The zoom factor as float.
            fuzzyval: Whether to set the NeighborLists fuzzyval.
        """
        # Note the fuzzyval is recorded before the factor is validated below.
        if fuzzyval:
            self._neighborlist.fuzzyval = int(factor * 100)
        if factor < 0:
            raise ValueError("Can't zoom to factor {}!".format(factor))

        default_zoom_factor = float(config.val.zoom.default) / 100
        self._default_zoom_changed = (factor != default_zoom_factor)

        self._zoom_factor = factor
        self._set_factor_internal(factor)

    def factor(self):
        """Return the current zoom factor (1.0 == 100%)."""
        return self._zoom_factor

    def set_default(self):
        """Reset the widget zoom to the configured default."""
        self._set_factor_internal(float(config.val.zoom.default) / 100)

    def set_current(self):
        """Re-apply the cached zoom factor to the widget."""
        self._set_factor_internal(self._zoom_factor)
class AbstractCaret(QObject):

    """Attribute of AbstractTab for caret browsing.

    Signals:
        selection_toggled: Emitted when the selection was toggled.
                           arg: Whether the selection is now active.
        follow_selected_done: Emitted when a follow_selection action is done.
    """

    selection_toggled = pyqtSignal(bool)
    follow_selected_done = pyqtSignal()

    def __init__(self, tab, mode_manager, parent=None):
        super().__init__(parent)
        self._tab = tab
        self._widget = None
        self.selection_enabled = False
        # Subclasses react to entering/leaving caret mode via these hooks.
        mode_manager.entered.connect(self._on_mode_entered)
        mode_manager.left.connect(self._on_mode_left)

    def _on_mode_entered(self, mode):
        raise NotImplementedError

    def _on_mode_left(self, mode):
        raise NotImplementedError

    # Cursor movement primitives - all backend-specific.

    def move_to_next_line(self, count=1):
        raise NotImplementedError

    def move_to_prev_line(self, count=1):
        raise NotImplementedError

    def move_to_next_char(self, count=1):
        raise NotImplementedError

    def move_to_prev_char(self, count=1):
        raise NotImplementedError

    def move_to_end_of_word(self, count=1):
        raise NotImplementedError

    def move_to_next_word(self, count=1):
        raise NotImplementedError

    def move_to_prev_word(self, count=1):
        raise NotImplementedError

    def move_to_start_of_line(self):
        raise NotImplementedError

    def move_to_end_of_line(self):
        raise NotImplementedError

    def move_to_start_of_next_block(self, count=1):
        raise NotImplementedError

    def move_to_start_of_prev_block(self, count=1):
        raise NotImplementedError

    def move_to_end_of_next_block(self, count=1):
        raise NotImplementedError

    def move_to_end_of_prev_block(self, count=1):
        raise NotImplementedError

    def move_to_start_of_document(self):
        raise NotImplementedError

    def move_to_end_of_document(self):
        raise NotImplementedError

    def toggle_selection(self):
        raise NotImplementedError

    def drop_selection(self):
        raise NotImplementedError

    def selection(self, callback):
        """Get the currently selected text; calls callback with the result."""
        raise NotImplementedError

    def _follow_enter(self, tab):
        """Follow a link by faking an enter press."""
        if tab:
            self._tab.key_press(Qt.Key_Enter, modifier=Qt.ControlModifier)
        else:
            self._tab.key_press(Qt.Key_Enter)

    def follow_selected(self, *, tab=False):
        """Follow the currently selected link, optionally in a new tab."""
        raise NotImplementedError
class AbstractScroller(QObject):

    """Attribute of AbstractTab to manage scroll position."""

    # Emitted with the new x/y scroll percentages.
    perc_changed = pyqtSignal(int, int)

    def __init__(self, tab, parent=None):
        super().__init__(parent)
        self._tab = tab
        self._widget = None
        self.perc_changed.connect(self._log_scroll_pos_change)

    @pyqtSlot()
    def _log_scroll_pos_change(self):
        log.webview.vdebug("Scroll position changed to {}".format(
            self.pos_px()))

    def _init_widget(self, widget):
        self._widget = widget

    def pos_px(self):
        """Return the scroll position in pixels."""
        raise NotImplementedError

    def pos_perc(self):
        """Return the scroll position as (x, y) percentages."""
        raise NotImplementedError

    def to_perc(self, x=None, y=None):
        """Scroll to the given percentages."""
        raise NotImplementedError

    def to_point(self, point):
        """Scroll to the given pixel position."""
        raise NotImplementedError

    def to_anchor(self, name):
        """Scroll to the anchor with the given name on the page."""
        raise NotImplementedError

    def delta(self, x=0, y=0):
        """Scroll by the given pixel deltas."""
        raise NotImplementedError

    def delta_page(self, x=0, y=0):
        """Scroll by the given page-size multiples."""
        raise NotImplementedError

    def up(self, count=1):
        raise NotImplementedError

    def down(self, count=1):
        raise NotImplementedError

    def left(self, count=1):
        raise NotImplementedError

    def right(self, count=1):
        raise NotImplementedError

    def top(self):
        raise NotImplementedError

    def bottom(self):
        raise NotImplementedError

    def page_up(self, count=1):
        raise NotImplementedError

    def page_down(self, count=1):
        raise NotImplementedError

    def at_top(self):
        raise NotImplementedError

    def at_bottom(self):
        raise NotImplementedError
class AbstractHistory:

    """The history attribute of a AbstractTab."""

    def __init__(self, tab):
        self._tab = tab
        self._history = None

    def __len__(self):
        return len(self._history)

    def __iter__(self):
        return iter(self._history.items())

    def current_idx(self):
        """Return the index of the current history entry."""
        raise NotImplementedError

    def back(self, count=1):
        """Go back in the tab's history.

        Clamps to the oldest entry and raises WebTabError when there aren't
        enough older entries.
        """
        target = self.current_idx() - count
        if target < 0:
            self._go_to_item(self._item_at(0))
            raise WebTabError("At beginning of history.")
        self._go_to_item(self._item_at(target))

    def forward(self, count=1):
        """Go forward in the tab's history.

        Clamps to the newest entry and raises WebTabError when there aren't
        enough newer entries.
        """
        target = self.current_idx() + count
        if target >= len(self):
            self._go_to_item(self._item_at(len(self) - 1))
            raise WebTabError("At end of history.")
        self._go_to_item(self._item_at(target))

    def can_go_back(self):
        raise NotImplementedError

    def can_go_forward(self):
        raise NotImplementedError

    def _item_at(self, i):
        raise NotImplementedError

    def _go_to_item(self, item):
        raise NotImplementedError

    def serialize(self):
        """Serialize into an opaque format understood by self.deserialize."""
        raise NotImplementedError

    def deserialize(self, data):
        """Deserialize from a format produced by self.serialize."""
        raise NotImplementedError

    def load_items(self, items):
        """Deserialize from a list of WebHistoryItems."""
        raise NotImplementedError
class AbstractElements:

    """Finding and handling of elements on the page."""

    def __init__(self, tab):
        self._widget = None
        self._tab = tab

    def find_css(self, selector, callback, *, only_visible=False):
        """Find all HTML elements matching a given selector async.

        Args:
            callback: The callback to be called when the search finished.
            selector: The CSS selector to search for.
            only_visible: Only show elements which are visible on screen.
        """
        raise NotImplementedError

    def find_id(self, elem_id, callback):
        """Find the HTML element with the given ID async.

        Args:
            callback: The callback to be called when the search finished.
            elem_id: The ID to search for.
        """
        raise NotImplementedError

    def find_focused(self, callback):
        """Find the focused element on the page async.

        Args:
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
        """
        raise NotImplementedError

    def find_at_pos(self, pos, callback):
        """Find the element at the given position async.

        This is also called "hit test" elsewhere.

        Args:
            pos: The QPoint to get the element for.
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
        """
        raise NotImplementedError
class AbstractAudio(QObject):

    """Handling of audio/muting for this tab."""

    muted_changed = pyqtSignal(bool)
    recently_audible_changed = pyqtSignal(bool)

    def __init__(self, tab, parent=None):
        super().__init__(parent)
        self._widget = None
        self._tab = tab

    def set_muted(self, muted: bool, override: bool = False):
        """Set this tab as muted or not.

        Arguments:
            override: If set to True, muting/unmuting was done manually and
                      overrides future automatic mute/unmute changes based on
                      the URL.
        """
        raise NotImplementedError

    def is_muted(self) -> bool:
        """Whether this tab is muted."""
        raise NotImplementedError

    def toggle_muted(self, *, override: bool = False):
        """Flip the current mute state (see set_muted for override)."""
        self.set_muted(not self.is_muted(), override=override)

    def is_recently_audible(self) -> bool:
        """Whether this tab has had audio playing recently."""
        raise NotImplementedError
class AbstractTab(QWidget):

    """A wrapper over the given widget to hide its API and expose another one.

    We use this to unify QWebView and QWebEngineView.

    Attributes:
        history: The AbstractHistory for the current tab.
        registry: The ObjectRegistry associated with this tab.
        private: Whether private browsing is turned on for this tab.
        _load_status: loading status of this page
                      Accessible via load_status() method.
        _insecure_hosts: Class-level set of hosts with past certificate
                         errors; successful https loads from them only get a
                         'warn' status.

        for properties, see WebView/WebEngineView docs.

    Signals:
        See related Qt signals.

        new_tab_requested: Emitted when a new tab should be opened with the
                           given URL.
        load_status_changed: The loading status changed
        fullscreen_requested: Fullscreen display was requested by the page.
                              arg: True if fullscreen should be turned on,
                                   False if it should be turned off.
        renderer_process_terminated: Emitted when the underlying renderer
                                     process terminated.
                                     arg 0: A TerminationStatus member.
                                     arg 1: The exit code.
        predicted_navigation: Emitted before we tell Qt to open a URL.
    """

    window_close_requested = pyqtSignal()
    link_hovered = pyqtSignal(str)
    load_started = pyqtSignal()
    load_progress = pyqtSignal(int)
    load_finished = pyqtSignal(bool)
    icon_changed = pyqtSignal(QIcon)
    title_changed = pyqtSignal(str)
    load_status_changed = pyqtSignal(str)
    new_tab_requested = pyqtSignal(QUrl)
    url_changed = pyqtSignal(QUrl)
    shutting_down = pyqtSignal()
    contents_size_changed = pyqtSignal(QSizeF)
    add_history_item = pyqtSignal(QUrl, QUrl, str)  # url, requested url, title
    fullscreen_requested = pyqtSignal(bool)
    renderer_process_terminated = pyqtSignal(TerminationStatus, int)
    predicted_navigation = pyqtSignal(QUrl)

    # Hosts for which a certificate error happened. Shared between all tabs.
    #
    # Note that we remember hosts here, without scheme/port:
    # QtWebEngine/Chromium also only remembers hostnames, and certificates are
    # for a given hostname anyways.
    _insecure_hosts = set()  # type: typing.Set[str]

    def __init__(self, *, win_id, mode_manager, private, parent=None):
        self.private = private
        self.win_id = win_id
        self.tab_id = next(tab_id_gen)
        super().__init__(parent)

        self.registry = objreg.ObjectRegistry()
        tab_registry = objreg.get('tab-registry', scope='window',
                                  window=win_id)
        tab_registry[self.tab_id] = self
        objreg.register('tab', self, registry=self.registry)

        self.data = TabData()
        self._layout = miscwidgets.WrapperLayout(self)
        self._widget = None
        self._progress = 0
        self._mode_manager = mode_manager
        self._load_status = usertypes.LoadStatus.none
        self._mouse_event_filter = mouse.MouseEventFilter(
            self, parent=self)
        self.backend = None

        # FIXME:qtwebengine  Should this be public api via self.hints?
        # Also, should we get it out of objreg?
        hintmanager = hints.HintManager(win_id, self.tab_id, parent=self)
        objreg.register('hintmanager', hintmanager, scope='tab',
                        window=self.win_id, tab=self.tab_id)

        self.predicted_navigation.connect(self._on_predicted_navigation)

    def _set_widget(self, widget):
        """Wire up the backend widget with all tab sub-objects."""
        # pylint: disable=protected-access
        self._widget = widget
        self._layout.wrap(self, widget)
        self.history._history = widget.history()
        self.scroller._init_widget(widget)
        self.caret._widget = widget
        self.zoom._widget = widget
        self.search._widget = widget
        self.printing._widget = widget
        self.action._widget = widget
        self.elements._widget = widget
        self.audio._widget = widget
        self.settings._settings = widget.settings()
        self._install_event_filter()
        self.zoom.set_default()

    def _install_event_filter(self):
        raise NotImplementedError

    def _set_load_status(self, val):
        """Setter for load_status."""
        if not isinstance(val, usertypes.LoadStatus):
            raise TypeError("Type {} is no LoadStatus member!".format(val))
        log.webview.debug("load status for {}: {}".format(repr(self), val))
        self._load_status = val
        self.load_status_changed.emit(val.name)

    def event_target(self):
        """Return the widget events should be sent to."""
        raise NotImplementedError

    def send_event(self, evt):
        """Send the given event to the underlying widget.

        The event will be sent via QApplication.postEvent.
        Note that a posted event may not be re-used in any way!
        """
        # This only gives us some mild protection against re-using events, but
        # it's certainly better than a segfault.
        if getattr(evt, 'posted', False):
            raise utils.Unreachable("Can't re-use an event which was already "
                                    "posted!")

        recipient = self.event_target()
        if recipient is None:
            # https://github.com/qutebrowser/qutebrowser/issues/3888
            log.webview.warning("Unable to find event target!")
            return

        evt.posted = True
        QApplication.postEvent(recipient, evt)

    @pyqtSlot(QUrl)
    def _on_predicted_navigation(self, url):
        """Adjust the title if we are going to visit an URL soon."""
        qtutils.ensure_valid(url)
        url_string = url.toDisplayString()
        log.webview.debug("Predicted navigation: {}".format(url_string))
        self.title_changed.emit(url_string)

    @pyqtSlot(QUrl)
    def _on_url_changed(self, url):
        """Update title when URL has changed and no title is available."""
        if url.isValid() and not self.title():
            self.title_changed.emit(url.toDisplayString())
        self.url_changed.emit(url)

    @pyqtSlot()
    def _on_load_started(self):
        self._progress = 0
        self.data.viewing_source = False
        self._set_load_status(usertypes.LoadStatus.loading)
        self.load_started.emit()

    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(self, navigation):
        """Handle common acceptNavigationRequest code."""
        url = utils.elide(navigation.url.toDisplayString(), 100)
        log.webview.debug("navigation request: url {}, type {}, is_main_frame "
                          "{}".format(url,
                                      navigation.navigation_type,
                                      navigation.is_main_frame))

        if not navigation.url.isValid():
            # Also a WORKAROUND for missing IDNA 2008 support in QUrl, see
            # https://bugreports.qt.io/browse/QTBUG-60364
            if navigation.navigation_type == navigation.Type.link_clicked:
                msg = urlutils.get_errstring(navigation.url,
                                             "Invalid link clicked")
                message.error(msg)
                self.data.open_target = usertypes.ClickTarget.normal
            log.webview.debug("Ignoring invalid URL {} in "
                              "acceptNavigationRequest: {}".format(
                                  navigation.url.toDisplayString(),
                                  navigation.url.errorString()))
            navigation.accepted = False

    def handle_auto_insert_mode(self, ok):
        """Handle `input.insert_mode.auto_load` after loading finished."""
        if not config.val.input.insert_mode.auto_load or not ok:
            return

        cur_mode = self._mode_manager.mode
        if cur_mode == usertypes.KeyMode.insert:
            return

        def _auto_insert_mode_cb(elem):
            """Called from JS after finding the focused element."""
            if elem is None:
                log.webview.debug("No focused element!")
                return
            if elem.is_editable():
                modeman.enter(self.win_id, usertypes.KeyMode.insert,
                              'load finished', only_if_normal=True)

        # Enter insert mode if the focused element is editable.
        self.elements.find_focused(_auto_insert_mode_cb)

    @pyqtSlot(bool)
    def _on_load_finished(self, ok):
        """Update load status, session and title after a load finished."""
        if sip.isdeleted(self._widget):
            # https://github.com/qutebrowser/qutebrowser/issues/3498
            return

        sess_manager = objreg.get('session-manager')
        sess_manager.save_autosave()

        # BUGFIX: The old code had an additional `elif ok:` branch after
        # `if ok:` which could never be taken (same condition) - dead code
        # left over from the removed _has_ssl_errors handling. The host-based
        # certificate-error check below supersedes it.
        if ok:
            if self.url().scheme() == 'https':
                # A host with past certificate errors only gets 'warn', even
                # if this load succeeded.
                if self.url().host() in self._insecure_hosts:
                    self._set_load_status(usertypes.LoadStatus.warn)
                else:
                    self._set_load_status(usertypes.LoadStatus.success_https)
            else:
                self._set_load_status(usertypes.LoadStatus.success)
        else:
            self._set_load_status(usertypes.LoadStatus.error)

        self.load_finished.emit(ok)

        if not self.title():
            self.title_changed.emit(self.url().toDisplayString())

        self.zoom.set_current()

    @pyqtSlot()
    def _on_history_trigger(self):
        """Emit add_history_item when triggered by backend-specific signal."""
        raise NotImplementedError

    @pyqtSlot(int)
    def _on_load_progress(self, perc):
        self._progress = perc
        self.load_progress.emit(perc)

    def url(self, requested=False):
        raise NotImplementedError

    def progress(self):
        return self._progress

    def load_status(self):
        return self._load_status

    def _openurl_prepare(self, url, *, predict=True):
        """Validate an URL and emit predicted_navigation before opening it."""
        qtutils.ensure_valid(url)
        if predict:
            self.predicted_navigation.emit(url)

    def openurl(self, url, *, predict=True):
        raise NotImplementedError

    def reload(self, *, force=False):
        raise NotImplementedError

    def stop(self):
        raise NotImplementedError

    def clear_ssl_errors(self):
        raise NotImplementedError

    def key_press(self, key, modifier=Qt.NoModifier):
        """Send a fake key event to this tab."""
        raise NotImplementedError

    def dump_async(self, callback, *, plain=False):
        """Dump the current page's html asynchronously.

        The given callback will be called with the result when dumping is
        complete.
        """
        raise NotImplementedError

    def run_js_async(self, code, callback=None, *, world=None):
        """Run javascript async.

        The given callback will be called with the result when running JS is
        complete.

        Args:
            code: The javascript code to run.
            callback: The callback to call with the result, or None.
            world: A world ID (int or usertypes.JsWorld member) to run the JS
                   in the main world or in another isolated world.
        """
        raise NotImplementedError

    def shutdown(self):
        raise NotImplementedError

    def title(self):
        raise NotImplementedError

    def icon(self):
        raise NotImplementedError

    def set_html(self, html, base_url=QUrl()):
        raise NotImplementedError

    def networkaccessmanager(self):
        """Get the QNetworkAccessManager for this tab.

        This is only implemented for QtWebKit.
        For QtWebEngine, always returns None.
        """
        raise NotImplementedError

    def user_agent(self):
        """Get the user agent for this tab.

        This is only implemented for QtWebKit.
        For QtWebEngine, always returns None.
        """
        raise NotImplementedError

    def __repr__(self):
        try:
            url = utils.elide(self.url().toDisplayString(QUrl.EncodeUnicode),
                              100)
        except (AttributeError, RuntimeError) as exc:
            url = '<{}>'.format(exc.__class__.__name__)
        return utils.get_repr(self, tab_id=self.tab_id, url=url)

    def is_deleted(self):
        """Whether the underlying C++ widget object was already deleted."""
        return sip.isdeleted(self._widget)
| ./CrossVul/dataset_final_sorted/CWE-684/py/good_3921_0 |
crossvul-python_data_good_3914_0 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Base class for a wrapper over QWebView/QWebEngineView."""
import enum
import itertools
import typing
import functools
import attr
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, QUrl, QObject, QSizeF, Qt,
QEvent, QPoint)
from PyQt5.QtGui import QKeyEvent, QIcon
from PyQt5.QtWidgets import QWidget, QApplication, QDialog
from PyQt5.QtPrintSupport import QPrintDialog, QPrinter
from PyQt5.QtNetwork import QNetworkAccessManager
if typing.TYPE_CHECKING:
from PyQt5.QtWebKit import QWebHistory
from PyQt5.QtWebEngineWidgets import QWebEngineHistory
import pygments
import pygments.lexers
import pygments.formatters
from qutebrowser.keyinput import modeman
from qutebrowser.config import config
from qutebrowser.utils import (utils, objreg, usertypes, log, qtutils,
urlutils, message)
from qutebrowser.misc import miscwidgets, objects, sessions
from qutebrowser.browser import eventfilter
from qutebrowser.qt import sip
if typing.TYPE_CHECKING:
from qutebrowser.browser import webelem
from qutebrowser.browser.inspector import AbstractWebInspector
tab_id_gen = itertools.count(0)
def create(win_id: int,
           private: bool,
           parent: QWidget = None) -> 'AbstractTab':
    """Instantiate the tab class matching the active backend.

    Args:
        win_id: The window ID where the tab will be shown.
        private: Whether the tab is a private/off the record tab.
        parent: The Qt parent to set.
    """
    # The backend modules are imported lazily so we neither depend on
    # QtWebEngine unconditionally nor create circular imports.
    mode_manager = modeman.instance(win_id)
    use_webengine = objects.backend == usertypes.Backend.QtWebEngine
    if use_webengine:
        from qutebrowser.browser.webengine import webenginetab
        tab_class = webenginetab.WebEngineTab  # type: type
    else:
        from qutebrowser.browser.webkit import webkittab
        tab_class = webkittab.WebKitTab
    return tab_class(win_id=win_id, mode_manager=mode_manager,
                     private=private, parent=parent)
def init() -> None:
    """Initialize backend-specific modules."""
    # Only the QtWebEngine backend needs extra initialization.
    if objects.backend != usertypes.Backend.QtWebEngine:
        return
    from qutebrowser.browser.webengine import webenginetab
    webenginetab.init()
class WebTabError(Exception):

    """Base exception for all tab-related errors in this module."""
class UnsupportedOperationError(WebTabError):

    """Raised for operations the current backend does not support."""
class TerminationStatus(enum.Enum):

    """How the renderer process terminated."""

    normal = 1
    abnormal = 2  # non-zero exit status
    crashed = 3   # e.g. segfault
    killed = 4
    unknown = 5
@attr.s
class TabData:

    """A simple namespace with a fixed set of attributes.

    Attributes:
        keep_icon: Whether the (e.g. cloned) icon should not be cleared on page
                   load.
        inspector: The QWebInspector used for this webview.
        viewing_source: Set if we're currently showing a source view.
                        Only used when sources are shown via pygments.
        open_target: Where to open the next link.
                     Only used for QtWebKit.
        override_target: Override for open_target for fake clicks (like hints).
                         Only used for QtWebKit.
        pinned: Flag to pin the tab.
        fullscreen: Whether the tab has a video shown fullscreen currently.
        netrc_used: Whether netrc authentication was performed.
        input_mode: current input mode for the tab.
        last_navigation: The last NavigationRequest seen for this tab, if any.
    """

    keep_icon = attr.ib(False)  # type: bool
    viewing_source = attr.ib(False)  # type: bool
    inspector = attr.ib(None)  # type: typing.Optional[AbstractWebInspector]
    open_target = attr.ib(
        usertypes.ClickTarget.normal)  # type: usertypes.ClickTarget
    override_target = attr.ib(
        None)  # type: typing.Optional[usertypes.ClickTarget]
    pinned = attr.ib(False)  # type: bool
    fullscreen = attr.ib(False)  # type: bool
    netrc_used = attr.ib(False)  # type: bool
    input_mode = attr.ib(usertypes.KeyMode.normal)  # type: usertypes.KeyMode
    last_navigation = attr.ib(None)  # type: usertypes.NavigationRequest

    def should_show_icon(self) -> bool:
        """Whether a favicon should be shown for this tab, per config."""
        # Note 'and' binds tighter than 'or': show always, or show for pinned
        # tabs when favicons.show == 'pinned'.
        return (config.val.tabs.favicons.show == 'always' or
                config.val.tabs.favicons.show == 'pinned' and self.pinned)
class AbstractAction:

    """Attribute ``action`` of AbstractTab for Qt WebActions."""

    # The class actions are defined on (QWeb{Engine,}Page)
    action_class = None  # type: type
    # The type of the actions (QWeb{Engine,}Page.WebAction)
    action_base = None  # type: type

    def __init__(self, tab: 'AbstractTab') -> None:
        # _widget is assigned later, when the backend widget exists.
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def exit_fullscreen(self) -> None:
        """Exit the fullscreen mode."""
        raise NotImplementedError

    def save_page(self) -> None:
        """Save the current page."""
        raise NotImplementedError

    def run_string(self, name: str) -> None:
        """Run a webaction based on its name.

        Args:
            name: Name of a member of ``action_base`` looked up on
                  ``action_class``.

        Raises:
            WebTabError: If ``name`` is not a valid web action.
        """
        member = getattr(self.action_class, name, None)
        if not isinstance(member, self.action_base):
            raise WebTabError("{} is not a valid web action!".format(name))
        self._widget.triggerPageAction(member)

    def show_source(
            self,
            pygments: bool = False  # pylint: disable=redefined-outer-name
    ) -> None:
        """Show the source of the current page in a new tab."""
        raise NotImplementedError

    def _show_source_pygments(self) -> None:
        """Dump the page and show it pygments-highlighted in a new tab."""

        def show_source_cb(source: str) -> None:
            """Show source as soon as it's ready."""
            # WORKAROUND for https://github.com/PyCQA/pylint/issues/491
            # pylint: disable=no-member
            lexer = pygments.lexers.HtmlLexer()
            formatter = pygments.formatters.HtmlFormatter(
                full=True, linenos='table')
            # pylint: enable=no-member
            highlighted = pygments.highlight(source, lexer, formatter)

            tb = objreg.get('tabbed-browser', scope='window',
                            window=self._tab.win_id)
            new_tab = tb.tabopen(background=False, related=True)
            new_tab.set_html(highlighted, self._tab.url())
            new_tab.data.viewing_source = True

        self._tab.dump_async(show_source_cb)
class AbstractPrinting:

    """Attribute ``printing`` of AbstractTab for printing the page."""

    def __init__(self, tab: 'AbstractTab') -> None:
        # _widget is assigned later, when the backend widget exists.
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def check_pdf_support(self) -> None:
        """Check whether writing to PDFs is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def check_printer_support(self) -> None:
        """Check whether writing to a printer is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def check_preview_support(self) -> None:
        """Check whether showing a print preview is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def to_pdf(self, filename: str) -> bool:
        """Print the tab to a PDF with the given filename."""
        raise NotImplementedError

    def to_printer(self, printer: QPrinter,
                   callback: typing.Callable[[bool], None] = None) -> None:
        """Print the tab.

        Args:
            printer: The QPrinter to print to.
            callback: Called with a boolean
                      (True if printing succeeded, False otherwise)
        """
        raise NotImplementedError

    def show_dialog(self) -> None:
        """Print with a QPrintDialog."""
        self.check_printer_support()

        def print_callback(ok: bool) -> None:
            """Called when printing finished."""
            if not ok:
                message.error("Printing failed!")
            diag.deleteLater()

        def do_print() -> None:
            """Called when the dialog was closed."""
            self.to_printer(diag.printer(), print_callback)

        diag = QPrintDialog(self._tab)
        if utils.is_mac:
            # For some reason we get a segfault when using open() on macOS
            ret = diag.exec_()
            if ret == QDialog.Accepted:
                do_print()
        else:
            # Non-blocking: do_print() is invoked only if the dialog is
            # accepted.
            diag.open(do_print)
class AbstractSearch(QObject):

    """Attribute ``search`` of AbstractTab for doing searches.

    Attributes:
        text: The last thing this view was searched for.
        search_displayed: Whether we're currently displaying search results in
                          this view.
        _flags: The flags of the last search (needs to be set by subclasses).
        _widget: The underlying WebView widget.
    """

    #: Signal emitted when a search was finished
    #: (True if the text was found, False otherwise)
    finished = pyqtSignal(bool)
    #: Signal emitted when an existing search was cleared.
    cleared = pyqtSignal()

    # Type alias for the result callbacks used by the search methods below.
    _Callback = typing.Callable[[bool], None]

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None):
        super().__init__(parent)
        self._tab = tab
        # _widget is assigned later, when the backend widget exists.
        self._widget = typing.cast(QWidget, None)
        self.text = None  # type: typing.Optional[str]
        self.search_displayed = False

    def _is_case_sensitive(self, ignore_case: usertypes.IgnoreCase) -> bool:
        """Check if case-sensitivity should be used.

        This assumes self.text is already set properly.

        Arguments:
            ignore_case: The ignore_case value from the config.
        """
        assert self.text is not None
        # 'smart' searches case-sensitively iff the text contains an
        # upper-case character.
        mapping = {
            usertypes.IgnoreCase.smart: not self.text.islower(),
            usertypes.IgnoreCase.never: True,
            usertypes.IgnoreCase.always: False,
        }
        return mapping[ignore_case]

    def search(self, text: str, *,
               ignore_case: usertypes.IgnoreCase = usertypes.IgnoreCase.never,
               reverse: bool = False,
               wrap: bool = True,
               result_cb: _Callback = None) -> None:
        """Find the given text on the page.

        Args:
            text: The text to search for.
            ignore_case: Search case-insensitively.
            reverse: Reverse search direction.
            wrap: Allow wrapping at the top or bottom of the page.
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def clear(self) -> None:
        """Clear the current search."""
        raise NotImplementedError

    def prev_result(self, *, result_cb: _Callback = None) -> None:
        """Go to the previous result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def next_result(self, *, result_cb: _Callback = None) -> None:
        """Go to the next result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError
class AbstractZoom(QObject):

    """Attribute ``zoom`` of AbstractTab for controlling zoom."""

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None) -> None:
        super().__init__(parent)
        self._tab = tab
        # _widget is assigned later, when the backend widget exists.
        self._widget = typing.cast(QWidget, None)
        # Whether zoom was changed from the default.
        self._default_zoom_changed = False
        self._init_neighborlist()
        config.instance.changed.connect(self._on_config_changed)
        self._zoom_factor = float(config.val.zoom.default) / 100

    @pyqtSlot(str)
    def _on_config_changed(self, option: str) -> None:
        """Keep zoom state in sync when relevant config options change."""
        if option in ['zoom.levels', 'zoom.default']:
            # Only follow the new default if the user didn't zoom manually.
            if not self._default_zoom_changed:
                factor = float(config.val.zoom.default) / 100
                self.set_factor(factor)
            self._init_neighborlist()

    def _init_neighborlist(self) -> None:
        """Initialize self._neighborlist.

        It is a NeighborList with the zoom levels."""
        levels = config.val.zoom.levels
        self._neighborlist = usertypes.NeighborList(
            levels, mode=usertypes.NeighborList.Modes.edge
        )  # type: usertypes.NeighborList[float]
        self._neighborlist.fuzzyval = config.val.zoom.default

    def apply_offset(self, offset: int) -> float:
        """Increase/Decrease the zoom level by the given offset.

        Args:
            offset: The offset in the zoom level list.

        Return:
            The new zoom level.
        """
        level = self._neighborlist.getitem(offset)
        self.set_factor(float(level) / 100, fuzzyval=False)
        return level

    def _set_factor_internal(self, factor: float) -> None:
        # Implemented by the backend-specific subclass.
        raise NotImplementedError

    def set_factor(self, factor: float, *, fuzzyval: bool = True) -> None:
        """Zoom to a given zoom factor.

        Args:
            factor: The zoom factor as float.
            fuzzyval: Whether to set the NeighborLists fuzzyval.

        Raises:
            ValueError: If the factor is negative.
        """
        if fuzzyval:
            self._neighborlist.fuzzyval = int(factor * 100)
        if factor < 0:
            raise ValueError("Can't zoom to factor {}!".format(factor))

        default_zoom_factor = float(config.val.zoom.default) / 100
        self._default_zoom_changed = (factor != default_zoom_factor)

        self._zoom_factor = factor
        self._set_factor_internal(factor)

    def factor(self) -> float:
        """Get the current zoom factor (1.0 == 100%)."""
        return self._zoom_factor

    def apply_default(self) -> None:
        """Reset the zoom to the configured default factor."""
        self._set_factor_internal(float(config.val.zoom.default) / 100)

    def reapply(self) -> None:
        """Re-apply the current zoom factor to the widget."""
        self._set_factor_internal(self._zoom_factor)
class AbstractCaret(QObject):

    """Attribute ``caret`` of AbstractTab for caret browsing."""

    #: Signal emitted when the selection was toggled.
    #: (argument - whether the selection is now active)
    selection_toggled = pyqtSignal(bool)
    #: Emitted when a ``follow_selection`` action is done.
    follow_selected_done = pyqtSignal()

    def __init__(self,
                 tab: 'AbstractTab',
                 mode_manager: modeman.ModeManager,
                 parent: QWidget = None) -> None:
        super().__init__(parent)
        self._tab = tab
        # _widget is assigned later, when the backend widget exists.
        self._widget = typing.cast(QWidget, None)
        self.selection_enabled = False
        self._mode_manager = mode_manager
        # Caret mode is entered/left via the mode manager; subclasses react
        # in _on_mode_entered/_on_mode_left.
        mode_manager.entered.connect(self._on_mode_entered)
        mode_manager.left.connect(self._on_mode_left)

    def _on_mode_entered(self, mode: usertypes.KeyMode) -> None:
        raise NotImplementedError

    def _on_mode_left(self, mode: usertypes.KeyMode) -> None:
        raise NotImplementedError

    # The movement/selection methods below are all implemented by the
    # QtWebKit/QtWebEngine subclasses; counts denote repetitions.

    def move_to_next_line(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_line(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_next_char(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_char(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_next_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_line(self) -> None:
        raise NotImplementedError

    def move_to_end_of_line(self) -> None:
        raise NotImplementedError

    def move_to_start_of_next_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_prev_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_next_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_prev_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_document(self) -> None:
        raise NotImplementedError

    def move_to_end_of_document(self) -> None:
        raise NotImplementedError

    def toggle_selection(self) -> None:
        raise NotImplementedError

    def drop_selection(self) -> None:
        raise NotImplementedError

    def selection(self, callback: typing.Callable[[str], None]) -> None:
        raise NotImplementedError

    def reverse_selection(self) -> None:
        raise NotImplementedError

    def _follow_enter(self, tab: bool) -> None:
        """Follow a link by faking an enter press."""
        if tab:
            self._tab.fake_key_press(Qt.Key_Enter, modifier=Qt.ControlModifier)
        else:
            self._tab.fake_key_press(Qt.Key_Enter)

    def follow_selected(self, *, tab: bool = False) -> None:
        raise NotImplementedError
class AbstractScroller(QObject):

    """Attribute ``scroller`` of AbstractTab to manage scroll position."""

    #: Signal emitted when the scroll position changed (int, int)
    perc_changed = pyqtSignal(int, int)
    #: Signal emitted before the user requested a jump.
    #: Used to set the special ' mark so the user can return.
    before_jump_requested = pyqtSignal()

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None):
        super().__init__(parent)
        self._tab = tab
        # _widget is assigned in _init_widget, once the backend widget exists.
        self._widget = typing.cast(QWidget, None)
        # Extra debug logging of scroll positions, only with the
        # 'log-scroll-pos' debug flag.
        if 'log-scroll-pos' in objects.debug_flags:
            self.perc_changed.connect(self._log_scroll_pos_change)

    @pyqtSlot()
    def _log_scroll_pos_change(self) -> None:
        log.webview.vdebug(  # type: ignore
            "Scroll position changed to {}".format(self.pos_px()))

    def _init_widget(self, widget: QWidget) -> None:
        self._widget = widget

    # The methods below are all implemented by the backend-specific
    # subclasses; counts denote repetitions.

    def pos_px(self) -> int:
        raise NotImplementedError

    def pos_perc(self) -> int:
        raise NotImplementedError

    def to_perc(self, x: int = None, y: int = None) -> None:
        raise NotImplementedError

    def to_point(self, point: QPoint) -> None:
        raise NotImplementedError

    def to_anchor(self, name: str) -> None:
        raise NotImplementedError

    def delta(self, x: int = 0, y: int = 0) -> None:
        raise NotImplementedError

    def delta_page(self, x: float = 0, y: float = 0) -> None:
        raise NotImplementedError

    def up(self, count: int = 1) -> None:
        raise NotImplementedError

    def down(self, count: int = 1) -> None:
        raise NotImplementedError

    def left(self, count: int = 1) -> None:
        raise NotImplementedError

    def right(self, count: int = 1) -> None:
        raise NotImplementedError

    def top(self) -> None:
        raise NotImplementedError

    def bottom(self) -> None:
        raise NotImplementedError

    def page_up(self, count: int = 1) -> None:
        raise NotImplementedError

    def page_down(self, count: int = 1) -> None:
        raise NotImplementedError

    def at_top(self) -> bool:
        raise NotImplementedError

    def at_bottom(self) -> bool:
        raise NotImplementedError
class AbstractHistoryPrivate:

    """Internal (non-extension) API for a tab's history."""

    def __init__(self, tab: 'AbstractTab'):
        self._tab = tab
        # Filled in by _set_widget with the backend's history object.
        self._history = typing.cast(
            typing.Union['QWebHistory', 'QWebEngineHistory'], None)

    def serialize(self) -> bytes:
        """Dump the history into an opaque blob readable by deserialize()."""
        raise NotImplementedError

    def deserialize(self, data: bytes) -> None:
        """Restore the history from a blob produced by serialize()."""
        raise NotImplementedError

    def load_items(self, items: typing.Sequence) -> None:
        """Restore the history from a list of WebHistoryItems."""
        raise NotImplementedError
class AbstractHistory:

    """The history attribute of a AbstractTab."""

    def __init__(self, tab: 'AbstractTab') -> None:
        self._tab = tab
        # Filled in by _set_widget with the backend's history object.
        self._history = typing.cast(
            typing.Union['QWebHistory', 'QWebEngineHistory'], None)
        self.private_api = AbstractHistoryPrivate(tab)

    def __len__(self) -> int:
        raise NotImplementedError

    def __iter__(self) -> typing.Iterable:
        raise NotImplementedError

    def _check_count(self, count: int) -> None:
        """Check whether the count is positive.

        Raises:
            WebTabError: If count is negative.
        """
        if count < 0:
            raise WebTabError("count needs to be positive!")

    def current_idx(self) -> int:
        raise NotImplementedError

    def back(self, count: int = 1) -> None:
        """Go back in the tab's history.

        Raises:
            WebTabError: If there aren't enough older items; in that case we
                         still navigate to the oldest item first.
        """
        self._check_count(count)
        idx = self.current_idx() - count
        if idx >= 0:
            self._go_to_item(self._item_at(idx))
        else:
            # Clamp to the first item before raising so the user still ends
            # up as far back as possible.
            self._go_to_item(self._item_at(0))
            raise WebTabError("At beginning of history.")

    def forward(self, count: int = 1) -> None:
        """Go forward in the tab's history.

        Raises:
            WebTabError: If there aren't enough newer items; in that case we
                         still navigate to the newest item first.
        """
        self._check_count(count)
        idx = self.current_idx() + count
        if idx < len(self):
            self._go_to_item(self._item_at(idx))
        else:
            # Clamp to the last item before raising, mirroring back().
            self._go_to_item(self._item_at(len(self) - 1))
            raise WebTabError("At end of history.")

    def can_go_back(self) -> bool:
        raise NotImplementedError

    def can_go_forward(self) -> bool:
        raise NotImplementedError

    def _item_at(self, i: int) -> typing.Any:
        raise NotImplementedError

    def _go_to_item(self, item: typing.Any) -> None:
        raise NotImplementedError
class AbstractElements:
"""Finding and handling of elements on the page."""
_MultiCallback = typing.Callable[
[typing.Sequence['webelem.AbstractWebElement']], None]
_SingleCallback = typing.Callable[
[typing.Optional['webelem.AbstractWebElement']], None]
_ErrorCallback = typing.Callable[[Exception], None]
def __init__(self, tab: 'AbstractTab') -> None:
self._widget = typing.cast(QWidget, None)
self._tab = tab
def find_css(self, selector: str,
callback: _MultiCallback,
error_cb: _ErrorCallback, *,
only_visible: bool = False) -> None:
"""Find all HTML elements matching a given selector async.
If there's an error, the callback is called with a webelem.Error
instance.
Args:
callback: The callback to be called when the search finished.
error_cb: The callback to be called when an error occurred.
selector: The CSS selector to search for.
only_visible: Only show elements which are visible on screen.
"""
raise NotImplementedError
def find_id(self, elem_id: str, callback: _SingleCallback) -> None:
"""Find the HTML element with the given ID async.
Args:
callback: The callback to be called when the search finished.
Called with a WebEngineElement or None.
elem_id: The ID to search for.
"""
raise NotImplementedError
def find_focused(self, callback: _SingleCallback) -> None:
"""Find the focused element on the page async.
Args:
callback: The callback to be called when the search finished.
Called with a WebEngineElement or None.
"""
raise NotImplementedError
def find_at_pos(self, pos: QPoint, callback: _SingleCallback) -> None:
"""Find the element at the given position async.
This is also called "hit test" elsewhere.
Args:
pos: The QPoint to get the element for.
callback: The callback to be called when the search finished.
Called with a WebEngineElement or None.
"""
raise NotImplementedError
class AbstractAudio(QObject):

    """Audio state (muting / audibility) handling for this tab."""

    muted_changed = pyqtSignal(bool)
    recently_audible_changed = pyqtSignal(bool)

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None) -> None:
        super().__init__(parent)
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def set_muted(self, muted: bool, override: bool = False) -> None:
        """Mute or unmute this tab.

        Arguments:
            override: If set to True, muting/unmuting was done manually and
                      overrides future automatic mute/unmute changes based on
                      the URL.
        """
        raise NotImplementedError

    def is_muted(self) -> bool:
        """Return whether this tab is currently muted."""
        raise NotImplementedError

    def is_recently_audible(self) -> bool:
        """Return whether this tab has had audio playing recently."""
        raise NotImplementedError
class AbstractTabPrivate:

    """Tab-related methods which are only needed in the core.

    Those methods are not part of the API which is exposed to extensions, and
    should ideally be removed at some point in the future.
    """

    def __init__(self, mode_manager: modeman.ModeManager,
                 tab: 'AbstractTab') -> None:
        # _widget is assigned later, when the backend widget exists.
        self._widget = typing.cast(QWidget, None)
        self._tab = tab
        self._mode_manager = mode_manager

    def event_target(self) -> QWidget:
        """Return the widget events should be sent to."""
        raise NotImplementedError

    def handle_auto_insert_mode(self, ok: bool) -> None:
        """Handle `input.insert_mode.auto_load` after loading finished.

        Args:
            ok: Whether the page load finished successfully.
        """
        if not ok or not config.cache['input.insert_mode.auto_load']:
            return

        cur_mode = self._mode_manager.mode
        if cur_mode == usertypes.KeyMode.insert:
            return

        def _auto_insert_mode_cb(
                elem: typing.Optional['webelem.AbstractWebElement']
        ) -> None:
            """Called from JS after finding the focused element."""
            if elem is None:
                log.webview.debug("No focused element!")
                return
            if elem.is_editable():
                modeman.enter(self._tab.win_id, usertypes.KeyMode.insert,
                              'load finished', only_if_normal=True)

        # Enter insert mode if the focused element turns out to be editable.
        self._tab.elements.find_focused(_auto_insert_mode_cb)

    def clear_ssl_errors(self) -> None:
        raise NotImplementedError

    def networkaccessmanager(self) -> typing.Optional[QNetworkAccessManager]:
        """Get the QNetworkAccessManager for this tab.

        This is only implemented for QtWebKit.
        For QtWebEngine, always returns None.
        """
        raise NotImplementedError

    def shutdown(self) -> None:
        raise NotImplementedError
class AbstractTab(QWidget):

    """An adapter for QWebView/QWebEngineView representing a single tab."""

    #: Signal emitted when a website requests to close this tab.
    window_close_requested = pyqtSignal()
    #: Signal emitted when a link is hovered (the hover text)
    link_hovered = pyqtSignal(str)
    #: Signal emitted when a page started loading
    load_started = pyqtSignal()
    #: Signal emitted when a page is loading (progress percentage)
    load_progress = pyqtSignal(int)
    #: Signal emitted when a page finished loading (success as bool)
    load_finished = pyqtSignal(bool)
    #: Signal emitted when a page's favicon changed (icon as QIcon)
    icon_changed = pyqtSignal(QIcon)
    #: Signal emitted when a page's title changed (new title as str)
    title_changed = pyqtSignal(str)
    #: Signal emitted when a new tab should be opened (url as QUrl)
    new_tab_requested = pyqtSignal(QUrl)
    #: Signal emitted when a page's URL changed (url as QUrl)
    url_changed = pyqtSignal(QUrl)
    #: Signal emitted when a tab's content size changed
    #: (new size as QSizeF)
    contents_size_changed = pyqtSignal(QSizeF)
    #: Signal emitted when a page requested full-screen (bool)
    fullscreen_requested = pyqtSignal(bool)
    #: Signal emitted before load starts (URL as QUrl)
    before_load_started = pyqtSignal(QUrl)
    # Signal emitted when a page's load status changed
    # (argument: usertypes.LoadStatus)
    load_status_changed = pyqtSignal(usertypes.LoadStatus)
    # Signal emitted before shutting down
    shutting_down = pyqtSignal()
    # Signal emitted when a history item should be added
    history_item_triggered = pyqtSignal(QUrl, QUrl, str)
    # Signal emitted when the underlying renderer process terminated.
    # arg 0: A TerminationStatus member.
    # arg 1: The exit code.
    renderer_process_terminated = pyqtSignal(TerminationStatus, int)

    # Hosts for which a certificate error happened. Shared between all tabs.
    #
    # Note that we remember hosts here, without scheme/port:
    # QtWebEngine/Chromium also only remembers hostnames, and certificates are
    # for a given hostname anyways.
    _insecure_hosts = set()  # type: typing.Set[str]

    def __init__(self, *, win_id: int, private: bool,
                 parent: QWidget = None) -> None:
        self.is_private = private
        self.win_id = win_id
        self.tab_id = next(tab_id_gen)
        super().__init__(parent)

        self.registry = objreg.ObjectRegistry()
        tab_registry = objreg.get('tab-registry', scope='window',
                                  window=win_id)
        tab_registry[self.tab_id] = self
        objreg.register('tab', self, registry=self.registry)

        self.data = TabData()
        self._layout = miscwidgets.WrapperLayout(self)
        # Replaced with the real view widget via _set_widget().
        self._widget = typing.cast(QWidget, None)
        self._progress = 0
        self._load_status = usertypes.LoadStatus.none
        self._tab_event_filter = eventfilter.TabEventFilter(
            self, parent=self)
        self.backend = None  # type: typing.Optional[usertypes.Backend]

        # If true, this tab has been requested to be removed (or is removed).
        self.pending_removal = False
        self.shutting_down.connect(functools.partial(
            setattr, self, 'pending_removal', True))

        self.before_load_started.connect(self._on_before_load_started)

    def _set_widget(self, widget: QWidget) -> None:
        """Wire the concrete view widget into this tab and its sub-objects."""
        # pylint: disable=protected-access
        self._widget = widget
        self._layout.wrap(self, widget)
        self.history._history = widget.history()
        self.history.private_api._history = widget.history()
        self.scroller._init_widget(widget)
        self.caret._widget = widget
        self.zoom._widget = widget
        self.search._widget = widget
        self.printing._widget = widget
        self.action._widget = widget
        self.elements._widget = widget
        self.audio._widget = widget
        self.private_api._widget = widget
        self.settings._settings = widget.settings()
        self._install_event_filter()
        self.zoom.apply_default()

    def _install_event_filter(self) -> None:
        """Install the backend-specific event filter on the widget."""
        raise NotImplementedError

    def _set_load_status(self, val: usertypes.LoadStatus) -> None:
        """Setter for load_status."""
        if not isinstance(val, usertypes.LoadStatus):
            raise TypeError("Type {} is no LoadStatus member!".format(val))
        log.webview.debug("load status for {}: {}".format(repr(self), val))
        self._load_status = val
        self.load_status_changed.emit(val)

    def send_event(self, evt: QEvent) -> None:
        """Send the given event to the underlying widget.

        The event will be sent via QApplication.postEvent.
        Note that a posted event must not be re-used in any way!
        """
        # This only gives us some mild protection against re-using events, but
        # it's certainly better than a segfault.
        if getattr(evt, 'posted', False):
            raise utils.Unreachable("Can't re-use an event which was already "
                                    "posted!")

        recipient = self.private_api.event_target()
        if recipient is None:
            # https://github.com/qutebrowser/qutebrowser/issues/3888
            log.webview.warning("Unable to find event target!")
            return

        evt.posted = True
        QApplication.postEvent(recipient, evt)

    def navigation_blocked(self) -> bool:
        """Test if navigation is allowed on the current tab."""
        return self.data.pinned and config.val.tabs.pinned.frozen

    @pyqtSlot(QUrl)
    def _on_before_load_started(self, url: QUrl) -> None:
        """Adjust the title if we are going to visit a URL soon."""
        qtutils.ensure_valid(url)
        url_string = url.toDisplayString()
        log.webview.debug("Going to start loading: {}".format(url_string))
        self.title_changed.emit(url_string)

    @pyqtSlot(QUrl)
    def _on_url_changed(self, url: QUrl) -> None:
        """Update title when URL has changed and no title is available."""
        if url.isValid() and not self.title():
            self.title_changed.emit(url.toDisplayString())
        self.url_changed.emit(url)

    @pyqtSlot()
    def _on_load_started(self) -> None:
        """Reset per-load state when a new page load begins."""
        self._progress = 0
        self.data.viewing_source = False
        self._set_load_status(usertypes.LoadStatus.loading)
        self.load_started.emit()

    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(
            self,
            navigation: usertypes.NavigationRequest
    ) -> None:
        """Handle common acceptNavigationRequest code."""
        url = utils.elide(navigation.url.toDisplayString(), 100)
        log.webview.debug("navigation request: url {}, type {}, is_main_frame "
                          "{}".format(url,
                                      navigation.navigation_type,
                                      navigation.is_main_frame))

        if navigation.is_main_frame:
            self.data.last_navigation = navigation

        if not navigation.url.isValid():
            # Also a WORKAROUND for missing IDNA 2008 support in QUrl, see
            # https://bugreports.qt.io/browse/QTBUG-60364
            if navigation.navigation_type == navigation.Type.link_clicked:
                msg = urlutils.get_errstring(navigation.url,
                                             "Invalid link clicked")
                message.error(msg)
                self.data.open_target = usertypes.ClickTarget.normal
            log.webview.debug("Ignoring invalid URL {} in "
                              "acceptNavigationRequest: {}".format(
                                  navigation.url.toDisplayString(),
                                  navigation.url.errorString()))
            navigation.accepted = False

    @pyqtSlot(bool)
    def _on_load_finished(self, ok: bool) -> None:
        """Handle common loadFinished work: autosave, signals, zoom."""
        assert self._widget is not None
        if sip.isdeleted(self._widget):
            # https://github.com/qutebrowser/qutebrowser/issues/3498
            return

        if sessions.session_manager is not None:
            sessions.session_manager.save_autosave()

        self.load_finished.emit(ok)

        if not self.title():
            self.title_changed.emit(self.url().toDisplayString())

        self.zoom.reapply()

    def _update_load_status(self, ok: bool) -> None:
        """Update the load status after a page finished loading.

        Needs to be called by subclasses to trigger a load status update, e.g.
        as a response to a loadFinished signal.
        """
        # FIX: removed a dead `elif ok:` branch which could never be reached
        # after `if ok:` (leftover from an older `if ok and not
        # self._has_ssl_errors:` condition).
        if ok:
            if self.url().scheme() == 'https':
                # A certificate error was overridden for this host before, so
                # we only show a warning status despite the https scheme.
                if self.url().host() in self._insecure_hosts:
                    self._set_load_status(usertypes.LoadStatus.warn)
                else:
                    self._set_load_status(usertypes.LoadStatus.success_https)
            else:
                self._set_load_status(usertypes.LoadStatus.success)
        else:
            self._set_load_status(usertypes.LoadStatus.error)

    @pyqtSlot()
    def _on_history_trigger(self) -> None:
        """Emit history_item_triggered based on backend-specific signal."""
        raise NotImplementedError

    @pyqtSlot(int)
    def _on_load_progress(self, perc: int) -> None:
        """Remember and forward the current load percentage."""
        self._progress = perc
        self.load_progress.emit(perc)

    def url(self, *, requested: bool = False) -> QUrl:
        """Get the current (or requested) URL of this tab."""
        raise NotImplementedError

    def progress(self) -> int:
        """Get the current load progress in percent."""
        return self._progress

    def load_status(self) -> usertypes.LoadStatus:
        """Get the current load status of this tab."""
        return self._load_status

    def _load_url_prepare(self, url: QUrl, *,
                          emit_before_load_started: bool = True) -> None:
        """Common preparation before a URL is loaded by a subclass."""
        qtutils.ensure_valid(url)
        if emit_before_load_started:
            self.before_load_started.emit(url)

    def load_url(self, url: QUrl, *,
                 emit_before_load_started: bool = True) -> None:
        """Load the given URL in this tab."""
        raise NotImplementedError

    def reload(self, *, force: bool = False) -> None:
        """Reload the current page, bypassing the cache if force is set."""
        raise NotImplementedError

    def stop(self) -> None:
        """Stop loading the current page."""
        raise NotImplementedError

    def fake_key_press(self,
                       key: Qt.Key,
                       modifier: Qt.KeyboardModifier = Qt.NoModifier) -> None:
        """Send a fake key event to this tab."""
        press_evt = QKeyEvent(QEvent.KeyPress, key, modifier, 0, 0, 0)
        release_evt = QKeyEvent(QEvent.KeyRelease, key, modifier,
                                0, 0, 0)
        self.send_event(press_evt)
        self.send_event(release_evt)

    def dump_async(self,
                   callback: typing.Callable[[str], None], *,
                   plain: bool = False) -> None:
        """Dump the current page's html asynchronously.

        The given callback will be called with the result when dumping is
        complete.
        """
        raise NotImplementedError

    def run_js_async(
            self,
            code: str,
            callback: typing.Callable[[typing.Any], None] = None, *,
            world: typing.Union[usertypes.JsWorld, int] = None
    ) -> None:
        """Run javascript async.

        The given callback will be called with the result when running JS is
        complete.

        Args:
            code: The javascript code to run.
            callback: The callback to call with the result, or None.
            world: A world ID (int or usertypes.JsWorld member) to run the JS
                   in the main world or in another isolated world.
        """
        raise NotImplementedError

    def title(self) -> str:
        """Get the current page title."""
        raise NotImplementedError

    def icon(self) -> None:
        """Get the current page favicon."""
        raise NotImplementedError

    def set_html(self, html: str, base_url: QUrl = QUrl()) -> None:
        """Set the displayed page content to the given HTML string."""
        raise NotImplementedError

    def __repr__(self) -> str:
        try:
            qurl = self.url()
            url = qurl.toDisplayString(QUrl.EncodeUnicode)  # type: ignore
        except (AttributeError, RuntimeError) as exc:
            # The widget (or wrapped C++ object) might already be gone.
            url = '<{}>'.format(exc.__class__.__name__)
        else:
            url = utils.elide(url, 100)
        return utils.get_repr(self, tab_id=self.tab_id, url=url)

    def is_deleted(self) -> bool:
        """Whether the underlying C++ widget object was deleted."""
        assert self._widget is not None
        return sip.isdeleted(self._widget)
| ./CrossVul/dataset_final_sorted/CWE-684/py/good_3914_0 |
crossvul-python_data_good_3919_0 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Base class for a wrapper over QWebView/QWebEngineView."""
import enum
import itertools
import typing
import functools
import attr
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, QUrl, QObject, QSizeF, Qt,
QEvent, QPoint)
from PyQt5.QtGui import QKeyEvent, QIcon
from PyQt5.QtWidgets import QWidget, QApplication, QDialog
from PyQt5.QtPrintSupport import QPrintDialog, QPrinter
from PyQt5.QtNetwork import QNetworkAccessManager
if typing.TYPE_CHECKING:
from PyQt5.QtWebKit import QWebHistory
from PyQt5.QtWebEngineWidgets import QWebEngineHistory
import pygments
import pygments.lexers
import pygments.formatters
from qutebrowser.keyinput import modeman
from qutebrowser.config import config
from qutebrowser.utils import (utils, objreg, usertypes, log, qtutils,
urlutils, message)
from qutebrowser.misc import miscwidgets, objects, sessions
from qutebrowser.browser import eventfilter
from qutebrowser.qt import sip
if typing.TYPE_CHECKING:
from qutebrowser.browser import webelem
from qutebrowser.browser.inspector import AbstractWebInspector
tab_id_gen = itertools.count(0)
def create(win_id: int,
           private: bool,
           parent: QWidget = None) -> 'AbstractTab':
    """Construct the tab implementation matching the active backend.

    Args:
        win_id: The window ID where the tab will be shown.
        private: Whether the tab is a private/off the record tab.
        parent: The Qt parent to set.
    """
    # The backend modules are imported lazily so that the unused backend is
    # never pulled in, and so that we avoid circular imports at load time.
    if objects.backend == usertypes.Backend.QtWebEngine:
        from qutebrowser.browser.webengine import webenginetab
        klass = webenginetab.WebEngineTab  # type: type
    else:
        from qutebrowser.browser.webkit import webkittab
        klass = webkittab.WebKitTab
    return klass(win_id=win_id,
                 mode_manager=modeman.instance(win_id),
                 private=private,
                 parent=parent)
def init() -> None:
    """Initialize backend-specific modules."""
    # Only QtWebEngine needs module-level initialization; QtWebKit does not.
    if objects.backend != usertypes.Backend.QtWebEngine:
        return
    from qutebrowser.browser.webengine import webenginetab
    webenginetab.init()
class WebTabError(Exception):

    """Base class for various errors raised by the tab API."""
class UnsupportedOperationError(WebTabError):

    """Raised when an operation is not supported with the given backend."""
class TerminationStatus(enum.Enum):

    """How the underlying renderer process ended."""

    normal = 1
    abnormal = 2   # non-zero exit status
    crashed = 3    # e.g. segfault
    killed = 4
    unknown = 5
@attr.s
class TabData:

    """A simple namespace with a fixed set of attributes.

    Attributes:
        keep_icon: Whether the (e.g. cloned) icon should not be cleared on page
                   load.
        inspector: The QWebInspector used for this webview.
        viewing_source: Set if we're currently showing a source view.
                        Only used when sources are shown via pygments.
        open_target: Where to open the next link.
                     Only used for QtWebKit.
        override_target: Override for open_target for fake clicks (like hints).
                         Only used for QtWebKit.
        pinned: Flag to pin the tab.
        fullscreen: Whether the tab has a video shown fullscreen currently.
        netrc_used: Whether netrc authentication was performed.
        input_mode: current input mode for the tab.
        last_navigation: The last navigation request seen for this tab.
    """

    # NOTE: attribute order defines the attrs-generated __init__ signature;
    # do not reorder.
    keep_icon = attr.ib(False)  # type: bool
    viewing_source = attr.ib(False)  # type: bool
    inspector = attr.ib(None)  # type: typing.Optional[AbstractWebInspector]
    open_target = attr.ib(
        usertypes.ClickTarget.normal)  # type: usertypes.ClickTarget
    override_target = attr.ib(
        None)  # type: typing.Optional[usertypes.ClickTarget]
    pinned = attr.ib(False)  # type: bool
    fullscreen = attr.ib(False)  # type: bool
    netrc_used = attr.ib(False)  # type: bool
    input_mode = attr.ib(usertypes.KeyMode.normal)  # type: usertypes.KeyMode
    last_navigation = attr.ib(None)  # type: usertypes.NavigationRequest

    def should_show_icon(self) -> bool:
        """Whether the favicon should be shown for this tab."""
        # `and` binds tighter than `or`: show if "always", or if "pinned"
        # and this tab is actually pinned.
        return (config.val.tabs.favicons.show == 'always' or
                config.val.tabs.favicons.show == 'pinned' and self.pinned)
class AbstractAction:

    """Attribute ``action`` of AbstractTab for Qt WebActions."""

    # The class actions are defined on (QWeb{Engine,}Page)
    action_class = None  # type: type
    # The type of the actions (QWeb{Engine,}Page.WebAction)
    action_base = None  # type: type

    def __init__(self, tab: 'AbstractTab') -> None:
        # Set to the real view widget later via AbstractTab._set_widget().
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def exit_fullscreen(self) -> None:
        """Exit the fullscreen mode."""
        raise NotImplementedError

    def save_page(self) -> None:
        """Save the current page."""
        raise NotImplementedError

    def run_string(self, name: str) -> None:
        """Run a webaction based on its name."""
        member = getattr(self.action_class, name, None)
        # isinstance also rejects None, i.e. an unknown action name.
        if not isinstance(member, self.action_base):
            raise WebTabError("{} is not a valid web action!".format(name))
        self._widget.triggerPageAction(member)

    def show_source(
            self,
            pygments: bool = False  # pylint: disable=redefined-outer-name
    ) -> None:
        """Show the source of the current page in a new tab."""
        raise NotImplementedError

    def _show_source_pygments(self) -> None:
        """Show the page source highlighted via pygments in a new tab."""

        def show_source_cb(source: str) -> None:
            """Show source as soon as it's ready."""
            # WORKAROUND for https://github.com/PyCQA/pylint/issues/491
            # pylint: disable=no-member
            lexer = pygments.lexers.HtmlLexer()
            formatter = pygments.formatters.HtmlFormatter(
                full=True, linenos='table')
            # pylint: enable=no-member
            highlighted = pygments.highlight(source, lexer, formatter)

            tb = objreg.get('tabbed-browser', scope='window',
                            window=self._tab.win_id)
            new_tab = tb.tabopen(background=False, related=True)
            new_tab.set_html(highlighted, self._tab.url())
            new_tab.data.viewing_source = True

        # The page source arrives asynchronously via the callback above.
        self._tab.dump_async(show_source_cb)
class AbstractPrinting:

    """Attribute ``printing`` of AbstractTab for printing the page."""

    def __init__(self, tab: 'AbstractTab') -> None:
        # Set to the real view widget later via AbstractTab._set_widget().
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def check_pdf_support(self) -> None:
        """Check whether writing to PDFs is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def check_printer_support(self) -> None:
        """Check whether writing to a printer is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def check_preview_support(self) -> None:
        """Check whether showing a print preview is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def to_pdf(self, filename: str) -> bool:
        """Print the tab to a PDF with the given filename."""
        raise NotImplementedError

    def to_printer(self, printer: QPrinter,
                   callback: typing.Callable[[bool], None] = None) -> None:
        """Print the tab.

        Args:
            printer: The QPrinter to print to.
            callback: Called with a boolean
                      (True if printing succeeded, False otherwise)
        """
        raise NotImplementedError

    def show_dialog(self) -> None:
        """Print with a QPrintDialog."""
        self.check_printer_support()

        def print_callback(ok: bool) -> None:
            """Called when printing finished."""
            if not ok:
                message.error("Printing failed!")
            # The dialog is kept alive until printing finished, then freed.
            diag.deleteLater()

        def do_print() -> None:
            """Called when the dialog was closed."""
            self.to_printer(diag.printer(), print_callback)

        diag = QPrintDialog(self._tab)
        if utils.is_mac:
            # For some reason we get a segfault when using open() on macOS
            ret = diag.exec_()
            if ret == QDialog.Accepted:
                do_print()
        else:
            diag.open(do_print)
class AbstractSearch(QObject):

    """Attribute ``search`` of AbstractTab for doing searches.

    Attributes:
        text: The last thing this view was searched for.
        search_displayed: Whether we're currently displaying search results in
                          this view.
        _flags: The flags of the last search (needs to be set by subclasses).
        _widget: The underlying WebView widget.
    """

    #: Signal emitted when a search was finished
    #: (True if the text was found, False otherwise)
    finished = pyqtSignal(bool)
    #: Signal emitted when an existing search was cleared.
    cleared = pyqtSignal()

    #: Type of the result callbacks below (called with "match found?").
    _Callback = typing.Callable[[bool], None]

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None):
        super().__init__(parent)
        self._tab = tab
        self._widget = typing.cast(QWidget, None)
        self.text = None  # type: typing.Optional[str]
        self.search_displayed = False

    def _is_case_sensitive(self, ignore_case: usertypes.IgnoreCase) -> bool:
        """Check if case-sensitivity should be used.

        This assumes self.text is already set properly.

        Arguments:
            ignore_case: The ignore_case value from the config.
        """
        assert self.text is not None
        # "smart" is case-sensitive only if the search term has an uppercase
        # character; note the mapping returns *sensitivity*, so values are
        # inverted relative to "ignore case".
        mapping = {
            usertypes.IgnoreCase.smart: not self.text.islower(),
            usertypes.IgnoreCase.never: True,
            usertypes.IgnoreCase.always: False,
        }
        return mapping[ignore_case]

    def search(self, text: str, *,
               ignore_case: usertypes.IgnoreCase = usertypes.IgnoreCase.never,
               reverse: bool = False,
               wrap: bool = True,
               result_cb: _Callback = None) -> None:
        """Find the given text on the page.

        Args:
            text: The text to search for.
            ignore_case: Search case-insensitively.
            reverse: Reverse search direction.
            wrap: Allow wrapping at the top or bottom of the page.
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def clear(self) -> None:
        """Clear the current search."""
        raise NotImplementedError

    def prev_result(self, *, result_cb: _Callback = None) -> None:
        """Go to the previous result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def next_result(self, *, result_cb: _Callback = None) -> None:
        """Go to the next result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError
class AbstractZoom(QObject):

    """Attribute ``zoom`` of AbstractTab for controlling zoom."""

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None) -> None:
        super().__init__(parent)
        self._tab = tab
        self._widget = typing.cast(QWidget, None)
        # Whether zoom was changed from the default.
        self._default_zoom_changed = False
        self._init_neighborlist()
        config.instance.changed.connect(self._on_config_changed)
        self._zoom_factor = float(config.val.zoom.default) / 100

    @pyqtSlot(str)
    def _on_config_changed(self, option: str) -> None:
        """Re-apply default zoom and rebuild the level list on config change."""
        if option in ['zoom.levels', 'zoom.default']:
            # Don't clobber a zoom the user set manually for this tab.
            if not self._default_zoom_changed:
                factor = float(config.val.zoom.default) / 100
                self.set_factor(factor)
            self._init_neighborlist()

    def _init_neighborlist(self) -> None:
        """Initialize self._neighborlist.

        It is a NeighborList with the zoom levels."""
        levels = config.val.zoom.levels
        self._neighborlist = usertypes.NeighborList(
            levels, mode=usertypes.NeighborList.Modes.edge
        )  # type: usertypes.NeighborList[float]
        self._neighborlist.fuzzyval = config.val.zoom.default

    def apply_offset(self, offset: int) -> float:
        """Increase/Decrease the zoom level by the given offset.

        Args:
            offset: The offset in the zoom level list.

        Return:
            The new zoom level.
        """
        level = self._neighborlist.getitem(offset)
        self.set_factor(float(level) / 100, fuzzyval=False)
        return level

    def _set_factor_internal(self, factor: float) -> None:
        """Apply the zoom factor to the backend widget."""
        raise NotImplementedError

    def set_factor(self, factor: float, *, fuzzyval: bool = True) -> None:
        """Zoom to a given zoom factor.

        Args:
            factor: The zoom factor as float.
            fuzzyval: Whether to set the NeighborLists fuzzyval.
        """
        if fuzzyval:
            self._neighborlist.fuzzyval = int(factor * 100)
        if factor < 0:
            raise ValueError("Can't zoom to factor {}!".format(factor))

        default_zoom_factor = float(config.val.zoom.default) / 100
        self._default_zoom_changed = (factor != default_zoom_factor)

        self._zoom_factor = factor
        self._set_factor_internal(factor)

    def factor(self) -> float:
        """Get the current zoom factor."""
        return self._zoom_factor

    def apply_default(self) -> None:
        """Reset the zoom to the configured default factor."""
        self._set_factor_internal(float(config.val.zoom.default) / 100)

    def reapply(self) -> None:
        """Re-apply the remembered zoom factor (e.g. after a page load)."""
        self._set_factor_internal(self._zoom_factor)
class AbstractCaret(QObject):

    """Attribute ``caret`` of AbstractTab for caret browsing."""

    #: Signal emitted when the selection was toggled.
    #: (argument - whether the selection is now active)
    selection_toggled = pyqtSignal(bool)
    #: Emitted when a ``follow_selection`` action is done.
    follow_selected_done = pyqtSignal()

    def __init__(self,
                 tab: 'AbstractTab',
                 mode_manager: modeman.ModeManager,
                 parent: QWidget = None) -> None:
        super().__init__(parent)
        self._tab = tab
        self._widget = typing.cast(QWidget, None)
        self.selection_enabled = False
        self._mode_manager = mode_manager
        # Caret state follows caret-mode entry/exit.
        mode_manager.entered.connect(self._on_mode_entered)
        mode_manager.left.connect(self._on_mode_left)

    def _on_mode_entered(self, mode: usertypes.KeyMode) -> None:
        raise NotImplementedError

    def _on_mode_left(self, mode: usertypes.KeyMode) -> None:
        raise NotImplementedError

    # Caret movement primitives -- each backend implements these.

    def move_to_next_line(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_line(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_next_char(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_char(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_next_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_line(self) -> None:
        raise NotImplementedError

    def move_to_end_of_line(self) -> None:
        raise NotImplementedError

    def move_to_start_of_next_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_prev_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_next_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_prev_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_document(self) -> None:
        raise NotImplementedError

    def move_to_end_of_document(self) -> None:
        raise NotImplementedError

    # Selection handling -- each backend implements these.

    def toggle_selection(self) -> None:
        raise NotImplementedError

    def drop_selection(self) -> None:
        raise NotImplementedError

    def selection(self, callback: typing.Callable[[str], None]) -> None:
        raise NotImplementedError

    def reverse_selection(self) -> None:
        raise NotImplementedError

    def _follow_enter(self, tab: bool) -> None:
        """Follow a link by faking an enter press."""
        if tab:
            self._tab.fake_key_press(Qt.Key_Enter, modifier=Qt.ControlModifier)
        else:
            self._tab.fake_key_press(Qt.Key_Enter)

    def follow_selected(self, *, tab: bool = False) -> None:
        raise NotImplementedError
class AbstractScroller(QObject):

    """Attribute ``scroller`` of AbstractTab to manage scroll position."""

    #: Signal emitted when the scroll position changed (int, int)
    perc_changed = pyqtSignal(int, int)
    #: Signal emitted before the user requested a jump.
    #: Used to set the special ' mark so the user can return.
    before_jump_requested = pyqtSignal()

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None):
        super().__init__(parent)
        self._tab = tab
        self._widget = typing.cast(QWidget, None)
        # Optional debug logging of every scroll position change.
        if 'log-scroll-pos' in objects.debug_flags:
            self.perc_changed.connect(self._log_scroll_pos_change)

    @pyqtSlot()
    def _log_scroll_pos_change(self) -> None:
        log.webview.vdebug(  # type: ignore
            "Scroll position changed to {}".format(self.pos_px()))

    def _init_widget(self, widget: QWidget) -> None:
        self._widget = widget

    # Scroll queries and movements -- each backend implements these.

    def pos_px(self) -> int:
        raise NotImplementedError

    def pos_perc(self) -> int:
        raise NotImplementedError

    def to_perc(self, x: int = None, y: int = None) -> None:
        raise NotImplementedError

    def to_point(self, point: QPoint) -> None:
        raise NotImplementedError

    def to_anchor(self, name: str) -> None:
        raise NotImplementedError

    def delta(self, x: int = 0, y: int = 0) -> None:
        raise NotImplementedError

    def delta_page(self, x: float = 0, y: float = 0) -> None:
        raise NotImplementedError

    def up(self, count: int = 1) -> None:
        raise NotImplementedError

    def down(self, count: int = 1) -> None:
        raise NotImplementedError

    def left(self, count: int = 1) -> None:
        raise NotImplementedError

    def right(self, count: int = 1) -> None:
        raise NotImplementedError

    def top(self) -> None:
        raise NotImplementedError

    def bottom(self) -> None:
        raise NotImplementedError

    def page_up(self, count: int = 1) -> None:
        raise NotImplementedError

    def page_down(self, count: int = 1) -> None:
        raise NotImplementedError

    def at_top(self) -> bool:
        raise NotImplementedError

    def at_bottom(self) -> bool:
        raise NotImplementedError
class AbstractHistoryPrivate:

    """Private API related to the history."""

    def __init__(self, tab: 'AbstractTab'):
        self._tab = tab
        # Set later by AbstractTab._set_widget() to the backend history.
        self._history = typing.cast(
            typing.Union['QWebHistory', 'QWebEngineHistory'], None)

    def serialize(self) -> bytes:
        """Serialize into an opaque format understood by self.deserialize."""
        raise NotImplementedError

    def deserialize(self, data: bytes) -> None:
        """Deserialize from a format produced by self.serialize."""
        raise NotImplementedError

    def load_items(self, items: typing.Sequence) -> None:
        """Deserialize from a list of WebHistoryItems."""
        raise NotImplementedError
class AbstractHistory:

    """The history attribute of a AbstractTab."""

    def __init__(self, tab: 'AbstractTab') -> None:
        self._tab = tab
        # Set later by AbstractTab._set_widget() to the backend history.
        self._history = typing.cast(
            typing.Union['QWebHistory', 'QWebEngineHistory'], None)
        self.private_api = AbstractHistoryPrivate(tab)

    def __len__(self) -> int:
        raise NotImplementedError

    def __iter__(self) -> typing.Iterable:
        raise NotImplementedError

    def _check_count(self, count: int) -> None:
        """Check that the given count is not negative."""
        if count < 0:
            raise WebTabError("count needs to be positive!")

    def current_idx(self) -> int:
        raise NotImplementedError

    def back(self, count: int = 1) -> None:
        """Go back in the tab's history."""
        self._check_count(count)
        idx = self.current_idx() - count
        if idx >= 0:
            self._go_to_item(self._item_at(idx))
        else:
            # Go as far back as possible, then report the overshoot.
            self._go_to_item(self._item_at(0))
            raise WebTabError("At beginning of history.")

    def forward(self, count: int = 1) -> None:
        """Go forward in the tab's history."""
        self._check_count(count)
        idx = self.current_idx() + count
        if idx < len(self):
            self._go_to_item(self._item_at(idx))
        else:
            # Go as far forward as possible, then report the overshoot.
            self._go_to_item(self._item_at(len(self) - 1))
            raise WebTabError("At end of history.")

    def can_go_back(self) -> bool:
        raise NotImplementedError

    def can_go_forward(self) -> bool:
        raise NotImplementedError

    def _item_at(self, i: int) -> typing.Any:
        raise NotImplementedError

    def _go_to_item(self, item: typing.Any) -> None:
        raise NotImplementedError
class AbstractElements:

    """Finding and handling of elements on the page."""

    #: Callback type for searches returning multiple elements.
    _MultiCallback = typing.Callable[
        [typing.Sequence['webelem.AbstractWebElement']], None]
    #: Callback type for searches returning a single (or no) element.
    _SingleCallback = typing.Callable[
        [typing.Optional['webelem.AbstractWebElement']], None]
    #: Callback type invoked with the exception when a search failed.
    _ErrorCallback = typing.Callable[[Exception], None]

    def __init__(self, tab: 'AbstractTab') -> None:
        # Set to the real view widget later via AbstractTab._set_widget().
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def find_css(self, selector: str,
                 callback: _MultiCallback,
                 error_cb: _ErrorCallback, *,
                 only_visible: bool = False) -> None:
        """Find all HTML elements matching a given selector async.

        If there's an error, the callback is called with a webelem.Error
        instance.

        Args:
            callback: The callback to be called when the search finished.
            error_cb: The callback to be called when an error occurred.
            selector: The CSS selector to search for.
            only_visible: Only show elements which are visible on screen.
        """
        raise NotImplementedError

    def find_id(self, elem_id: str, callback: _SingleCallback) -> None:
        """Find the HTML element with the given ID async.

        Args:
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
            elem_id: The ID to search for.
        """
        raise NotImplementedError

    def find_focused(self, callback: _SingleCallback) -> None:
        """Find the focused element on the page async.

        Args:
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
        """
        raise NotImplementedError

    def find_at_pos(self, pos: QPoint, callback: _SingleCallback) -> None:
        """Find the element at the given position async.

        This is also called "hit test" elsewhere.

        Args:
            pos: The QPoint to get the element for.
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
        """
        raise NotImplementedError
class AbstractAudio(QObject):

    """Handling of audio/muting for this tab."""

    #: Emitted when the muted state changed (new state as bool).
    muted_changed = pyqtSignal(bool)
    #: Emitted when the "recently audible" state changed (new state as bool).
    recently_audible_changed = pyqtSignal(bool)

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None) -> None:
        super().__init__(parent)
        # Set to the real view widget later via AbstractTab._set_widget().
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def set_muted(self, muted: bool, override: bool = False) -> None:
        """Set this tab as muted or not.

        Arguments:
            override: If set to True, muting/unmuting was done manually and
                      overrides future automatic mute/unmute changes based on
                      the URL.
        """
        raise NotImplementedError

    def is_muted(self) -> bool:
        """Whether this tab is currently muted."""
        raise NotImplementedError

    def is_recently_audible(self) -> bool:
        """Whether this tab has had audio playing recently."""
        raise NotImplementedError
class AbstractTabPrivate:

    """Tab-related methods which are only needed in the core.

    Those methods are not part of the API which is exposed to extensions, and
    should ideally be removed at some point in the future.
    """

    def __init__(self, mode_manager: modeman.ModeManager,
                 tab: 'AbstractTab') -> None:
        # Filled in later via _set_widget on the owning tab.
        self._widget = typing.cast(QWidget, None)
        self._tab = tab
        self._mode_manager = mode_manager

    def event_target(self) -> QWidget:
        """Return the widget events should be sent to."""
        raise NotImplementedError

    def handle_auto_insert_mode(self, ok: bool) -> None:
        """Handle `input.insert_mode.auto_load` after loading finished."""
        if not ok:
            return
        if not config.cache['input.insert_mode.auto_load']:
            return
        if self._mode_manager.mode == usertypes.KeyMode.insert:
            # Already in insert mode, nothing to do.
            return

        def _on_focused_elem(
                elem: typing.Optional['webelem.AbstractWebElement']
        ) -> None:
            """Called from JS after finding the focused element."""
            if elem is None:
                log.webview.debug("No focused element!")
                return
            if elem.is_editable():
                modeman.enter(self._tab.win_id, usertypes.KeyMode.insert,
                              'load finished', only_if_normal=True)

        self._tab.elements.find_focused(_on_focused_elem)

    def clear_ssl_errors(self) -> None:
        raise NotImplementedError

    def networkaccessmanager(self) -> typing.Optional[QNetworkAccessManager]:
        """Get the QNetworkAccessManager for this tab.

        This is only implemented for QtWebKit.
        For QtWebEngine, always returns None.
        """
        raise NotImplementedError

    def shutdown(self) -> None:
        raise NotImplementedError
class AbstractTab(QWidget):

    """An adapter for QWebView/QWebEngineView representing a single tab."""

    #: Signal emitted when a website requests to close this tab.
    window_close_requested = pyqtSignal()
    #: Signal emitted when a link is hovered (the hover text)
    link_hovered = pyqtSignal(str)
    #: Signal emitted when a page started loading
    load_started = pyqtSignal()
    #: Signal emitted when a page is loading (progress percentage)
    load_progress = pyqtSignal(int)
    #: Signal emitted when a page finished loading (success as bool)
    load_finished = pyqtSignal(bool)
    #: Signal emitted when a page's favicon changed (icon as QIcon)
    icon_changed = pyqtSignal(QIcon)
    #: Signal emitted when a page's title changed (new title as str)
    title_changed = pyqtSignal(str)
    #: Signal emitted when a new tab should be opened (url as QUrl)
    new_tab_requested = pyqtSignal(QUrl)
    #: Signal emitted when a page's URL changed (url as QUrl)
    url_changed = pyqtSignal(QUrl)
    #: Signal emitted when a tab's content size changed
    #: (new size as QSizeF)
    contents_size_changed = pyqtSignal(QSizeF)
    #: Signal emitted when a page requested full-screen (bool)
    fullscreen_requested = pyqtSignal(bool)
    #: Signal emitted before load starts (URL as QUrl)
    before_load_started = pyqtSignal(QUrl)

    # Signal emitted when a page's load status changed
    # (argument: usertypes.LoadStatus)
    load_status_changed = pyqtSignal(usertypes.LoadStatus)
    # Signal emitted before shutting down
    shutting_down = pyqtSignal()
    # Signal emitted when a history item should be added
    history_item_triggered = pyqtSignal(QUrl, QUrl, str)
    # Signal emitted when the underlying renderer process terminated.
    # arg 0: A TerminationStatus member.
    # arg 1: The exit code.
    renderer_process_terminated = pyqtSignal(TerminationStatus, int)

    # Hosts for which a certificate error happened. Shared between all tabs.
    #
    # Note that we remember hosts here, without scheme/port:
    # QtWebEngine/Chromium also only remembers hostnames, and certificates are
    # for a given hostname anyways.
    _insecure_hosts = set()  # type: typing.Set[str]

    def __init__(self, *, win_id: int, private: bool,
                 parent: QWidget = None) -> None:
        self.is_private = private
        self.win_id = win_id
        self.tab_id = next(tab_id_gen)
        super().__init__(parent)

        self.registry = objreg.ObjectRegistry()
        tab_registry = objreg.get('tab-registry', scope='window',
                                  window=win_id)
        tab_registry[self.tab_id] = self
        objreg.register('tab', self, registry=self.registry)

        self.data = TabData()
        self._layout = miscwidgets.WrapperLayout(self)
        # The wrapped QWebView/QWebEngineView; set later via _set_widget.
        self._widget = typing.cast(QWidget, None)
        self._progress = 0
        self._load_status = usertypes.LoadStatus.none
        self._tab_event_filter = eventfilter.TabEventFilter(
            self, parent=self)
        self.backend = None  # type: typing.Optional[usertypes.Backend]

        # If true, this tab has been requested to be removed (or is removed).
        self.pending_removal = False
        self.shutting_down.connect(functools.partial(
            setattr, self, 'pending_removal', True))

        self.before_load_started.connect(self._on_before_load_started)

    def _set_widget(self, widget: QWidget) -> None:
        """Wire the backend widget into this tab and all its sub-objects."""
        # pylint: disable=protected-access
        self._widget = widget
        self._layout.wrap(self, widget)
        self.history._history = widget.history()
        self.history.private_api._history = widget.history()
        self.scroller._init_widget(widget)
        self.caret._widget = widget
        self.zoom._widget = widget
        self.search._widget = widget
        self.printing._widget = widget
        self.action._widget = widget
        self.elements._widget = widget
        self.audio._widget = widget
        self.private_api._widget = widget
        self.settings._settings = widget.settings()

        self._install_event_filter()
        self.zoom.apply_default()

    def _install_event_filter(self) -> None:
        raise NotImplementedError

    def _set_load_status(self, val: usertypes.LoadStatus) -> None:
        """Setter for load_status."""
        if not isinstance(val, usertypes.LoadStatus):
            raise TypeError("Type {} is no LoadStatus member!".format(val))
        log.webview.debug("load status for {}: {}".format(repr(self), val))
        self._load_status = val
        self.load_status_changed.emit(val)

    def send_event(self, evt: QEvent) -> None:
        """Send the given event to the underlying widget.

        The event will be sent via QApplication.postEvent.
        Note that a posted event must not be re-used in any way!
        """
        # This only gives us some mild protection against re-using events, but
        # it's certainly better than a segfault.
        if getattr(evt, 'posted', False):
            raise utils.Unreachable("Can't re-use an event which was already "
                                    "posted!")

        recipient = self.private_api.event_target()
        if recipient is None:
            # https://github.com/qutebrowser/qutebrowser/issues/3888
            log.webview.warning("Unable to find event target!")
            return

        evt.posted = True
        QApplication.postEvent(recipient, evt)

    def navigation_blocked(self) -> bool:
        """Test if navigation is allowed on the current tab."""
        return self.data.pinned and config.val.tabs.pinned.frozen

    @pyqtSlot(QUrl)
    def _on_before_load_started(self, url: QUrl) -> None:
        """Adjust the title if we are going to visit a URL soon."""
        qtutils.ensure_valid(url)
        url_string = url.toDisplayString()
        log.webview.debug("Going to start loading: {}".format(url_string))
        self.title_changed.emit(url_string)

    @pyqtSlot(QUrl)
    def _on_url_changed(self, url: QUrl) -> None:
        """Update title when URL has changed and no title is available."""
        if url.isValid() and not self.title():
            self.title_changed.emit(url.toDisplayString())
        self.url_changed.emit(url)

    @pyqtSlot()
    def _on_load_started(self) -> None:
        self._progress = 0
        self.data.viewing_source = False
        self._set_load_status(usertypes.LoadStatus.loading)
        self.load_started.emit()

    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(
            self,
            navigation: usertypes.NavigationRequest
    ) -> None:
        """Handle common acceptNavigationRequest code."""
        url = utils.elide(navigation.url.toDisplayString(), 100)
        log.webview.debug("navigation request: url {}, type {}, is_main_frame "
                          "{}".format(url,
                                      navigation.navigation_type,
                                      navigation.is_main_frame))

        if navigation.is_main_frame:
            self.data.last_navigation = navigation

        if not navigation.url.isValid():
            # Also a WORKAROUND for missing IDNA 2008 support in QUrl, see
            # https://bugreports.qt.io/browse/QTBUG-60364
            if navigation.navigation_type == navigation.Type.link_clicked:
                msg = urlutils.get_errstring(navigation.url,
                                             "Invalid link clicked")
                message.error(msg)
                self.data.open_target = usertypes.ClickTarget.normal

            log.webview.debug("Ignoring invalid URL {} in "
                              "acceptNavigationRequest: {}".format(
                                  navigation.url.toDisplayString(),
                                  navigation.url.errorString()))
            navigation.accepted = False

    @pyqtSlot(bool)
    def _on_load_finished(self, ok: bool) -> None:
        assert self._widget is not None
        if sip.isdeleted(self._widget):
            # https://github.com/qutebrowser/qutebrowser/issues/3498
            return

        if sessions.session_manager is not None:
            sessions.session_manager.save_autosave()

        self.load_finished.emit(ok)

        if not self.title():
            self.title_changed.emit(self.url().toDisplayString())

        self.zoom.reapply()

    def _update_load_status(self, ok: bool) -> None:
        """Update the load status after a page finished loading.

        Needs to be called by subclasses to trigger a load status update, e.g.
        as a response to a loadFinished signal.
        """
        # FIX: the original code had a dead `elif ok:` branch after `if ok:`
        # (a leftover from the pre-_insecure_hosts implementation) which could
        # never be reached; it has been removed.
        if not ok:
            self._set_load_status(usertypes.LoadStatus.error)
        elif self.url().scheme() == 'https':
            if self.url().host() in self._insecure_hosts:
                # A certificate error happened for this host before.
                self._set_load_status(usertypes.LoadStatus.warn)
            else:
                self._set_load_status(usertypes.LoadStatus.success_https)
        else:
            self._set_load_status(usertypes.LoadStatus.success)

    @pyqtSlot()
    def _on_history_trigger(self) -> None:
        """Emit history_item_triggered based on backend-specific signal."""
        raise NotImplementedError

    @pyqtSlot(int)
    def _on_load_progress(self, perc: int) -> None:
        self._progress = perc
        self.load_progress.emit(perc)

    def url(self, *, requested: bool = False) -> QUrl:
        raise NotImplementedError

    def progress(self) -> int:
        return self._progress

    def load_status(self) -> usertypes.LoadStatus:
        return self._load_status

    def _load_url_prepare(self, url: QUrl, *,
                          emit_before_load_started: bool = True) -> None:
        qtutils.ensure_valid(url)
        if emit_before_load_started:
            self.before_load_started.emit(url)

    def load_url(self, url: QUrl, *,
                 emit_before_load_started: bool = True) -> None:
        raise NotImplementedError

    def reload(self, *, force: bool = False) -> None:
        raise NotImplementedError

    def stop(self) -> None:
        raise NotImplementedError

    def fake_key_press(self,
                       key: Qt.Key,
                       modifier: Qt.KeyboardModifier = Qt.NoModifier) -> None:
        """Send a fake key event to this tab."""
        press_evt = QKeyEvent(QEvent.KeyPress, key, modifier, 0, 0, 0)
        release_evt = QKeyEvent(QEvent.KeyRelease, key, modifier,
                                0, 0, 0)
        self.send_event(press_evt)
        self.send_event(release_evt)

    def dump_async(self,
                   callback: typing.Callable[[str], None], *,
                   plain: bool = False) -> None:
        """Dump the current page's html asynchronously.

        The given callback will be called with the result when dumping is
        complete.
        """
        raise NotImplementedError

    def run_js_async(
            self,
            code: str,
            callback: typing.Callable[[typing.Any], None] = None, *,
            world: typing.Union[usertypes.JsWorld, int] = None
    ) -> None:
        """Run javascript async.

        The given callback will be called with the result when running JS is
        complete.

        Args:
            code: The javascript code to run.
            callback: The callback to call with the result, or None.
            world: A world ID (int or usertypes.JsWorld member) to run the JS
                   in the main world or in another isolated world.
        """
        raise NotImplementedError

    def title(self) -> str:
        raise NotImplementedError

    def icon(self) -> QIcon:
        # FIX: annotation corrected from `-> None`; this is a getter, and the
        # icon_changed signal above shows favicons are QIcon instances.
        raise NotImplementedError

    def set_html(self, html: str, base_url: QUrl = QUrl()) -> None:
        raise NotImplementedError

    def __repr__(self) -> str:
        try:
            qurl = self.url()
            url = qurl.toDisplayString(QUrl.EncodeUnicode)  # type: ignore
        except (AttributeError, RuntimeError) as exc:
            url = '<{}>'.format(exc.__class__.__name__)
        else:
            url = utils.elide(url, 100)
        return utils.get_repr(self, tab_id=self.tab_id, url=url)

    def is_deleted(self) -> bool:
        assert self._widget is not None
        return sip.isdeleted(self._widget)
# (dataset artifact, neutralized) ./CrossVul/dataset_final_sorted/CWE-684/py/good_3919_0
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2019 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Base class for a wrapper over QWebView/QWebEngineView."""
import enum
import itertools
import typing
import attr
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, QUrl, QObject, QSizeF, Qt,
QEvent, QPoint)
from PyQt5.QtGui import QKeyEvent, QIcon
from PyQt5.QtWidgets import QWidget, QApplication, QDialog
from PyQt5.QtPrintSupport import QPrintDialog, QPrinter
from PyQt5.QtNetwork import QNetworkAccessManager
import pygments
import pygments.lexers
import pygments.formatters
from qutebrowser.keyinput import modeman
from qutebrowser.config import config
from qutebrowser.utils import (utils, objreg, usertypes, log, qtutils,
urlutils, message)
from qutebrowser.misc import miscwidgets, objects
from qutebrowser.browser import mouse, hints
from qutebrowser.qt import sip
# WORKAROUND: a manual MYPY flag is used instead of typing.TYPE_CHECKING —
# per the comment below, pylint can't interpret type comments with
# Python 3.7, so these imports exist purely for annotations/type comments.
MYPY = False
if MYPY:
    # pylint can't interpret type comments with Python 3.7
    # pylint: disable=unused-import,useless-suppression
    from qutebrowser.browser import webelem
    from qutebrowser.browser.inspector import AbstractWebInspector

# Process-wide generator yielding a unique ID for every tab ever created.
tab_id_gen = itertools.count(0)
def create(win_id: int,
           private: bool,
           parent: QWidget = None) -> 'AbstractTab':
    """Instantiate the tab class matching the active backend.

    Args:
        win_id: The window ID where the tab will be shown.
        private: Whether the tab is a private/off the record tab.
        parent: The Qt parent to set.
    """
    mode_manager = modeman.instance(win_id)
    # The backend module is imported lazily so we don't depend on QtWebEngine
    # unconditionally, and to avoid circular imports.
    if objects.backend == usertypes.Backend.QtWebEngine:
        from qutebrowser.browser.webengine import webenginetab
        klass = webenginetab.WebEngineTab
    else:
        from qutebrowser.browser.webkit import webkittab
        klass = webkittab.WebKitTab
    return klass(win_id=win_id, mode_manager=mode_manager, private=private,
                 parent=parent)
def init() -> None:
    """Initialize backend-specific modules."""
    if objects.backend != usertypes.Backend.QtWebEngine:
        # Only the QtWebEngine backend needs extra initialization.
        return
    from qutebrowser.browser.webengine import webenginetab
    webenginetab.init()
class WebTabError(Exception):

    """Root of the exception hierarchy used by web tab objects."""
class UnsupportedOperationError(WebTabError):

    """Raised for operations the currently used backend can't perform."""
class TerminationStatus(enum.Enum):

    """How the renderer process ended (same members/values as before)."""

    normal = 1
    abnormal = 2   # non-zero exit status
    crashed = 3    # e.g. segfault
    killed = 4
    unknown = 5
@attr.s
class TabData:

    """A simple namespace with a fixed set of attributes.

    Attributes:
        keep_icon: Whether the (e.g. cloned) icon should not be cleared on page
                   load.
        inspector: The QWebInspector used for this webview.
        viewing_source: Set if we're currently showing a source view.
                        Only used when sources are shown via pygments.
        open_target: Where to open the next link.
                     Only used for QtWebKit.
        override_target: Override for open_target for fake clicks (like hints).
                         Only used for QtWebKit.
        pinned: Flag to pin the tab.
        fullscreen: Whether the tab has a video shown fullscreen currently.
        netrc_used: Whether netrc authentication was performed.
        input_mode: current input mode for the tab.
    """

    # NOTE: attr.ib declaration order defines the generated __init__
    # signature — don't reorder these.
    keep_icon = attr.ib(False)  # type: bool
    viewing_source = attr.ib(False)  # type: bool
    inspector = attr.ib(None)  # type: typing.Optional[AbstractWebInspector]
    open_target = attr.ib(
        usertypes.ClickTarget.normal)  # type: usertypes.ClickTarget
    override_target = attr.ib(None)  # type: usertypes.ClickTarget
    pinned = attr.ib(False)  # type: bool
    fullscreen = attr.ib(False)  # type: bool
    netrc_used = attr.ib(False)  # type: bool
    input_mode = attr.ib(usertypes.KeyMode.normal)  # type: usertypes.KeyMode

    def should_show_icon(self) -> bool:
        # Precedence: `and` binds tighter than `or`, i.e. this reads
        # show == 'always' or (show == 'pinned' and self.pinned).
        return (config.val.tabs.favicons.show == 'always' or
                config.val.tabs.favicons.show == 'pinned' and self.pinned)
class AbstractAction:

    """Attribute ``action`` of AbstractTab for Qt WebActions."""

    # The class actions are defined on (QWeb{Engine,}Page)
    action_class = None  # type: type
    # The type of the actions (QWeb{Engine,}Page.WebAction)
    action_base = None  # type: type

    def __init__(self, tab: 'AbstractTab') -> None:
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def exit_fullscreen(self) -> None:
        """Exit the fullscreen mode."""
        raise NotImplementedError

    def save_page(self) -> None:
        """Save the current page."""
        raise NotImplementedError

    def run_string(self, name: str) -> None:
        """Trigger a web action given by its string name."""
        action = getattr(self.action_class, name, None)
        if not isinstance(action, self.action_base):
            raise WebTabError("{} is not a valid web action!".format(name))
        self._widget.triggerPageAction(action)

    def show_source(
            self,
            pygments: bool = False  # pylint: disable=redefined-outer-name
    ) -> None:
        """Show the source of the current page in a new tab."""
        raise NotImplementedError

    def _show_source_pygments(self) -> None:

        def highlight_source_cb(source: str) -> None:
            """Open a new tab with the pygments-highlighted source."""
            # WORKAROUND for https://github.com/PyCQA/pylint/issues/491
            # pylint: disable=no-member
            lexer = pygments.lexers.HtmlLexer()
            formatter = pygments.formatters.HtmlFormatter(
                full=True, linenos='table')
            # pylint: enable=no-member
            highlighted = pygments.highlight(source, lexer, formatter)
            tabbed_browser = objreg.get('tabbed-browser', scope='window',
                                        window=self._tab.win_id)
            source_tab = tabbed_browser.tabopen(background=False, related=True)
            source_tab.set_html(highlighted, self._tab.url())
            source_tab.data.viewing_source = True

        self._tab.dump_async(highlight_source_cb)
class AbstractPrinting:

    """Attribute ``printing`` of AbstractTab for printing the page."""

    def __init__(self, tab: 'AbstractTab') -> None:
        self._widget = None
        self._tab = tab

    def check_pdf_support(self) -> None:
        """Check whether writing to PDFs is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def check_printer_support(self) -> None:
        """Check whether writing to a printer is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def check_preview_support(self) -> None:
        """Check whether showing a print preview is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def to_pdf(self, filename: str) -> bool:
        """Print the tab to a PDF with the given filename."""
        raise NotImplementedError

    def to_printer(self, printer: QPrinter,
                   callback: typing.Callable[[bool], None] = None) -> None:
        """Print the tab.

        Args:
            printer: The QPrinter to print to.
            callback: Called with a boolean
                      (True if printing succeeded, False otherwise)
        """
        raise NotImplementedError

    def show_dialog(self) -> None:
        """Print with a QPrintDialog."""
        self.check_printer_support()

        def print_callback(ok: bool) -> None:
            """Report failures and dispose of the dialog once done."""
            if not ok:
                message.error("Printing failed!")
            dialog.deleteLater()

        def do_print() -> None:
            """Start printing with the printer selected in the dialog."""
            self.to_printer(dialog.printer(), print_callback)

        dialog = QPrintDialog(self._tab)
        if not utils.is_mac:
            dialog.open(do_print)
        else:
            # For some reason we get a segfault when using open() on macOS
            if dialog.exec_() == QDialog.Accepted:
                do_print()
class AbstractSearch(QObject):

    """Attribute ``search`` of AbstractTab for doing searches.

    Attributes:
        text: The last thing this view was searched for.
        search_displayed: Whether we're currently displaying search results in
                          this view.
        _flags: The flags of the last search (needs to be set by subclasses).
        _widget: The underlying WebView widget.
    """

    #: Signal emitted when a search was finished
    #: (True if the text was found, False otherwise)
    finished = pyqtSignal(bool)
    #: Signal emitted when an existing search was cleared.
    cleared = pyqtSignal()

    _Callback = typing.Callable[[bool], None]

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None):
        super().__init__(parent)
        self._tab = tab
        self._widget = None
        self.text = None  # type: typing.Optional[str]
        self.search_displayed = False

    def _is_case_sensitive(self, ignore_case: usertypes.IgnoreCase) -> bool:
        """Decide whether the search should be case-sensitive.

        This assumes self.text is already set properly.

        Arguments:
            ignore_case: The ignore_case value from the config.
        """
        assert self.text is not None
        sensitivity = {
            # "smart": sensitive as soon as the query has an upper-case char.
            usertypes.IgnoreCase.smart: not self.text.islower(),
            usertypes.IgnoreCase.never: True,
            usertypes.IgnoreCase.always: False,
        }
        return sensitivity[ignore_case]

    def search(self, text: str, *,
               ignore_case: usertypes.IgnoreCase = usertypes.IgnoreCase.never,
               reverse: bool = False,
               result_cb: _Callback = None) -> None:
        """Find the given text on the page.

        Args:
            text: The text to search for.
            ignore_case: Search case-insensitively.
            reverse: Reverse search direction.
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def clear(self) -> None:
        """Clear the current search."""
        raise NotImplementedError

    def prev_result(self, *, result_cb: _Callback = None) -> None:
        """Go to the previous result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def next_result(self, *, result_cb: _Callback = None) -> None:
        """Go to the next result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError
class AbstractZoom(QObject):

    """Attribute ``zoom`` of AbstractTab for controlling zoom."""

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None) -> None:
        super().__init__(parent)
        self._tab = tab
        self._widget = None
        # Whether zoom was changed from the default.
        self._default_zoom_changed = False
        self._init_neighborlist()
        config.instance.changed.connect(self._on_config_changed)
        self._zoom_factor = float(config.val.zoom.default) / 100

    @pyqtSlot(str)
    def _on_config_changed(self, option: str) -> None:
        """Re-apply the default zoom/levels when the config changed."""
        if option in ['zoom.levels', 'zoom.default']:
            if not self._default_zoom_changed:
                factor = float(config.val.zoom.default) / 100
                self.set_factor(factor)
            self._init_neighborlist()

    def _init_neighborlist(self) -> None:
        """Initialize self._neighborlist.

        It is a NeighborList with the zoom levels."""
        levels = config.val.zoom.levels
        self._neighborlist = usertypes.NeighborList(
            levels, mode=usertypes.NeighborList.Modes.edge)
        self._neighborlist.fuzzyval = config.val.zoom.default

    def apply_offset(self, offset: int) -> int:
        """Increase/Decrease the zoom level by the given offset.

        Args:
            offset: The offset in the zoom level list.

        Return:
            The new zoom percentage.
        """
        # FIX: the return annotation was `-> None`, but the method documents
        # and returns the new zoom percentage — corrected to `-> int`.
        level = self._neighborlist.getitem(offset)
        self.set_factor(float(level) / 100, fuzzyval=False)
        return level

    def _set_factor_internal(self, factor: float) -> None:
        raise NotImplementedError

    def set_factor(self, factor: float, *, fuzzyval: bool = True) -> None:
        """Zoom to a given zoom factor.

        Args:
            factor: The zoom factor as float.
            fuzzyval: Whether to set the NeighborLists fuzzyval.
        """
        if fuzzyval:
            self._neighborlist.fuzzyval = int(factor * 100)
        if factor < 0:
            raise ValueError("Can't zoom to factor {}!".format(factor))

        default_zoom_factor = float(config.val.zoom.default) / 100
        self._default_zoom_changed = (factor != default_zoom_factor)

        self._zoom_factor = factor
        self._set_factor_internal(factor)

    def factor(self) -> float:
        """Return the current zoom factor."""
        return self._zoom_factor

    def apply_default(self) -> None:
        """Reset the zoom to the configured default factor."""
        self._set_factor_internal(float(config.val.zoom.default) / 100)

    def reapply(self) -> None:
        """Re-apply the current zoom factor to the widget."""
        self._set_factor_internal(self._zoom_factor)
class AbstractCaret(QObject):

    """Attribute ``caret`` of AbstractTab for caret browsing."""

    #: Signal emitted when the selection was toggled.
    #: (argument - whether the selection is now active)
    selection_toggled = pyqtSignal(bool)
    #: Emitted when a ``follow_selection`` action is done.
    follow_selected_done = pyqtSignal()

    def __init__(self,
                 tab: 'AbstractTab',
                 mode_manager: modeman.ModeManager,
                 parent: QWidget = None) -> None:
        super().__init__(parent)
        self._tab = tab
        self._widget = None
        self.selection_enabled = False
        mode_manager.entered.connect(self._on_mode_entered)
        mode_manager.left.connect(self._on_mode_left)

    def _on_mode_entered(self, mode: usertypes.KeyMode) -> None:
        raise NotImplementedError

    def _on_mode_left(self, mode: usertypes.KeyMode) -> None:
        raise NotImplementedError

    def move_to_next_line(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_line(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_next_char(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_char(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_next_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_line(self) -> None:
        raise NotImplementedError

    def move_to_end_of_line(self) -> None:
        raise NotImplementedError

    def move_to_start_of_next_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_prev_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_next_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_prev_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_document(self) -> None:
        raise NotImplementedError

    def move_to_end_of_document(self) -> None:
        raise NotImplementedError

    def toggle_selection(self) -> None:
        raise NotImplementedError

    def drop_selection(self) -> None:
        raise NotImplementedError

    def selection(self, callback: typing.Callable[[str], None]) -> None:
        raise NotImplementedError

    def _follow_enter(self, tab: bool) -> None:
        """Follow a link by faking an enter press."""
        # Ctrl-Enter opens in a new tab; Qt.NoModifier matches the
        # fake_key_press default used by the plain-enter path before.
        modifier = Qt.ControlModifier if tab else Qt.NoModifier
        self._tab.fake_key_press(Qt.Key_Enter, modifier=modifier)

    def follow_selected(self, *, tab: bool = False) -> None:
        raise NotImplementedError
class AbstractScroller(QObject):

    """Attribute ``scroller`` of AbstractTab to manage scroll position."""

    #: Signal emitted when the scroll position changed (int, int)
    perc_changed = pyqtSignal(int, int)
    #: Signal emitted before the user requested a jump.
    #: Used to set the special ' mark so the user can return.
    before_jump_requested = pyqtSignal()

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None):
        super().__init__(parent)
        self._widget = None  # type: typing.Optional[QWidget]
        self._tab = tab
        self.perc_changed.connect(self._log_scroll_pos_change)

    @pyqtSlot()
    def _log_scroll_pos_change(self) -> None:
        """Log every scroll position change at vdebug level."""
        log.webview.vdebug(  # type: ignore
            "Scroll position changed to {}".format(self.pos_px()))

    def _init_widget(self, widget: QWidget) -> None:
        self._widget = widget

    def pos_px(self) -> int:
        raise NotImplementedError

    def pos_perc(self) -> int:
        raise NotImplementedError

    def to_perc(self, x: int = None, y: int = None) -> None:
        raise NotImplementedError

    def to_point(self, point: QPoint) -> None:
        raise NotImplementedError

    def to_anchor(self, name: str) -> None:
        raise NotImplementedError

    def delta(self, x: int = 0, y: int = 0) -> None:
        raise NotImplementedError

    def delta_page(self, x: float = 0, y: float = 0) -> None:
        raise NotImplementedError

    def up(self, count: int = 1) -> None:
        raise NotImplementedError

    def down(self, count: int = 1) -> None:
        raise NotImplementedError

    def left(self, count: int = 1) -> None:
        raise NotImplementedError

    def right(self, count: int = 1) -> None:
        raise NotImplementedError

    def top(self) -> None:
        raise NotImplementedError

    def bottom(self) -> None:
        raise NotImplementedError

    def page_up(self, count: int = 1) -> None:
        raise NotImplementedError

    def page_down(self, count: int = 1) -> None:
        raise NotImplementedError

    def at_top(self) -> bool:
        raise NotImplementedError

    def at_bottom(self) -> bool:
        raise NotImplementedError
class AbstractHistoryPrivate:

    """Private (non-extension) API related to the tab history."""

    def __init__(self, tab: 'AbstractTab'):
        self._tab = tab
        self._history = None

    def serialize(self) -> bytes:
        """Serialize into an opaque format understood by self.deserialize."""
        raise NotImplementedError

    def deserialize(self, data: bytes) -> None:
        """Deserialize from a format produced by self.serialize."""
        raise NotImplementedError

    def load_items(self, items: typing.Sequence) -> None:
        """Deserialize from a list of WebHistoryItems."""
        raise NotImplementedError
class AbstractHistory:

    """The history attribute of a AbstractTab."""

    def __init__(self, tab: 'AbstractTab') -> None:
        self._tab = tab
        self._history = None
        self.private_api = AbstractHistoryPrivate(tab)

    def __len__(self) -> int:
        raise NotImplementedError

    def __iter__(self) -> typing.Iterable:
        raise NotImplementedError

    def _check_count(self, count: int) -> None:
        """Raise WebTabError if the navigation count is negative."""
        if count < 0:
            raise WebTabError("count needs to be positive!")

    def current_idx(self) -> int:
        raise NotImplementedError

    def back(self, count: int = 1) -> None:
        """Go back in the tab's history."""
        self._check_count(count)
        new_idx = self.current_idx() - count
        if new_idx < 0:
            # Clamp to the oldest entry, but still report the overshoot.
            self._go_to_item(self._item_at(0))
            raise WebTabError("At beginning of history.")
        self._go_to_item(self._item_at(new_idx))

    def forward(self, count: int = 1) -> None:
        """Go forward in the tab's history."""
        self._check_count(count)
        new_idx = self.current_idx() + count
        if new_idx >= len(self):
            # Clamp to the newest entry, but still report the overshoot.
            self._go_to_item(self._item_at(len(self) - 1))
            raise WebTabError("At end of history.")
        self._go_to_item(self._item_at(new_idx))

    def can_go_back(self) -> bool:
        raise NotImplementedError

    def can_go_forward(self) -> bool:
        raise NotImplementedError

    def _item_at(self, i: int) -> typing.Any:
        raise NotImplementedError

    def _go_to_item(self, item: typing.Any) -> None:
        raise NotImplementedError
class AbstractElements:

    """Finding and handling of elements on the page."""

    _MultiCallback = typing.Callable[
        [typing.Sequence['webelem.AbstractWebElement']], None]
    _SingleCallback = typing.Callable[
        [typing.Optional['webelem.AbstractWebElement']], None]
    _ErrorCallback = typing.Callable[[Exception], None]

    def __init__(self, tab: 'AbstractTab') -> None:
        self._widget = None
        self._tab = tab

    def find_css(self, selector: str,
                 callback: _MultiCallback,
                 error_cb: _ErrorCallback, *,
                 only_visible: bool = False) -> None:
        """Asynchronously collect all elements matching a CSS selector.

        If there's an error, the callback is called with a webelem.Error
        instance.

        Args:
            selector: The CSS selector to search for.
            callback: Invoked with the matched elements once done.
            error_cb: Invoked when an error occurred.
            only_visible: Restrict the result to elements visible on screen.
        """
        raise NotImplementedError

    def find_id(self, elem_id: str, callback: _SingleCallback) -> None:
        """Asynchronously look up the element with the given DOM ID.

        Args:
            elem_id: The ID to search for.
            callback: Invoked once the search is done, with a
                      WebEngineElement or None.
        """
        raise NotImplementedError

    def find_focused(self, callback: _SingleCallback) -> None:
        """Asynchronously look up the currently focused element.

        Args:
            callback: Invoked once the search is done, with a
                      WebEngineElement or None.
        """
        raise NotImplementedError

    def find_at_pos(self, pos: QPoint, callback: _SingleCallback) -> None:
        """Asynchronously look up the element at a given position.

        This is also called "hit test" elsewhere.

        Args:
            pos: The QPoint to get the element for.
            callback: Invoked once the search is done, with a
                      WebEngineElement or None.
        """
        raise NotImplementedError
class AbstractAudio(QObject):

    """Audio state (muted / recently audible) of a single tab."""

    #: Emitted when the muted state flipped (new state as bool).
    muted_changed = pyqtSignal(bool)
    #: Emitted when the recently-audible state flipped (new state as bool).
    recently_audible_changed = pyqtSignal(bool)

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None) -> None:
        super().__init__(parent)
        self._widget = None  # type: typing.Optional[QWidget]
        self._tab = tab

    def set_muted(self, muted: bool, override: bool = False) -> None:
        """Mute or unmute this tab.

        Arguments:
            override: When True, muting/unmuting was an explicit user action
                      which overrides future automatic mute/unmute changes
                      based on the URL.
        """
        raise NotImplementedError

    def is_muted(self) -> bool:
        """Whether this tab is currently muted."""
        raise NotImplementedError

    def is_recently_audible(self) -> bool:
        """Whether this tab has had audio playing recently."""
        raise NotImplementedError
class AbstractTabPrivate:

    """Core-internal tab methods.

    These are deliberately kept out of the API exposed to extensions and
    should ideally disappear at some point in the future.
    """

    def __init__(self, mode_manager: modeman.ModeManager,
                 tab: 'AbstractTab') -> None:
        self._widget = None  # type: typing.Optional[QWidget]
        self._tab = tab
        self._mode_manager = mode_manager

    def event_target(self) -> QWidget:
        """Return the widget events should be sent to."""
        raise NotImplementedError

    def handle_auto_insert_mode(self, ok: bool) -> None:
        """Handle `input.insert_mode.auto_load` after loading finished."""
        if not ok or not config.val.input.insert_mode.auto_load:
            return
        if self._mode_manager.mode == usertypes.KeyMode.insert:
            # Already in insert mode, nothing to do.
            return

        def _focus_cb(elem: 'webelem.AbstractWebElement') -> None:
            """Handle the asynchronously-found focused element."""
            if elem is None:
                log.webview.debug("No focused element!")
            elif elem.is_editable():
                modeman.enter(self._tab.win_id, usertypes.KeyMode.insert,
                              'load finished', only_if_normal=True)

        self._tab.elements.find_focused(_focus_cb)

    def clear_ssl_errors(self) -> None:
        """Forget any SSL errors seen so far (backend-specific)."""
        raise NotImplementedError

    def networkaccessmanager(self) -> typing.Optional[QNetworkAccessManager]:
        """Return this tab's QNetworkAccessManager.

        Only QtWebKit implements this; with QtWebEngine the result is
        always None.
        """
        raise NotImplementedError

    def user_agent(self) -> typing.Optional[str]:
        """Return this tab's user agent string.

        Only QtWebKit implements this; with QtWebEngine the result is
        always None.
        """
        raise NotImplementedError

    def shutdown(self) -> None:
        """Shut the tab down (backend-specific)."""
        raise NotImplementedError
class AbstractTab(QWidget):

    """An adapter for QWebView/QWebEngineView representing a single tab."""

    #: Signal emitted when a website requests to close this tab.
    window_close_requested = pyqtSignal()
    #: Signal emitted when a link is hovered (the hover text)
    link_hovered = pyqtSignal(str)
    #: Signal emitted when a page started loading
    load_started = pyqtSignal()
    #: Signal emitted when a page is loading (progress percentage)
    load_progress = pyqtSignal(int)
    #: Signal emitted when a page finished loading (success as bool)
    load_finished = pyqtSignal(bool)
    #: Signal emitted when a page's favicon changed (icon as QIcon)
    icon_changed = pyqtSignal(QIcon)
    #: Signal emitted when a page's title changed (new title as str)
    title_changed = pyqtSignal(str)
    #: Signal emitted when a new tab should be opened (url as QUrl)
    new_tab_requested = pyqtSignal(QUrl)
    #: Signal emitted when a page's URL changed (url as QUrl)
    url_changed = pyqtSignal(QUrl)
    #: Signal emitted when a tab's content size changed
    #: (new size as QSizeF)
    contents_size_changed = pyqtSignal(QSizeF)
    #: Signal emitted when a page requested full-screen (bool)
    fullscreen_requested = pyqtSignal(bool)
    #: Signal emitted before load starts (URL as QUrl)
    before_load_started = pyqtSignal(QUrl)
    # Signal emitted when a page's load status changed
    # (argument: usertypes.LoadStatus)
    load_status_changed = pyqtSignal(usertypes.LoadStatus)
    # Signal emitted before shutting down
    shutting_down = pyqtSignal()
    # Signal emitted when a history item should be added
    history_item_triggered = pyqtSignal(QUrl, QUrl, str)
    # Signal emitted when the underlying renderer process terminated.
    # arg 0: A TerminationStatus member.
    # arg 1: The exit code.
    renderer_process_terminated = pyqtSignal(TerminationStatus, int)

    def __init__(self, *, win_id: int, private: bool,
                 parent: QWidget = None) -> None:
        self.is_private = private
        self.win_id = win_id
        self.tab_id = next(tab_id_gen)
        super().__init__(parent)

        # Register this tab in the per-window tab registry and expose a
        # per-tab registry of its own.
        self.registry = objreg.ObjectRegistry()
        tab_registry = objreg.get('tab-registry', scope='window',
                                  window=win_id)
        tab_registry[self.tab_id] = self
        objreg.register('tab', self, registry=self.registry)

        self.data = TabData()
        self._layout = miscwidgets.WrapperLayout(self)
        self._widget = None  # type: typing.Optional[QWidget]
        self._progress = 0
        self._has_ssl_errors = False
        self._load_status = usertypes.LoadStatus.none
        self._mouse_event_filter = mouse.MouseEventFilter(
            self, parent=self)
        self.backend = None

        # FIXME:qtwebengine Should this be public api via self.hints?
        # Also, should we get it out of objreg?
        hintmanager = hints.HintManager(win_id, self.tab_id, parent=self)
        objreg.register('hintmanager', hintmanager, scope='tab',
                        window=self.win_id, tab=self.tab_id)

        self.before_load_started.connect(self._on_before_load_started)

    def _set_widget(self, widget: QWidget) -> None:
        """Wrap the given view widget and hand it to all sub-objects."""
        # pylint: disable=protected-access
        self._widget = widget
        self._layout.wrap(self, widget)
        self.history._history = widget.history()
        self.history.private_api._history = widget.history()
        self.scroller._init_widget(widget)
        self.caret._widget = widget
        self.zoom._widget = widget
        self.search._widget = widget
        self.printing._widget = widget
        self.action._widget = widget
        self.elements._widget = widget
        self.audio._widget = widget
        self.private_api._widget = widget
        self.settings._settings = widget.settings()
        self._install_event_filter()
        self.zoom.apply_default()

    def _install_event_filter(self) -> None:
        """Install the mouse event filter on the widget (backend-specific)."""
        raise NotImplementedError

    def _set_load_status(self, val: usertypes.LoadStatus) -> None:
        """Setter for load_status."""
        if not isinstance(val, usertypes.LoadStatus):
            raise TypeError("Type {} is no LoadStatus member!".format(val))
        log.webview.debug("load status for {}: {}".format(repr(self), val))
        self._load_status = val
        self.load_status_changed.emit(val)

    def send_event(self, evt: QEvent) -> None:
        """Send the given event to the underlying widget.

        The event will be sent via QApplication.postEvent.
        Note that a posted event must not be re-used in any way!
        """
        # This only gives us some mild protection against re-using events, but
        # it's certainly better than a segfault.
        if getattr(evt, 'posted', False):
            raise utils.Unreachable("Can't re-use an event which was already "
                                    "posted!")

        recipient = self.private_api.event_target()
        if recipient is None:
            # https://github.com/qutebrowser/qutebrowser/issues/3888
            log.webview.warning("Unable to find event target!")
            return

        evt.posted = True
        QApplication.postEvent(recipient, evt)

    def navigation_blocked(self) -> bool:
        """Test if navigation is allowed on the current tab."""
        return self.data.pinned and config.val.tabs.pinned.frozen

    @pyqtSlot(QUrl)
    def _on_before_load_started(self, url: QUrl) -> None:
        """Adjust the title if we are going to visit a URL soon."""
        qtutils.ensure_valid(url)
        url_string = url.toDisplayString()
        log.webview.debug("Going to start loading: {}".format(url_string))
        self.title_changed.emit(url_string)

    @pyqtSlot(QUrl)
    def _on_url_changed(self, url: QUrl) -> None:
        """Update title when URL has changed and no title is available."""
        if url.isValid() and not self.title():
            self.title_changed.emit(url.toDisplayString())
        self.url_changed.emit(url)

    @pyqtSlot()
    def _on_load_started(self) -> None:
        """Reset per-load state when a new page load begins."""
        self._progress = 0
        self._has_ssl_errors = False
        self.data.viewing_source = False
        self._set_load_status(usertypes.LoadStatus.loading)
        self.load_started.emit()

    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(
            self,
            navigation: usertypes.NavigationRequest
    ) -> None:
        """Handle common acceptNavigationRequest code."""
        url = utils.elide(navigation.url.toDisplayString(), 100)
        log.webview.debug("navigation request: url {}, type {}, is_main_frame "
                          "{}".format(url,
                                      navigation.navigation_type,
                                      navigation.is_main_frame))

        if not navigation.url.isValid():
            # Also a WORKAROUND for missing IDNA 2008 support in QUrl, see
            # https://bugreports.qt.io/browse/QTBUG-60364
            if navigation.navigation_type == navigation.Type.link_clicked:
                msg = urlutils.get_errstring(navigation.url,
                                             "Invalid link clicked")
                message.error(msg)
                self.data.open_target = usertypes.ClickTarget.normal
            log.webview.debug("Ignoring invalid URL {} in "
                              "acceptNavigationRequest: {}".format(
                                  navigation.url.toDisplayString(),
                                  navigation.url.errorString()))
            navigation.accepted = False

    @pyqtSlot(bool)
    def _on_load_finished(self, ok: bool) -> None:
        """Derive and publish the load status once a load has finished."""
        assert self._widget is not None
        if sip.isdeleted(self._widget):
            # https://github.com/qutebrowser/qutebrowser/issues/3498
            return

        try:
            sess_manager = objreg.get('session-manager')
        except KeyError:
            # https://github.com/qutebrowser/qutebrowser/issues/4311
            return

        sess_manager.save_autosave()

        # success_https only when the load succeeded without SSL errors and
        # the final URL is https; SSL errors on a successful load -> warn.
        if ok and not self._has_ssl_errors:
            if self.url().scheme() == 'https':
                self._set_load_status(usertypes.LoadStatus.success_https)
            else:
                self._set_load_status(usertypes.LoadStatus.success)
        elif ok:
            self._set_load_status(usertypes.LoadStatus.warn)
        else:
            self._set_load_status(usertypes.LoadStatus.error)

        self.load_finished.emit(ok)

        if not self.title():
            self.title_changed.emit(self.url().toDisplayString())

        self.zoom.reapply()

    @pyqtSlot()
    def _on_history_trigger(self) -> None:
        """Emit history_item_triggered based on backend-specific signal."""
        raise NotImplementedError

    @pyqtSlot(int)
    def _on_load_progress(self, perc: int) -> None:
        """Cache and re-emit the load progress percentage."""
        self._progress = perc
        self.load_progress.emit(perc)

    def url(self, *, requested: bool = False) -> QUrl:
        """Get this tab's URL (backend-specific).

        Args:
            requested: Return the URL as requested rather than as loaded —
                       exact semantics are backend-specific.
        """
        raise NotImplementedError

    def progress(self) -> int:
        """Return the current load progress percentage."""
        return self._progress

    def load_status(self) -> usertypes.LoadStatus:
        """Return the current load status."""
        return self._load_status

    def _load_url_prepare(self, url: QUrl, *,
                          emit_before_load_started: bool = True) -> None:
        """Validate the URL and optionally announce the upcoming load."""
        qtutils.ensure_valid(url)
        if emit_before_load_started:
            self.before_load_started.emit(url)

    def load_url(self, url: QUrl, *,
                 emit_before_load_started: bool = True) -> None:
        """Load the given URL (backend-specific)."""
        raise NotImplementedError

    def reload(self, *, force: bool = False) -> None:
        """Reload the current page (backend-specific)."""
        raise NotImplementedError

    def stop(self) -> None:
        """Stop loading the current page (backend-specific)."""
        raise NotImplementedError

    def fake_key_press(self,
                       key: Qt.Key,
                       modifier: Qt.KeyboardModifier = Qt.NoModifier) -> None:
        """Send a fake key event to this tab."""
        press_evt = QKeyEvent(QEvent.KeyPress, key, modifier, 0, 0, 0)
        release_evt = QKeyEvent(QEvent.KeyRelease, key, modifier,
                                0, 0, 0)
        self.send_event(press_evt)
        self.send_event(release_evt)

    def dump_async(self,
                   callback: typing.Callable[[str], None], *,
                   plain: bool = False) -> None:
        """Dump the current page's html asynchronously.

        The given callback will be called with the result when dumping is
        complete.
        """
        raise NotImplementedError

    def run_js_async(
            self,
            code: str,
            callback: typing.Callable[[typing.Any], None] = None, *,
            world: typing.Union[usertypes.JsWorld, int] = None
    ) -> None:
        """Run javascript async.

        The given callback will be called with the result when running JS is
        complete.

        Args:
            code: The javascript code to run.
            callback: The callback to call with the result, or None.
            world: A world ID (int or usertypes.JsWorld member) to run the JS
                   in the main world or in another isolated world.
        """
        raise NotImplementedError

    def title(self) -> str:
        """Return the current page title (backend-specific)."""
        raise NotImplementedError

    def icon(self) -> QIcon:
        """Return the current page favicon (backend-specific)."""
        raise NotImplementedError

    def set_html(self, html: str, base_url: QUrl = QUrl()) -> None:
        """Set the page content directly from an HTML string."""
        raise NotImplementedError

    def __repr__(self) -> str:
        try:
            qurl = self.url()
            url = qurl.toDisplayString(QUrl.EncodeUnicode)  # type: ignore
        except (AttributeError, RuntimeError) as exc:
            # The underlying widget might already be gone.
            url = '<{}>'.format(exc.__class__.__name__)
        else:
            url = utils.elide(url, 100)
        return utils.get_repr(self, tab_id=self.tab_id, url=url)

    def is_deleted(self) -> bool:
        """Whether the wrapped C++ widget was already deleted."""
        assert self._widget is not None
        return sip.isdeleted(self._widget)
| ./CrossVul/dataset_final_sorted/CWE-684/py/bad_3923_0 |
crossvul-python_data_bad_3918_1 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Wrapper over a QWebEngineView."""
import math
import functools
import sys
import re
import html as html_utils
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, Qt, QEvent, QPoint, QPointF,
QUrl, QTimer, QObject, qVersion)
from PyQt5.QtGui import QKeyEvent, QIcon
from PyQt5.QtNetwork import QAuthenticator
from PyQt5.QtWidgets import QApplication
from PyQt5.QtWebEngineWidgets import QWebEnginePage, QWebEngineScript
from qutebrowser.config import configdata, config
from qutebrowser.browser import browsertab, mouse, shared, webelem
from qutebrowser.browser.webengine import (webview, webengineelem, tabhistory,
interceptor, webenginequtescheme,
cookies, webenginedownloads,
webenginesettings, certificateerror)
from qutebrowser.misc import miscwidgets
from qutebrowser.utils import (usertypes, qtutils, log, javascript, utils,
message, objreg, jinja, debug)
from qutebrowser.qt import sip
_qute_scheme_handler = None
def init():
    """Initialize QtWebEngine-specific modules."""
    # For some reason we need to keep a reference, otherwise the scheme handler
    # won't work...
    # https://www.riverbankcomputing.com/pipermail/pyqt/2016-September/038075.html
    global _qute_scheme_handler

    app = QApplication.instance()

    # Everything below is installed on both the default and the private
    # profile so private windows behave the same.
    log.init.debug("Initializing qute://* handler...")
    _qute_scheme_handler = webenginequtescheme.QuteSchemeHandler(parent=app)
    _qute_scheme_handler.install(webenginesettings.default_profile)
    _qute_scheme_handler.install(webenginesettings.private_profile)

    log.init.debug("Initializing request interceptor...")
    host_blocker = objreg.get('host-blocker')
    args = objreg.get('args')
    req_interceptor = interceptor.RequestInterceptor(
        host_blocker, args=args, parent=app)
    req_interceptor.install(webenginesettings.default_profile)
    req_interceptor.install(webenginesettings.private_profile)

    log.init.debug("Initializing QtWebEngine downloads...")
    download_manager = webenginedownloads.DownloadManager(parent=app)
    download_manager.install(webenginesettings.default_profile)
    download_manager.install(webenginesettings.private_profile)
    objreg.register('webengine-download-manager', download_manager)

    log.init.debug("Initializing cookie filter...")
    cookies.install_filter(webenginesettings.default_profile)
    cookies.install_filter(webenginesettings.private_profile)

    # Clear visited links on web history clear
    hist = objreg.get('web-history')
    for p in [webenginesettings.default_profile,
              webenginesettings.private_profile]:
        hist.history_cleared.connect(p.clearAllVisitedLinks)
        # profile=p binds the loop variable early (late-binding closure
        # pitfall with lambdas in loops).
        hist.url_cleared.connect(lambda url, profile=p:
                                 profile.clearVisitedLinks([url]))
# Mapping worlds from usertypes.JsWorld to QWebEngineScript world IDs.
_JS_WORLD_MAP = {
    usertypes.JsWorld.main: QWebEngineScript.MainWorld,
    usertypes.JsWorld.application: QWebEngineScript.ApplicationWorld,
    usertypes.JsWorld.user: QWebEngineScript.UserWorld,
    # :jseval gets its own world, one past Qt's predefined UserWorld.
    usertypes.JsWorld.jseval: QWebEngineScript.UserWorld + 1,
}
class WebEngineAction(browsertab.AbstractAction):

    """QtWebEngine implementations related to web actions."""

    action_class = QWebEnginePage
    action_base = QWebEnginePage.WebAction

    def exit_fullscreen(self):
        """Leave fullscreen mode via the page's web action."""
        self._widget.triggerPageAction(QWebEnginePage.ExitFullScreen)

    def save_page(self):
        """Save the current page."""
        self._widget.triggerPageAction(QWebEnginePage.SavePage)

    def show_source(self, pygments=False):
        """Show the page source, either via Qt or pygments-highlighted."""
        if pygments:
            self._show_source_pygments()
            return

        try:
            self._widget.triggerPageAction(QWebEnginePage.ViewSource)
        except AttributeError:
            # Qt < 5.8 has no ViewSource action, so open a view-source: URL
            # in a new tab instead.
            tb = objreg.get('tabbed-browser', scope='window',
                            window=self._tab.win_id)
            urlstr = self._tab.url().toString(QUrl.RemoveUserInfo)
            # The original URL becomes the path of a view-source: URL
            # (without a host), but query/fragment should stay.
            tb.tabopen(QUrl('view-source:' + urlstr), background=False,
                       related=True)
class WebEnginePrinting(browsertab.AbstractPrinting):

    """QtWebEngine implementations related to printing."""

    def check_pdf_support(self):
        """PDF export is always available with QtWebEngine."""
        return True

    def check_printer_support(self):
        """Raise WebTabError if direct printing is unavailable.

        QWebEnginePage.print() only exists on Qt >= 5.8.
        """
        if not hasattr(self._widget.page(), 'print'):
            raise browsertab.WebTabError(
                "Printing is unsupported with QtWebEngine on Qt < 5.8")

    def check_preview_support(self):
        """Print previews are never supported with QtWebEngine."""
        raise browsertab.WebTabError(
            "Print previews are unsupported with QtWebEngine")

    def to_pdf(self, filename):
        """Print the current page to the given PDF file."""
        self._widget.page().printToPdf(filename)

    def to_printer(self, printer, callback=None):
        """Print the current page.

        Args:
            printer: The QPrinter to print to.
            callback: Called with a bool (ok) when printing finished, or
                      None to ignore the result.
        """
        if callback is None:
            # PEP 8 (E731): use a def instead of assigning a lambda.
            def callback(_ok):
                """Discard the print result."""

        self._widget.page().print(printer, callback)
class WebEngineSearch(browsertab.AbstractSearch):

    """QtWebEngine implementations related to searching on the page.

    Attributes:
        _flags: The QWebEnginePage.FindFlags of the last search.
        _pending_searches: How many searches have been started but not called
                           back yet.
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        self._flags = QWebEnginePage.FindFlags(0)
        self._pending_searches = 0

    def _find(self, text, flags, callback, caller):
        """Run findText on the widget, logging the asynchronous result."""
        self.search_displayed = True
        self._pending_searches += 1

        def _debug_cb(found):
            """Log the outcome, then forward it to the real callback."""
            self._pending_searches -= 1
            if self._pending_searches > 0:
                # See https://github.com/qutebrowser/qutebrowser/issues/2442
                # and https://github.com/qt/qtwebengine/blob/5.10/src/core/web_contents_adapter.cpp#L924-L934
                log.webview.debug("Ignoring cancelled search callback with "
                                  "{} pending searches".format(
                                      self._pending_searches))
                return

            found_text = 'found' if found else "didn't find"
            flag_text = ''
            if flags:
                flag_text = 'with flags {}'.format(debug.qflags_key(
                    QWebEnginePage, flags, klass=QWebEnginePage.FindFlag))
            log.webview.debug(
                ' '.join([caller, found_text, text, flag_text]).strip())

            if callback is not None:
                callback(found)

        self._widget.findText(text, flags, _debug_cb)

    def search(self, text, *, ignore_case='never', reverse=False,
               result_cb=None):
        """Start a new search, ignoring exact duplicates of the current one."""
        if self.text == text and self.search_displayed:
            # Don't go to next entry on duplicate search
            log.webview.debug("Ignoring duplicate search request"
                              " for {}".format(text))
            return

        self.text = text
        flags = QWebEnginePage.FindFlags(0)
        if self._is_case_sensitive(ignore_case):
            flags |= QWebEnginePage.FindCaseSensitively
        if reverse:
            flags |= QWebEnginePage.FindBackward
        self._flags = flags
        self._find(text, self._flags, result_cb, 'search')

    def clear(self):
        """Remove the current search highlighting."""
        self.search_displayed = False
        self._widget.findText('')

    def prev_result(self, *, result_cb=None):
        """Go to the previous result by flipping the search direction."""
        # The int() here makes sure we get a copy of the flags.
        flags = QWebEnginePage.FindFlags(int(self._flags))
        if flags & QWebEnginePage.FindBackward:
            flags &= ~QWebEnginePage.FindBackward
        else:
            flags |= QWebEnginePage.FindBackward
        self._find(self.text, flags, result_cb, 'prev_result')

    def next_result(self, *, result_cb=None):
        """Go to the next result in the current direction."""
        self._find(self.text, self._flags, result_cb, 'next_result')
class WebEngineCaret(browsertab.AbstractCaret):

    """QtWebEngine implementations related to moving the cursor/selection."""

    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_entered(self, mode):
        """Set up the JS side when caret mode is entered."""
        if mode != usertypes.KeyMode.caret:
            return

        if self._tab.search.search_displayed:
            # We are currently in search mode.
            # convert the search to a blue selection so we can operate on it
            # https://bugreports.qt.io/browse/QTBUG-60673
            self._tab.search.clear()

        self._tab.run_js_async(
            javascript.assemble('caret',
                                'setPlatform', sys.platform, qVersion()))
        self._js_call('setInitialCursor', self._selection_cb)

    def _selection_cb(self, enabled):
        """Emit selection_toggled based on setInitialCursor."""
        if enabled is None:
            log.webview.debug("Ignoring selection status None")
            return
        self.selection_toggled.emit(enabled)

    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_left(self, mode):
        """Tear down the JS caret when caret mode is left."""
        if mode != usertypes.KeyMode.caret:
            return
        self.drop_selection()
        self._js_call('disableCaret')

    def _js_call_times(self, command, count):
        """Run the given caret JS command count times."""
        for _ in range(count):
            self._js_call(command)

    def move_to_next_line(self, count=1):
        self._js_call_times('moveDown', count)

    def move_to_prev_line(self, count=1):
        self._js_call_times('moveUp', count)

    def move_to_next_char(self, count=1):
        self._js_call_times('moveRight', count)

    def move_to_prev_char(self, count=1):
        self._js_call_times('moveLeft', count)

    def move_to_end_of_word(self, count=1):
        self._js_call_times('moveToEndOfWord', count)

    def move_to_next_word(self, count=1):
        self._js_call_times('moveToNextWord', count)

    def move_to_prev_word(self, count=1):
        self._js_call_times('moveToPreviousWord', count)

    def move_to_start_of_line(self):
        self._js_call('moveToStartOfLine')

    def move_to_end_of_line(self):
        self._js_call('moveToEndOfLine')

    def move_to_start_of_next_block(self, count=1):
        self._js_call_times('moveToStartOfNextBlock', count)

    def move_to_start_of_prev_block(self, count=1):
        self._js_call_times('moveToStartOfPrevBlock', count)

    def move_to_end_of_next_block(self, count=1):
        self._js_call_times('moveToEndOfNextBlock', count)

    def move_to_end_of_prev_block(self, count=1):
        self._js_call_times('moveToEndOfPrevBlock', count)

    def move_to_start_of_document(self):
        self._js_call('moveToStartOfDocument')

    def move_to_end_of_document(self):
        self._js_call('moveToEndOfDocument')

    def toggle_selection(self):
        self._js_call('toggleSelection', self.selection_toggled.emit)

    def drop_selection(self):
        self._js_call('dropSelection')

    def selection(self, callback):
        # Not using selectedText() as WORKAROUND for
        # https://bugreports.qt.io/browse/QTBUG-53134
        # Even on Qt 5.10 selectedText() seems to work poorly, see
        # https://github.com/qutebrowser/qutebrowser/issues/3523
        self._tab.run_js_async(javascript.assemble('caret', 'getSelection'),
                               callback)

    def _follow_selected_cb(self, js_elem, tab=False):
        """Callback for javascript which clicks the selected element.

        Args:
            js_elem: The element serialized from javascript.
            tab: Open in a new tab.
        """
        if js_elem is None:
            return

        if js_elem == "focused":
            # we had a focused element, not a selected one. Just send <enter>
            self._follow_enter(tab)
            return

        assert isinstance(js_elem, dict), js_elem
        elem = webengineelem.WebEngineElement(js_elem, tab=self._tab)
        click_type = (usertypes.ClickTarget.tab if tab
                      else usertypes.ClickTarget.normal)

        # Only click if we see a link
        if not elem.is_link():
            return
        log.webview.debug("Found link in selection, clicking. ClickTarget "
                          "{}, elem {}".format(click_type, elem))
        try:
            elem.click(click_type)
        except webelem.Error as e:
            message.error(str(e))

    def follow_selected(self, *, tab=False):
        """Follow the currently selected (or searched) link."""
        if not self._tab.search.search_displayed:
            # click an existing blue selection
            js_code = javascript.assemble('webelem',
                                          'find_selected_focused_link')
            self._tab.run_js_async(js_code, lambda jsret:
                                   self._follow_selected_cb(jsret, tab))
            return

        # We are currently in search mode.
        # let's click the link via a fake-click
        # https://bugreports.qt.io/browse/QTBUG-60673
        self._tab.search.clear()
        log.webview.debug("Clicking a searched link via fake key press.")
        # send a fake enter, clicking the orange selection box
        self._follow_enter(tab)

    def _js_call(self, command, callback=None):
        """Run a caret JS command, optionally with a result callback."""
        self._tab.run_js_async(javascript.assemble('caret', command), callback)
class WebEngineScroller(browsertab.AbstractScroller):

    """QtWebEngine implementations related to scrolling."""

    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        self._args = objreg.get('args')
        # Cached scroll position, as percentages and as pixels.
        self._pos_perc = (0, 0)
        self._pos_px = QPoint()
        self._at_bottom = False

    def _init_widget(self, widget):
        """Hook up position updates once the widget is known."""
        super()._init_widget(widget)
        page = widget.page()
        page.scrollPositionChanged.connect(self._update_pos)

    def _repeated_key_press(self, key, count=1, modifier=Qt.NoModifier):
        """Send count fake key presses to this scroller's WebEngineTab."""
        # Capped at 1000 so an absurd count can't block the UI.
        for _ in range(min(count, 1000)):
            self._tab.key_press(key, modifier)

    @pyqtSlot(QPointF)
    def _update_pos(self, pos):
        """Update the scroll position attributes when it changed."""
        self._pos_px = pos.toPoint()
        contents_size = self._widget.page().contentsSize()

        scrollable_x = contents_size.width() - self._widget.width()
        if scrollable_x == 0:
            perc_x = 0
        else:
            try:
                perc_x = min(100, round(100 / scrollable_x * pos.x()))
            except ValueError:
                # Diagnostics for a reported crash with unknown cause:
                # https://github.com/qutebrowser/qutebrowser/issues/3219
                log.misc.debug("Got ValueError!")
                log.misc.debug("contents_size.width(): {}".format(
                    contents_size.width()))
                log.misc.debug("self._widget.width(): {}".format(
                    self._widget.width()))
                log.misc.debug("scrollable_x: {}".format(scrollable_x))
                log.misc.debug("pos.x(): {}".format(pos.x()))
                raise

        scrollable_y = contents_size.height() - self._widget.height()
        if scrollable_y == 0:
            perc_y = 0
        else:
            perc_y = min(100, round(100 / scrollable_y * pos.y()))

        self._at_bottom = math.ceil(pos.y()) >= scrollable_y

        # Only emit perc_changed when the percentage actually changed,
        # unless scroll filtering is disabled for debugging.
        if (self._pos_perc != (perc_x, perc_y) or
                'no-scroll-filtering' in self._args.debug_flags):
            self._pos_perc = perc_x, perc_y
            self.perc_changed.emit(*self._pos_perc)

    def pos_px(self):
        """Return the cached scroll position as a QPoint."""
        return self._pos_px

    def pos_perc(self):
        """Return the cached scroll position as an (x, y) percentage tuple."""
        return self._pos_perc

    def to_perc(self, x=None, y=None):
        """Scroll to the given percentages via JS."""
        js_code = javascript.assemble('scroll', 'to_perc', x, y)
        self._tab.run_js_async(js_code)

    def to_point(self, point):
        """Scroll to the given pixel position via JS."""
        js_code = javascript.assemble('window', 'scroll', point.x(), point.y())
        self._tab.run_js_async(js_code)

    def to_anchor(self, name):
        """Scroll to the given anchor by re-opening the URL with a fragment."""
        url = self._tab.url()
        url.setFragment(name)
        self._tab.openurl(url)

    def delta(self, x=0, y=0):
        """Scroll by the given pixel offsets."""
        self._tab.run_js_async(javascript.assemble('window', 'scrollBy', x, y))

    def delta_page(self, x=0, y=0):
        """Scroll by the given number of pages."""
        js_code = javascript.assemble('scroll', 'delta_page', x, y)
        self._tab.run_js_async(js_code)

    def up(self, count=1):
        self._repeated_key_press(Qt.Key_Up, count)

    def down(self, count=1):
        self._repeated_key_press(Qt.Key_Down, count)

    def left(self, count=1):
        self._repeated_key_press(Qt.Key_Left, count)

    def right(self, count=1):
        self._repeated_key_press(Qt.Key_Right, count)

    def top(self):
        self._tab.key_press(Qt.Key_Home)

    def bottom(self):
        self._tab.key_press(Qt.Key_End)

    def page_up(self, count=1):
        self._repeated_key_press(Qt.Key_PageUp, count)

    def page_down(self, count=1):
        self._repeated_key_press(Qt.Key_PageDown, count)

    def at_top(self):
        """Whether the page is scrolled to the very top."""
        return self.pos_px().y() == 0

    def at_bottom(self):
        """Whether the page is scrolled to the very bottom."""
        return self._at_bottom
class WebEngineHistory(browsertab.AbstractHistory):

    """QtWebEngine implementations related to page history."""

    def current_idx(self):
        """Return the index of the current history item."""
        return self._history.currentItemIndex()

    def can_go_back(self):
        return self._history.canGoBack()

    def can_go_forward(self):
        return self._history.canGoForward()

    def _item_at(self, i):
        return self._history.itemAt(i)

    def _go_to_item(self, item):
        # Announce the upcoming navigation before actually performing it.
        self._tab.predicted_navigation.emit(item.url())
        self._history.goToItem(item)

    def serialize(self):
        """Serialize the history into a QByteArray."""
        if not qtutils.version_check('5.9', compiled=False):
            # WORKAROUND for
            # https://github.com/qutebrowser/qutebrowser/issues/2289
            # Don't use the history's currentItem here, because of
            # https://bugreports.qt.io/browse/QTBUG-59599 and because it
            # doesn't contain view-source.
            scheme = self._tab.url().scheme()
            if scheme in ['view-source', 'chrome']:
                raise browsertab.WebTabError("Can't serialize special URL!")
        return qtutils.serialize(self._history)

    def deserialize(self, data):
        """Restore the history from previously-serialized data."""
        return qtutils.deserialize(data, self._history)

    def load_items(self, items):
        """Restore the given TabHistoryItems into this tab's history."""
        if items:
            self._tab.predicted_navigation.emit(items[-1].url)

        stream, _data, cur_data = tabhistory.serialize(items)
        qtutils.deserialize_stream(stream, self._history)

        @pyqtSlot()
        def _on_load_finished():
            # One-shot slot: restore the scroll position, then disconnect.
            self._tab.scroller.to_point(cur_data['scroll-pos'])
            self._tab.load_finished.disconnect(_on_load_finished)

        if cur_data is not None:
            if 'zoom' in cur_data:
                self._tab.zoom.set_factor(cur_data['zoom'])
            # Only restore the scroll position if nothing scrolled yet.
            if ('scroll-pos' in cur_data and
                    self._tab.scroller.pos_px() == QPoint(0, 0)):
                self._tab.load_finished.connect(_on_load_finished)
class WebEngineZoom(browsertab.AbstractZoom):

    """QtWebEngine implementations related to zooming."""

    def _set_factor_internal(self, factor):
        """Apply the zoom factor to the underlying widget."""
        self._widget.setZoomFactor(factor)
class WebEngineElements(browsertab.AbstractElements):

    """QtWebEngine implementations related to elements on the page."""

    def _js_cb_multiple(self, callback, js_elems):
        """Handle found elements coming from JS and call the real callback.

        Args:
            callback: The callback to call with the found elements.
                      Called with None if there was an error.
            js_elems: The elements serialized from javascript.
        """
        if js_elems is None:
            callback(None)
            return
        # Idiomatic comprehension instead of a manual append loop.
        elems = [webengineelem.WebEngineElement(js_elem, tab=self._tab)
                 for js_elem in js_elems]
        callback(elems)

    def _js_cb_single(self, callback, js_elem):
        """Handle a found focus elem coming from JS and call the real callback.

        Args:
            callback: The callback to call with the found element.
                      Called with a WebEngineElement or None.
            js_elem: The element serialized from javascript.
        """
        debug_str = ('None' if js_elem is None
                     else utils.elide(repr(js_elem), 1000))
        log.webview.debug("Got element from JS: {}".format(debug_str))

        if js_elem is None:
            callback(None)
        else:
            elem = webengineelem.WebEngineElement(js_elem, tab=self._tab)
            callback(elem)

    def find_css(self, selector, callback, *, only_visible=False):
        """Find all elements matching a CSS selector (async)."""
        js_code = javascript.assemble('webelem', 'find_css', selector,
                                      only_visible)
        js_cb = functools.partial(self._js_cb_multiple, callback)
        self._tab.run_js_async(js_code, js_cb)

    def find_id(self, elem_id, callback):
        """Find the element with the given HTML ID (async)."""
        js_code = javascript.assemble('webelem', 'find_id', elem_id)
        js_cb = functools.partial(self._js_cb_single, callback)
        self._tab.run_js_async(js_code, js_cb)

    def find_focused(self, callback):
        """Find the currently-focused element (async)."""
        js_code = javascript.assemble('webelem', 'find_focused')
        js_cb = functools.partial(self._js_cb_single, callback)
        self._tab.run_js_async(js_code, js_cb)

    def find_at_pos(self, pos, callback):
        """Find the element at the given position (async, hit test)."""
        assert pos.x() >= 0
        assert pos.y() >= 0
        # Divide out the zoom factor before handing the position to JS —
        # presumably the JS side works in unzoomed coordinates; confirm.
        pos /= self._tab.zoom.factor()
        js_code = javascript.assemble('webelem', 'find_at_pos',
                                      pos.x(), pos.y())
        js_cb = functools.partial(self._js_cb_single, callback)
        self._tab.run_js_async(js_code, js_cb)
class WebEngineAudio(browsertab.AbstractAudio):

    """QtWebEngine implementations related to audio/muting."""

    def _connect_signals(self):
        """Forward the page's audio signals to the tab-level signals."""
        page = self._widget.page()
        page.audioMutedChanged.connect(self.muted_changed)
        page.recentlyAudibleChanged.connect(self.recently_audible_changed)

    def set_muted(self, muted: bool):
        """Mute or unmute the page."""
        self._widget.page().setAudioMuted(muted)

    def is_muted(self):
        """Whether the page is currently muted."""
        return self._widget.page().isAudioMuted()

    def is_recently_audible(self):
        """Whether the page has recently played audio."""
        return self._widget.page().recentlyAudible()
class _WebEnginePermissions(QObject):
"""Handling of various permission-related signals."""
_abort_questions = pyqtSignal()
    def __init__(self, tab, parent=None):
        super().__init__(parent)
        self._tab = tab
        # NOTE(review): presumably assigned by the owning tab once the web
        # view exists (connect_signals dereferences it) — confirm.
        self._widget = None
    def connect_signals(self):
        """Connect related signals from the QWebEnginePage."""
        page = self._widget.page()
        page.fullScreenRequested.connect(
            self._on_fullscreen_requested)
        page.featurePermissionRequested.connect(
            self._on_feature_permission_requested)
        if qtutils.version_check('5.11'):
            # Quota and protocol-handler requests only exist on Qt >= 5.11.
            page.quotaRequested.connect(
                self._on_quota_requested)
            page.registerProtocolHandlerRequested.connect(
                self._on_register_protocol_handler_requested)
        # Abort any pending permission prompts when the tab shuts down or
        # starts loading a new page.
        self._tab.shutting_down.connect(self._abort_questions)
        self._tab.load_started.connect(self._abort_questions)
@pyqtSlot('QWebEngineFullScreenRequest')
def _on_fullscreen_requested(self, request):
request.accept()
on = request.toggleOn()
self._tab.data.fullscreen = on
self._tab.fullscreen_requested.emit(on)
if on:
notification = miscwidgets.FullscreenNotification(self._widget)
notification.show()
notification.set_timeout(3000)
    @pyqtSlot(QUrl, 'QWebEnginePage::Feature')
    def _on_feature_permission_requested(self, url, feature):
        """Ask the user for approval for geolocation/media/etc.."""
        # Maps each feature to the config option controlling it and to the
        # message shown in the permission prompt.
        options = {
            QWebEnginePage.Geolocation: 'content.geolocation',
            QWebEnginePage.MediaAudioCapture: 'content.media_capture',
            QWebEnginePage.MediaVideoCapture: 'content.media_capture',
            QWebEnginePage.MediaAudioVideoCapture: 'content.media_capture',
        }
        messages = {
            QWebEnginePage.Geolocation: 'access your location',
            QWebEnginePage.MediaAudioCapture: 'record audio',
            QWebEnginePage.MediaVideoCapture: 'record video',
            QWebEnginePage.MediaAudioVideoCapture: 'record audio/video',
        }
        try:
            options.update({
                QWebEnginePage.DesktopVideoCapture:
                    'content.desktop_capture',
                QWebEnginePage.DesktopAudioVideoCapture:
                    'content.desktop_capture',
            })
            messages.update({
                QWebEnginePage.DesktopVideoCapture:
                    'capture your desktop',
                QWebEnginePage.DesktopAudioVideoCapture:
                    'capture your desktop and audio',
            })
        except AttributeError:
            # Added in Qt 5.10
            pass

        # Both dicts must describe exactly the same set of features.
        assert options.keys() == messages.keys()

        page = self._widget.page()

        if feature not in options:
            # Unknown feature: deny it rather than leaving it unanswered.
            log.webview.error("Unhandled feature permission {}".format(
                debug.qenum_key(QWebEnginePage, feature)))
            page.setFeaturePermission(url, feature,
                                      QWebEnginePage.PermissionDeniedByUser)
            return

        yes_action = functools.partial(
            page.setFeaturePermission, url, feature,
            QWebEnginePage.PermissionGrantedByUser)
        no_action = functools.partial(
            page.setFeaturePermission, url, feature,
            QWebEnginePage.PermissionDeniedByUser)

        question = shared.feature_permission(
            url=url, option=options[feature], msg=messages[feature],
            yes_action=yes_action, no_action=no_action,
            abort_on=[self._abort_questions])

        if question is not None:
            # Dismiss the question if the page cancels the request.
            page.featurePermissionRequestCanceled.connect(
                functools.partial(self._on_feature_permission_cancelled,
                                  question, url, feature))
def _on_feature_permission_cancelled(self, question, url, feature,
cancelled_url, cancelled_feature):
"""Slot invoked when a feature permission request was cancelled.
To be used with functools.partial.
"""
if url == cancelled_url and feature == cancelled_feature:
try:
question.abort()
except RuntimeError:
# The question could already be deleted, e.g. because it was
# aborted after a loadStarted signal.
pass
def _on_quota_requested(self, request):
size = utils.format_size(request.requestedSize())
shared.feature_permission(
url=request.origin(),
option='content.persistent_storage',
msg='use {} of persistent storage'.format(size),
yes_action=request.accept, no_action=request.reject,
abort_on=[self._abort_questions],
blocking=True)
def _on_register_protocol_handler_requested(self, request):
shared.feature_permission(
url=request.origin(),
option='content.register_protocol_handler',
msg='open all {} links'.format(request.scheme()),
yes_action=request.accept, no_action=request.reject,
abort_on=[self._abort_questions],
blocking=True)
class _WebEngineScripts(QObject):

    """Management of the QWebEngineScripts injected into a tab's page."""

    def __init__(self, tab, parent=None):
        super().__init__(parent)
        self._tab = tab
        # Set externally (by the owning tab) before the other methods run.
        self._widget = None
        self._greasemonkey = objreg.get('greasemonkey')

    def connect_signals(self):
        """Connect the config-changed signal to our private slot."""
        config.instance.changed.connect(self._on_config_changed)

    @pyqtSlot(str)
    def _on_config_changed(self, option):
        # Regenerate the early-injected stylesheet script and also push the
        # new CSS into the already-loaded page.
        if option in ['scrolling.bar', 'content.user_stylesheets']:
            self._init_stylesheet()
            self._update_stylesheet()

    def _update_stylesheet(self):
        """Update the custom stylesheet in existing tabs."""
        css = shared.get_user_stylesheet()
        code = javascript.assemble('stylesheet', 'set_css', css)
        self._tab.run_js_async(code)

    def _inject_early_js(self, name, js_code, *,
                         world=QWebEngineScript.ApplicationWorld,
                         subframes=False):
        """Inject the given script to run early on a page load.

        This runs the script both on DocumentCreation and DocumentReady as on
        some internal pages, DocumentCreation will not work.

        That is a WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66011

        Args:
            name: Base name; scripts are registered as '_qute_<name>_<point>'.
            js_code: The JavaScript source to inject.
            world: The QWebEngineScript world to run the script in.
            subframes: Whether to also run the script in subframes.
        """
        scripts = self._widget.page().scripts()
        for injection in ['creation', 'ready']:
            injection_points = {
                'creation': QWebEngineScript.DocumentCreation,
                'ready': QWebEngineScript.DocumentReady,
            }
            script = QWebEngineScript()
            script.setInjectionPoint(injection_points[injection])
            script.setSourceCode(js_code)
            script.setWorldId(world)
            script.setRunsOnSubFrames(subframes)
            script.setName('_qute_{}_{}'.format(name, injection))
            scripts.insert(script)

    def _remove_early_js(self, name):
        """Remove an early QWebEngineScript."""
        scripts = self._widget.page().scripts()
        for injection in ['creation', 'ready']:
            full_name = '_qute_{}_{}'.format(name, injection)
            script = scripts.findScript(full_name)
            if not script.isNull():
                # NOTE(review): findScript returns a single script, so if
                # several scripts share a name only one gets removed here —
                # verify callers never register duplicates they need removed.
                scripts.remove(script)

    def init(self):
        """Initialize global qutebrowser JavaScript."""
        js_code = javascript.wrap_global(
            'scripts',
            utils.read_file('javascript/scroll.js'),
            utils.read_file('javascript/webelem.js'),
            utils.read_file('javascript/caret.js'),
        )
        # print.js runs in the MainWorld so pages can use window.print().
        # NOTE(review): both injections below use the name 'js', so two
        # scripts end up registered as _qute_js_{creation,ready} each —
        # confirm this is intended (see _remove_early_js note).
        self._inject_early_js('js',
                              utils.read_file('javascript/print.js'),
                              subframes=True,
                              world=QWebEngineScript.MainWorld)
        # FIXME:qtwebengine what about subframes=True?
        self._inject_early_js('js', js_code, subframes=True)
        self._init_stylesheet()

        # The Greasemonkey metadata block support in QtWebEngine only starts at
        # Qt 5.8. With 5.7.1, we need to inject the scripts ourselves in
        # response to urlChanged.
        if not qtutils.version_check('5.8'):
            self._tab.url_changed.connect(
                self._inject_greasemonkey_scripts_for_url)
        else:
            self._greasemonkey.scripts_reloaded.connect(
                self._inject_all_greasemonkey_scripts)
            self._inject_all_greasemonkey_scripts()

    def _init_stylesheet(self):
        """Initialize custom stylesheets.

        Partially inspired by QupZilla:
        https://github.com/QupZilla/qupzilla/blob/v2.0/src/lib/app/mainapplication.cpp#L1063-L1101
        """
        self._remove_early_js('stylesheet')
        css = shared.get_user_stylesheet()
        js_code = javascript.wrap_global(
            'stylesheet',
            utils.read_file('javascript/stylesheet.js'),
            javascript.assemble('stylesheet', 'set_css', css),
        )
        self._inject_early_js('stylesheet', js_code, subframes=True)

    @pyqtSlot(QUrl)
    def _inject_greasemonkey_scripts_for_url(self, url):
        """Inject the scripts matching the given URL (pre-Qt-5.8 fallback)."""
        matching_scripts = self._greasemonkey.scripts_for(url)
        self._inject_greasemonkey_scripts(
            matching_scripts.start, QWebEngineScript.DocumentCreation, True)
        self._inject_greasemonkey_scripts(
            matching_scripts.end, QWebEngineScript.DocumentReady, False)
        self._inject_greasemonkey_scripts(
            matching_scripts.idle, QWebEngineScript.Deferred, False)

    @pyqtSlot()
    def _inject_all_greasemonkey_scripts(self):
        """(Re-)register every known Greasemonkey script with this tab."""
        scripts = self._greasemonkey.all_scripts()
        self._inject_greasemonkey_scripts(scripts)

    def _inject_greasemonkey_scripts(self, scripts=None, injection_point=None,
                                     remove_first=True):
        """Register user JavaScript files with the current tab.

        Args:
            scripts: A list of GreasemonkeyScripts, or None to add all
                     known by the Greasemonkey subsystem.
            injection_point: The QWebEngineScript::InjectionPoint stage
                             to inject the script into, None to use
                             auto-detection.
            remove_first: Whether to remove all previously injected
                          scripts before adding these ones.
        """
        if sip.isdeleted(self._widget):
            # The tab is already gone; nothing to register the scripts on.
            return

        # Since we are inserting scripts into a per-tab collection,
        # rather than just injecting scripts on page load, we need to
        # make sure we replace existing scripts, not just add new ones.
        # While, taking care not to remove any other scripts that might
        # have been added elsewhere, like the one for stylesheets.
        page_scripts = self._widget.page().scripts()
        if remove_first:
            for script in page_scripts.toList():
                # All Greasemonkey scripts are registered with a "GM-" prefix.
                if script.name().startswith("GM-"):
                    log.greasemonkey.debug('Removing script: {}'
                                           .format(script.name()))
                    removed = page_scripts.remove(script)
                    assert removed, script.name()

        if not scripts:
            return

        for script in scripts:
            new_script = QWebEngineScript()
            # @qute-js-world may either be a numeric world ID or a name from
            # usertypes.JsWorld.
            try:
                world = int(script.jsworld)
            except ValueError:
                try:
                    world = _JS_WORLD_MAP[usertypes.JsWorld[
                        script.jsworld.lower()]]
                except KeyError:
                    log.greasemonkey.error(
                        "script {} has invalid value for '@qute-js-world'"
                        ": {}".format(script.name, script.jsworld))
                    continue
            new_script.setWorldId(world)
            new_script.setSourceCode(script.code())
            new_script.setName("GM-{}".format(script.name))
            new_script.setRunsOnSubFrames(script.runs_on_sub_frames)
            # Override the @run-at value parsed by QWebEngineScript if desired.
            if injection_point:
                new_script.setInjectionPoint(injection_point)
            log.greasemonkey.debug('adding script: {}'
                                   .format(new_script.name()))
            page_scripts.insert(new_script)
class WebEngineTab(browsertab.AbstractTab):

    """A QtWebEngine tab in the browser.

    Signals:
        _load_finished_fake:
            Used in place of unreliable loadFinished
    """

    # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-65223
    _load_finished_fake = pyqtSignal(bool)

    def __init__(self, *, win_id, mode_manager, private, parent=None):
        super().__init__(win_id=win_id, mode_manager=mode_manager,
                         private=private, parent=parent)
        widget = webview.WebEngineView(tabdata=self.data, win_id=win_id,
                                       private=private)
        self.history = WebEngineHistory(self)
        self.scroller = WebEngineScroller(self, parent=self)
        self.caret = WebEngineCaret(mode_manager=mode_manager,
                                    tab=self, parent=self)
        self.zoom = WebEngineZoom(tab=self, parent=self)
        self.search = WebEngineSearch(parent=self)
        self.printing = WebEnginePrinting(tab=self)
        self.elements = WebEngineElements(tab=self)
        self.action = WebEngineAction(tab=self)
        self.audio = WebEngineAudio(parent=self)
        self._permissions = _WebEnginePermissions(tab=self, parent=self)
        self._scripts = _WebEngineScripts(tab=self, parent=self)
        # We're assigning settings in _set_widget
        self.settings = webenginesettings.WebEngineSettings(settings=None)
        self._set_widget(widget)
        self._connect_signals()
        self.backend = usertypes.Backend.QtWebEngine
        self._child_event_filter = None
        self._saved_zoom = None
        self._reload_url = None
        self._scripts.init()

    def _set_widget(self, widget):
        # pylint: disable=protected-access
        super()._set_widget(widget)
        # Hand the widget to the helper objects created in __init__.
        self._permissions._widget = widget
        self._scripts._widget = widget

    def _install_event_filter(self):
        """Install mouse/child event filters on the widget."""
        fp = self._widget.focusProxy()
        if fp is not None:
            fp.installEventFilter(self._mouse_event_filter)
        # FIX: This module imports `eventfilter`, not `mouse` — the previous
        # `mouse.ChildEventFilter` reference raised a NameError.
        self._child_event_filter = eventfilter.ChildEventFilter(
            eventfilter=self._mouse_event_filter, widget=self._widget,
            win_id=self.win_id, parent=self)
        self._widget.installEventFilter(self._child_event_filter)

    @pyqtSlot()
    def _restore_zoom(self):
        """Restore the zoom level saved before the last openurl()."""
        if sip.isdeleted(self._widget):
            # https://github.com/qutebrowser/qutebrowser/issues/3498
            return
        if self._saved_zoom is None:
            return
        self.zoom.set_factor(self._saved_zoom)
        self._saved_zoom = None

    def openurl(self, url, *, predict=True):
        """Open the given URL in this tab.

        Arguments:
            url: The QUrl to open.
            predict: If set to False, predicted_navigation is not emitted.
        """
        if sip.isdeleted(self._widget):
            # https://github.com/qutebrowser/qutebrowser/issues/3896
            return
        self._saved_zoom = self.zoom.factor()
        self._openurl_prepare(url, predict=predict)
        self._widget.load(url)

    def url(self, requested=False):
        """Get the current (or originally requested) URL of the page."""
        page = self._widget.page()
        if requested:
            return page.requestedUrl()
        else:
            return page.url()

    def dump_async(self, callback, *, plain=False):
        """Dump the page content asynchronously as HTML or plain text."""
        if plain:
            self._widget.page().toPlainText(callback)
        else:
            self._widget.page().toHtml(callback)

    def run_js_async(self, code, callback=None, *, world=None):
        """Run the given JS code asynchronously in the given world."""
        if world is None:
            world_id = QWebEngineScript.ApplicationWorld
        elif isinstance(world, int):
            world_id = world
        else:
            world_id = _JS_WORLD_MAP[world]
        if callback is None:
            self._widget.page().runJavaScript(code, world_id)
        else:
            self._widget.page().runJavaScript(code, world_id, callback)

    def shutdown(self):
        """Shut this tab down cleanly."""
        self.shutting_down.emit()
        self.action.exit_fullscreen()
        self._widget.shutdown()

    def reload(self, *, force=False):
        """Reload the page, optionally bypassing the cache."""
        if force:
            action = QWebEnginePage.ReloadAndBypassCache
        else:
            action = QWebEnginePage.Reload
        self._widget.triggerPageAction(action)

    def stop(self):
        self._widget.stop()

    def title(self):
        return self._widget.title()

    def icon(self):
        return self._widget.icon()

    def set_html(self, html, base_url=QUrl()):
        # FIXME:qtwebengine
        # check this and raise an exception if too big:
        # Warning: The content will be percent encoded before being sent to the
        # renderer via IPC. This may increase its size. The maximum size of the
        # percent encoded content is 2 megabytes minus 30 bytes.
        self._widget.setHtml(html, base_url)

    def networkaccessmanager(self):
        # QtWebEngine has no QNetworkAccessManager we could expose.
        return None

    def user_agent(self):
        return None

    def clear_ssl_errors(self):
        raise browsertab.UnsupportedOperationError

    def key_press(self, key, modifier=Qt.NoModifier):
        """Send a fake key press+release event pair to the widget."""
        # FIX: QKeyEvent/QEvent are not part of this module's top-level
        # imports, so import them locally to avoid a NameError.
        from PyQt5.QtCore import QEvent
        from PyQt5.QtGui import QKeyEvent
        press_evt = QKeyEvent(QEvent.KeyPress, key, modifier, 0, 0, 0)
        release_evt = QKeyEvent(QEvent.KeyRelease, key, modifier,
                                0, 0, 0)
        self.send_event(press_evt)
        self.send_event(release_evt)

    def _show_error_page(self, url, error):
        """Show an error page in the tab."""
        log.misc.debug("Showing error page for {}".format(error))
        url_string = url.toDisplayString()
        error_page = jinja.render(
            'error.html',
            title="Error loading page: {}".format(url_string),
            url=url_string, error=error)
        self.set_html(error_page)

    @pyqtSlot()
    def _on_history_trigger(self):
        """Add the current page to the history, unless it shouldn't be."""
        try:
            self._widget.page()
        except RuntimeError:
            # Looks like this slot can be triggered on destroyed tabs:
            # https://crashes.qutebrowser.org/view/3abffbed (Qt 5.9.1)
            # wrapped C/C++ object of type WebEngineView has been deleted
            log.misc.debug("Ignoring history trigger for destroyed tab")
            return

        url = self.url()
        requested_url = self.url(requested=True)

        # Don't save the title if it's generated from the URL
        title = self.title()
        title_url = QUrl(url)
        title_url.setScheme('')
        if title == title_url.toDisplayString(QUrl.RemoveScheme).strip('/'):
            title = ""

        # Don't add history entry if the URL is invalid anyways
        if not url.isValid():
            log.misc.debug("Ignoring invalid URL being added to history")
            return

        self.add_history_item.emit(url, requested_url, title)

    @pyqtSlot(QUrl, 'QAuthenticator*', 'QString')
    def _on_proxy_authentication_required(self, url, authenticator,
                                          proxy_host):
        """Called when a proxy needs authentication."""
        msg = "<b>{}</b> requires a username and password.".format(
            html_utils.escape(proxy_host))
        urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
        answer = message.ask(
            title="Proxy authentication required", text=msg,
            mode=usertypes.PromptMode.user_pwd,
            abort_on=[self.shutting_down, self.load_started], url=urlstr)
        if answer is not None:
            authenticator.setUser(answer.user)
            authenticator.setPassword(answer.password)
        else:
            # Cancel the request by blanking the authenticator; with older
            # PyQt versions lacking sip.assign, fall back to an error page.
            try:
                # pylint: disable=no-member, useless-suppression
                sip.assign(authenticator, QAuthenticator())
                # pylint: enable=no-member, useless-suppression
            except AttributeError:
                self._show_error_page(url, "Proxy authentication required")

    @pyqtSlot(QUrl, 'QAuthenticator*')
    def _on_authentication_required(self, url, authenticator):
        """Ask for credentials (or use netrc) for HTTP authentication."""
        netrc_success = False
        if not self.data.netrc_used:
            self.data.netrc_used = True
            netrc_success = shared.netrc_authentication(url, authenticator)
        if not netrc_success:
            abort_on = [self.shutting_down, self.load_started]
            answer = shared.authentication_required(url, authenticator,
                                                    abort_on)
        if not netrc_success and answer is None:
            try:
                # pylint: disable=no-member, useless-suppression
                sip.assign(authenticator, QAuthenticator())
                # pylint: enable=no-member, useless-suppression
            except AttributeError:
                # WORKAROUND for
                # https://www.riverbankcomputing.com/pipermail/pyqt/2016-December/038400.html
                self._show_error_page(url, "Authentication required")

    @pyqtSlot()
    def _on_load_started(self):
        """Clear search when a new load is started if needed."""
        # WORKAROUND for
        # https://bugreports.qt.io/browse/QTBUG-61506
        # (seems to be back in later Qt versions as well)
        self.search.clear()
        super()._on_load_started()
        self.data.netrc_used = False

    @pyqtSlot(QWebEnginePage.RenderProcessTerminationStatus, int)
    def _on_render_process_terminated(self, status, exitcode):
        """Show an error when the renderer process terminated."""
        if (status == QWebEnginePage.AbnormalTerminationStatus and
                exitcode == 256):
            # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-58697
            status = QWebEnginePage.CrashedTerminationStatus

        status_map = {
            QWebEnginePage.NormalTerminationStatus:
                browsertab.TerminationStatus.normal,
            QWebEnginePage.AbnormalTerminationStatus:
                browsertab.TerminationStatus.abnormal,
            QWebEnginePage.CrashedTerminationStatus:
                browsertab.TerminationStatus.crashed,
            QWebEnginePage.KilledTerminationStatus:
                browsertab.TerminationStatus.killed,
            -1:
                browsertab.TerminationStatus.unknown,
        }
        self.renderer_process_terminated.emit(status_map[status], exitcode)

    @pyqtSlot(int)
    def _on_load_progress_workaround(self, perc):
        """Use loadProgress(100) to emit loadFinished(True).

        See https://bugreports.qt.io/browse/QTBUG-65223
        """
        if perc == 100 and self.load_status() != usertypes.LoadStatus.error:
            self._load_finished_fake.emit(True)

    @pyqtSlot(bool)
    def _on_load_finished_workaround(self, ok):
        """Use only loadFinished(False).

        See https://bugreports.qt.io/browse/QTBUG-65223
        """
        if not ok:
            self._load_finished_fake.emit(False)

    def _error_page_workaround(self, html):
        """Check if we're displaying a Chromium error page.

        This gets only called if we got loadFinished(False) without JavaScript,
        so we can display at least some error page.

        WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66643
        Needs to check the page content as a WORKAROUND for
        https://bugreports.qt.io/browse/QTBUG-66661
        """
        match = re.search(r'"errorCode":"([^"]*)"', html)
        if match is None:
            return
        self._show_error_page(self.url(), error=match.group(1))

    @pyqtSlot(bool)
    def _on_load_finished(self, ok):
        """Display a static error page if JavaScript is disabled."""
        super()._on_load_finished(ok)
        js_enabled = self.settings.test_attribute('content.javascript.enabled')
        if not ok and not js_enabled:
            self.dump_async(self._error_page_workaround)

        if ok and self._reload_url is not None:
            # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66656
            log.config.debug(
                "Loading {} again because of config change".format(
                    self._reload_url.toDisplayString()))
            QTimer.singleShot(100, functools.partial(self.openurl,
                                                     self._reload_url,
                                                     predict=False))
            self._reload_url = None

        if not qtutils.version_check('5.10', compiled=False):
            # We can't do this when we have the loadFinished workaround as that
            # sometimes clears icons without loading a new page.
            # In general, this is handled by Qt, but when loading takes long,
            # the old icon is still displayed.
            # FIX: QIcon is not part of this module's top-level imports.
            from PyQt5.QtGui import QIcon
            self.icon_changed.emit(QIcon())

    @pyqtSlot(certificateerror.CertificateErrorWrapper)
    def _on_ssl_errors(self, error):
        """Decide how to handle a certificate error for this page."""
        self._has_ssl_errors = True

        url = error.url()
        log.webview.debug("Certificate error: {}".format(error))

        if error.is_overridable():
            error.ignore = shared.ignore_certificate_errors(
                url, [error], abort_on=[self.shutting_down, self.load_started])
        else:
            log.webview.error("Non-overridable certificate error: "
                              "{}".format(error))

        log.webview.debug("ignore {}, URL {}, requested {}".format(
            error.ignore, url, self.url(requested=True)))

        # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-56207
        # We can't really know when to show an error page, as the error might
        # have happened when loading some resource.
        # However, self.url() is not available yet and the requested URL
        # might not match the URL we get from the error - so we just apply a
        # heuristic here.
        if (not qtutils.version_check('5.9') and
                not error.ignore and
                url.matches(self.url(requested=True), QUrl.RemoveScheme)):
            self._show_error_page(url, str(error))

    @pyqtSlot(QUrl)
    def _on_predicted_navigation(self, url):
        """If we know we're going to visit an URL soon, change the settings.

        This is a WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66656
        """
        super()._on_predicted_navigation(url)
        if not qtutils.version_check('5.11.1', compiled=False):
            self.settings.update_for_url(url)

    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(self, navigation):
        super()._on_navigation_request(navigation)

        if navigation.url == QUrl('qute://print'):
            try:
                self.printing.show_dialog()
            except browsertab.WebTabError as e:
                message.error(str(e))
            navigation.accepted = False

        if not navigation.accepted or not navigation.is_main_frame:
            return

        settings_needing_reload = {
            'content.plugins',
            'content.javascript.enabled',
            'content.javascript.can_access_clipboard',
            'content.print_element_backgrounds',
            'input.spatial_navigation',
        }
        assert settings_needing_reload.issubset(configdata.DATA)

        changed = self.settings.update_for_url(navigation.url)
        reload_needed = changed & settings_needing_reload

        # On Qt < 5.11, we don't don't need a reload when type == link_clicked.
        # On Qt 5.11.0, we always need a reload.
        # On Qt > 5.11.0, we never need a reload:
        # https://codereview.qt-project.org/#/c/229525/1
        # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-66656
        if qtutils.version_check('5.11.1', compiled=False):
            reload_needed = False
        elif not qtutils.version_check('5.11.0', exact=True, compiled=False):
            if navigation.navigation_type == navigation.Type.link_clicked:
                reload_needed = False

        if reload_needed:
            self._reload_url = navigation.url

    def _connect_signals(self):
        view = self._widget
        page = view.page()

        page.windowCloseRequested.connect(self.window_close_requested)
        page.linkHovered.connect(self.link_hovered)
        page.loadProgress.connect(self._on_load_progress)
        page.loadStarted.connect(self._on_load_started)
        page.certificate_error.connect(self._on_ssl_errors)
        page.authenticationRequired.connect(self._on_authentication_required)
        page.proxyAuthenticationRequired.connect(
            self._on_proxy_authentication_required)
        page.contentsSizeChanged.connect(self.contents_size_changed)
        page.navigation_request.connect(self._on_navigation_request)

        view.titleChanged.connect(self.title_changed)
        view.urlChanged.connect(self._on_url_changed)
        view.renderProcessTerminated.connect(
            self._on_render_process_terminated)
        view.iconChanged.connect(self.icon_changed)

        # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-65223
        if qtutils.version_check('5.10', compiled=False):
            page.loadProgress.connect(self._on_load_progress_workaround)
            self._load_finished_fake.connect(self._on_history_trigger)
            self._load_finished_fake.connect(self._restore_zoom)
            self._load_finished_fake.connect(self._on_load_finished)
            page.loadFinished.connect(self._on_load_finished_workaround)
        else:
            # for older Qt versions which break with the above
            # FIX: loadProgress is already connected to _on_load_progress
            # unconditionally above; connecting it again here made the slot
            # fire twice per signal.
            page.loadFinished.connect(self._on_history_trigger)
            page.loadFinished.connect(self._restore_zoom)
            page.loadFinished.connect(self._on_load_finished)

        self.predicted_navigation.connect(self._on_predicted_navigation)

        # pylint: disable=protected-access
        self.audio._connect_signals()
        self._permissions.connect_signals()
        self._scripts.connect_signals()

    def event_target(self):
        return self._widget.render_widget()
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Base class for a wrapper over QWebView/QWebEngineView."""
import enum
import itertools
import typing
import functools
import attr
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, QUrl, QObject, QSizeF, Qt,
QEvent, QPoint)
from PyQt5.QtGui import QKeyEvent, QIcon
from PyQt5.QtWidgets import QWidget, QApplication, QDialog
from PyQt5.QtPrintSupport import QPrintDialog, QPrinter
from PyQt5.QtNetwork import QNetworkAccessManager
if typing.TYPE_CHECKING:
from PyQt5.QtWebKit import QWebHistory
from PyQt5.QtWebEngineWidgets import QWebEngineHistory
import pygments
import pygments.lexers
import pygments.formatters
from qutebrowser.keyinput import modeman
from qutebrowser.config import config
from qutebrowser.utils import (utils, objreg, usertypes, log, qtutils,
urlutils, message)
from qutebrowser.misc import miscwidgets, objects, sessions
from qutebrowser.browser import eventfilter
from qutebrowser.qt import sip
if typing.TYPE_CHECKING:
from qutebrowser.browser import webelem
from qutebrowser.browser.inspector import AbstractWebInspector
tab_id_gen = itertools.count(0)
def create(win_id: int,
           private: bool,
           parent: QWidget = None) -> 'AbstractTab':
    """Get a QtWebKit/QtWebEngine tab object.

    Args:
        win_id: The window ID where the tab will be shown.
        private: Whether the tab is a private/off the record tab.
        parent: The Qt parent to set.
    """
    # The backend modules are imported lazily so we don't depend on
    # QtWebEngine unnecessarily, and to avoid circular imports.
    mode_manager = modeman.instance(win_id)
    if objects.backend == usertypes.Backend.QtWebEngine:
        from qutebrowser.browser.webengine import webenginetab
        tab_class = webenginetab.WebEngineTab
    else:
        from qutebrowser.browser.webkit import webkittab
        tab_class = webkittab.WebKitTab
    return tab_class(win_id=win_id,
                     mode_manager=mode_manager,
                     private=private,
                     parent=parent)
def init() -> None:
    """Initialize backend-specific modules."""
    if objects.backend != usertypes.Backend.QtWebEngine:
        # Only the QtWebEngine backend needs extra initialization.
        return
    from qutebrowser.browser.webengine import webenginetab
    webenginetab.init()
class WebTabError(Exception):

    """Base class for errors raised by tab implementations."""
class UnsupportedOperationError(WebTabError):

    """Raised when an operation is not supported with the given backend."""
class TerminationStatus(enum.Enum):

    """How a tab's renderer process terminated."""

    normal = 1
    abnormal = 2  # non-zero exit status
    crashed = 3   # e.g. segfault
    killed = 4
    unknown = 5
@attr.s
class TabData:

    """A simple namespace with a fixed set of attributes.

    Attributes:
        keep_icon: Whether the (e.g. cloned) icon should not be cleared on page
                   load.
        inspector: The QWebInspector used for this webview.
        viewing_source: Set if we're currently showing a source view.
                        Only used when sources are shown via pygments.
        open_target: Where to open the next link.
                     Only used for QtWebKit.
        override_target: Override for open_target for fake clicks (like hints).
                         Only used for QtWebKit.
        pinned: Flag to pin the tab.
        fullscreen: Whether the tab has a video shown fullscreen currently.
        netrc_used: Whether netrc authentication was performed.
        input_mode: current input mode for the tab.
        last_navigation: The last NavigationRequest for this tab (None until
                         the first navigation happened).
    """

    keep_icon = attr.ib(False)  # type: bool
    viewing_source = attr.ib(False)  # type: bool
    inspector = attr.ib(None)  # type: typing.Optional[AbstractWebInspector]
    open_target = attr.ib(
        usertypes.ClickTarget.normal)  # type: usertypes.ClickTarget
    override_target = attr.ib(
        None)  # type: typing.Optional[usertypes.ClickTarget]
    pinned = attr.ib(False)  # type: bool
    fullscreen = attr.ib(False)  # type: bool
    netrc_used = attr.ib(False)  # type: bool
    input_mode = attr.ib(usertypes.KeyMode.normal)  # type: usertypes.KeyMode
    last_navigation = attr.ib(None)  # type: usertypes.NavigationRequest

    def should_show_icon(self) -> bool:
        """Whether the tab's favicon should be shown, per the config.

        'always' shows it unconditionally; 'pinned' only for pinned tabs.
        """
        return (config.val.tabs.favicons.show == 'always' or
                config.val.tabs.favicons.show == 'pinned' and self.pinned)
class AbstractAction:

    """Attribute ``action`` of AbstractTab for Qt WebActions."""

    # The class actions are defined on (QWeb{Engine,}Page)
    action_class = None  # type: type
    # The type of the actions (QWeb{Engine,}Page.WebAction)
    action_base = None  # type: type

    def __init__(self, tab: 'AbstractTab') -> None:
        self._tab = tab
        self._widget = typing.cast(QWidget, None)

    def exit_fullscreen(self) -> None:
        """Exit the fullscreen mode."""
        raise NotImplementedError

    def save_page(self) -> None:
        """Save the current page."""
        raise NotImplementedError

    def run_string(self, name: str) -> None:
        """Trigger the web action with the given name on the page."""
        action = getattr(self.action_class, name, None)
        if not isinstance(action, self.action_base):
            raise WebTabError("{} is not a valid web action!".format(name))
        self._widget.triggerPageAction(action)

    def show_source(
            self,
            pygments: bool = False  # pylint: disable=redefined-outer-name
    ) -> None:
        """Show the source of the current page in a new tab."""
        raise NotImplementedError

    def _show_source_pygments(self) -> None:
        """Dump the page and show its highlighted source in a new tab."""

        def show_source_cb(source: str) -> None:
            """Show source as soon as it's ready."""
            # WORKAROUND for https://github.com/PyCQA/pylint/issues/491
            # pylint: disable=no-member
            lexer = pygments.lexers.HtmlLexer()
            formatter = pygments.formatters.HtmlFormatter(
                full=True, linenos='table')
            # pylint: enable=no-member
            highlighted = pygments.highlight(source, lexer, formatter)

            tabbed_browser = objreg.get('tabbed-browser', scope='window',
                                        window=self._tab.win_id)
            new_tab = tabbed_browser.tabopen(background=False, related=True)
            new_tab.set_html(highlighted, self._tab.url())
            new_tab.data.viewing_source = True

        self._tab.dump_async(show_source_cb)
class AbstractPrinting:

    """Attribute ``printing`` of AbstractTab for printing the page."""

    def __init__(self, tab: 'AbstractTab') -> None:
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def check_pdf_support(self) -> None:
        """Check whether writing to PDFs is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def check_printer_support(self) -> None:
        """Check whether writing to a printer is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def check_preview_support(self) -> None:
        """Check whether showing a print preview is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def to_pdf(self, filename: str) -> bool:
        """Print the tab to a PDF with the given filename."""
        raise NotImplementedError

    def to_printer(self, printer: QPrinter,
                   callback: typing.Callable[[bool], None] = None) -> None:
        """Print the tab.

        Args:
            printer: The QPrinter to print to.
            callback: Called with a boolean
                      (True if printing succeeded, False otherwise)
        """
        raise NotImplementedError

    def show_dialog(self) -> None:
        """Print with a QPrintDialog."""
        self.check_printer_support()

        def print_callback(ok: bool) -> None:
            """Called when printing finished."""
            if not ok:
                message.error("Printing failed!")
            # Only delete the dialog after printing finished, as it provides
            # the QPrinter passed to to_printer() above.
            diag.deleteLater()

        def do_print() -> None:
            """Called when the dialog was closed."""
            self.to_printer(diag.printer(), print_callback)

        diag = QPrintDialog(self._tab)
        if utils.is_mac:
            # For some reason we get a segfault when using open() on macOS
            ret = diag.exec_()
            if ret == QDialog.Accepted:
                do_print()
        else:
            diag.open(do_print)
class AbstractSearch(QObject):

    """Attribute ``search`` of AbstractTab for doing searches.

    Attributes:
        text: The last thing this view was searched for.
        search_displayed: Whether we're currently displaying search results in
                          this view.
        _flags: The flags of the last search (needs to be set by subclasses).
        _widget: The underlying WebView widget.
    """

    #: Signal emitted when a search was finished
    #: (True if the text was found, False otherwise)
    finished = pyqtSignal(bool)
    #: Signal emitted when an existing search was cleared.
    cleared = pyqtSignal()

    _Callback = typing.Callable[[bool], None]

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None):
        super().__init__(parent)
        self._tab = tab
        self._widget = typing.cast(QWidget, None)
        self.search_displayed = False
        self.text = None  # type: typing.Optional[str]

    def _is_case_sensitive(self, ignore_case: usertypes.IgnoreCase) -> bool:
        """Decide whether the search should be case-sensitive.

        This assumes self.text is already set properly.

        Arguments:
            ignore_case: The ignore_case value from the config.
        """
        assert self.text is not None
        return {
            usertypes.IgnoreCase.smart: not self.text.islower(),
            usertypes.IgnoreCase.never: True,
            usertypes.IgnoreCase.always: False,
        }[ignore_case]

    def search(self, text: str, *,
               ignore_case: usertypes.IgnoreCase = usertypes.IgnoreCase.never,
               reverse: bool = False,
               result_cb: _Callback = None) -> None:
        """Find the given text on the page.

        Args:
            text: The text to search for.
            ignore_case: Search case-insensitively.
            reverse: Reverse search direction.
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def clear(self) -> None:
        """Clear the current search."""
        raise NotImplementedError

    def prev_result(self, *, result_cb: _Callback = None) -> None:
        """Go to the previous result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def next_result(self, *, result_cb: _Callback = None) -> None:
        """Go to the next result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError
class AbstractZoom(QObject):

    """Attribute ``zoom`` of AbstractTab for controlling zoom."""

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None) -> None:
        super().__init__(parent)
        self._tab = tab
        self._widget = typing.cast(QWidget, None)
        # Tracks whether the user moved away from the configured default zoom.
        self._default_zoom_changed = False
        self._init_neighborlist()
        config.instance.changed.connect(self._on_config_changed)
        self._zoom_factor = float(config.val.zoom.default) / 100

    @pyqtSlot(str)
    def _on_config_changed(self, option: str) -> None:
        """Rebuild zoom levels / re-apply the default on config changes."""
        if option not in ('zoom.levels', 'zoom.default'):
            return
        # Only follow the new default if the user didn't zoom manually.
        if not self._default_zoom_changed:
            self.set_factor(float(config.val.zoom.default) / 100)
        self._init_neighborlist()

    def _init_neighborlist(self) -> None:
        """Initialize self._neighborlist.

        It is a NeighborList with the zoom levels."""
        self._neighborlist = usertypes.NeighborList(
            config.val.zoom.levels, mode=usertypes.NeighborList.Modes.edge
        )  # type: usertypes.NeighborList[float]
        self._neighborlist.fuzzyval = config.val.zoom.default

    def apply_offset(self, offset: int) -> float:
        """Increase/Decrease the zoom level by the given offset.

        Args:
            offset: The offset in the zoom level list.

        Return:
            The new zoom level.
        """
        new_level = self._neighborlist.getitem(offset)
        self.set_factor(float(new_level) / 100, fuzzyval=False)
        return new_level

    def _set_factor_internal(self, factor: float) -> None:
        raise NotImplementedError

    def set_factor(self, factor: float, *, fuzzyval: bool = True) -> None:
        """Zoom to a given zoom factor.

        Args:
            factor: The zoom factor as float.
            fuzzyval: Whether to set the NeighborLists fuzzyval.
        """
        if fuzzyval:
            self._neighborlist.fuzzyval = int(factor * 100)
        if factor < 0:
            raise ValueError("Can't zoom to factor {}!".format(factor))

        self._default_zoom_changed = (
            factor != float(config.val.zoom.default) / 100)
        self._zoom_factor = factor
        self._set_factor_internal(factor)

    def factor(self) -> float:
        """Return the currently set zoom factor."""
        return self._zoom_factor

    def apply_default(self) -> None:
        """Apply the configured default zoom to the widget."""
        self._set_factor_internal(float(config.val.zoom.default) / 100)

    def reapply(self) -> None:
        """Re-apply the stored zoom factor to the widget."""
        self._set_factor_internal(self._zoom_factor)
class AbstractCaret(QObject):

    """Attribute ``caret`` of AbstractTab for caret browsing."""

    #: Signal emitted when the selection was toggled.
    #: (argument - whether the selection is now active)
    selection_toggled = pyqtSignal(bool)
    #: Emitted when a ``follow_selection`` action is done.
    follow_selected_done = pyqtSignal()

    def __init__(self,
                 tab: 'AbstractTab',
                 mode_manager: modeman.ModeManager,
                 parent: QWidget = None) -> None:
        super().__init__(parent)
        self._tab = tab
        self._widget = typing.cast(QWidget, None)
        self.selection_enabled = False
        self._mode_manager = mode_manager
        # Subclasses react to entering/leaving caret mode.
        mode_manager.entered.connect(self._on_mode_entered)
        mode_manager.left.connect(self._on_mode_left)

    def _on_mode_entered(self, mode: usertypes.KeyMode) -> None:
        raise NotImplementedError

    def _on_mode_left(self, mode: usertypes.KeyMode) -> None:
        raise NotImplementedError

    def move_to_next_line(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_line(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_next_char(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_char(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_next_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_line(self) -> None:
        raise NotImplementedError

    def move_to_end_of_line(self) -> None:
        raise NotImplementedError

    def move_to_start_of_next_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_prev_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_next_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_prev_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_document(self) -> None:
        raise NotImplementedError

    def move_to_end_of_document(self) -> None:
        raise NotImplementedError

    def toggle_selection(self) -> None:
        raise NotImplementedError

    def drop_selection(self) -> None:
        raise NotImplementedError

    def selection(self, callback: typing.Callable[[str], None]) -> None:
        raise NotImplementedError

    def reverse_selection(self) -> None:
        raise NotImplementedError

    def _follow_enter(self, tab: bool) -> None:
        """Follow a link by faking an enter press."""
        # Ctrl+Enter requests a new tab; plain Enter follows in-place
        # (Qt.NoModifier is fake_key_press' default modifier).
        modifier = Qt.ControlModifier if tab else Qt.NoModifier
        self._tab.fake_key_press(Qt.Key_Enter, modifier=modifier)

    def follow_selected(self, *, tab: bool = False) -> None:
        raise NotImplementedError
class AbstractScroller(QObject):

    """Attribute ``scroller`` of AbstractTab to manage scroll position."""

    #: Signal emitted when the scroll position changed (int, int)
    perc_changed = pyqtSignal(int, int)
    #: Signal emitted before the user requested a jump.
    #: Used to set the special ' mark so the user can return.
    before_jump_requested = pyqtSignal()

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None):
        super().__init__(parent)
        self._tab = tab
        # Filled in later via _init_widget() once the real view exists.
        self._widget = typing.cast(QWidget, None)
        # Optional verbose logging of every scroll position change,
        # enabled with the 'log-scroll-pos' debug flag.
        if 'log-scroll-pos' in objects.debug_flags:
            self.perc_changed.connect(self._log_scroll_pos_change)

    @pyqtSlot()
    def _log_scroll_pos_change(self) -> None:
        # vdebug is added by qutebrowser's logging setup, hence the ignore.
        log.webview.vdebug(  # type: ignore
            "Scroll position changed to {}".format(self.pos_px()))

    def _init_widget(self, widget: QWidget) -> None:
        # Called by AbstractTab._set_widget to wire in the backend widget.
        self._widget = widget

    def pos_px(self) -> int:
        """Return the current scroll position in pixels."""
        raise NotImplementedError

    def pos_perc(self) -> int:
        """Return the current scroll position in percent."""
        raise NotImplementedError

    def to_perc(self, x: int = None, y: int = None) -> None:
        """Scroll to the given horizontal/vertical percentage."""
        raise NotImplementedError

    def to_point(self, point: QPoint) -> None:
        """Scroll to the given pixel position."""
        raise NotImplementedError

    def to_anchor(self, name: str) -> None:
        """Scroll to the given page anchor."""
        raise NotImplementedError

    def delta(self, x: int = 0, y: int = 0) -> None:
        """Scroll by the given pixel deltas."""
        raise NotImplementedError

    def delta_page(self, x: float = 0, y: float = 0) -> None:
        """Scroll by the given fractions of a page."""
        raise NotImplementedError

    def up(self, count: int = 1) -> None:
        raise NotImplementedError

    def down(self, count: int = 1) -> None:
        raise NotImplementedError

    def left(self, count: int = 1) -> None:
        raise NotImplementedError

    def right(self, count: int = 1) -> None:
        raise NotImplementedError

    def top(self) -> None:
        raise NotImplementedError

    def bottom(self) -> None:
        raise NotImplementedError

    def page_up(self, count: int = 1) -> None:
        raise NotImplementedError

    def page_down(self, count: int = 1) -> None:
        raise NotImplementedError

    def at_top(self) -> bool:
        """Whether the view is scrolled to the very top."""
        raise NotImplementedError

    def at_bottom(self) -> bool:
        """Whether the view is scrolled to the very bottom."""
        raise NotImplementedError
class AbstractHistoryPrivate:

    """Private API related to the history."""

    def __init__(self, tab: 'AbstractTab'):
        self._tab = tab
        # Replaced by AbstractTab._set_widget with the backend's real
        # history object (QWebHistory or QWebEngineHistory).
        self._history = typing.cast(
            typing.Union['QWebHistory', 'QWebEngineHistory'], None)

    def serialize(self) -> bytes:
        """Serialize into an opaque format understood by self.deserialize."""
        raise NotImplementedError

    def deserialize(self, data: bytes) -> None:
        """Deserialize from a format produced by self.serialize."""
        raise NotImplementedError

    def load_items(self, items: typing.Sequence) -> None:
        """Deserialize from a list of WebHistoryItems."""
        raise NotImplementedError
class AbstractHistory:

    """The history attribute of a AbstractTab."""

    def __init__(self, tab: 'AbstractTab') -> None:
        self._tab = tab
        # Replaced later with the backend-specific history object.
        self._history = typing.cast(
            typing.Union['QWebHistory', 'QWebEngineHistory'], None)
        self.private_api = AbstractHistoryPrivate(tab)

    def __len__(self) -> int:
        raise NotImplementedError

    def __iter__(self) -> typing.Iterable:
        raise NotImplementedError

    def _check_count(self, count: int) -> None:
        """Raise WebTabError if the given count is negative."""
        if count < 0:
            raise WebTabError("count needs to be positive!")

    def current_idx(self) -> int:
        raise NotImplementedError

    def back(self, count: int = 1) -> None:
        """Go back in the tab's history."""
        self._check_count(count)
        target = self.current_idx() - count
        if target < 0:
            # Clamp to the oldest entry, but still report the overshoot.
            self._go_to_item(self._item_at(0))
            raise WebTabError("At beginning of history.")
        self._go_to_item(self._item_at(target))

    def forward(self, count: int = 1) -> None:
        """Go forward in the tab's history."""
        self._check_count(count)
        target = self.current_idx() + count
        if target >= len(self):
            # Clamp to the newest entry, but still report the overshoot.
            self._go_to_item(self._item_at(len(self) - 1))
            raise WebTabError("At end of history.")
        self._go_to_item(self._item_at(target))

    def can_go_back(self) -> bool:
        raise NotImplementedError

    def can_go_forward(self) -> bool:
        raise NotImplementedError

    def _item_at(self, i: int) -> typing.Any:
        raise NotImplementedError

    def _go_to_item(self, item: typing.Any) -> None:
        raise NotImplementedError
class AbstractElements:

    """Finding and handling of elements on the page."""

    # Callback receiving all elements matching a selector.
    _MultiCallback = typing.Callable[
        [typing.Sequence['webelem.AbstractWebElement']], None]
    # Callback receiving a single element, or None if nothing matched.
    _SingleCallback = typing.Callable[
        [typing.Optional['webelem.AbstractWebElement']], None]
    # Callback receiving the exception when a search failed.
    _ErrorCallback = typing.Callable[[Exception], None]

    def __init__(self, tab: 'AbstractTab') -> None:
        # Filled in by AbstractTab._set_widget once the real view exists.
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def find_css(self, selector: str,
                 callback: _MultiCallback,
                 error_cb: _ErrorCallback, *,
                 only_visible: bool = False) -> None:
        """Find all HTML elements matching a given selector async.

        If there's an error, the callback is called with a webelem.Error
        instance.

        Args:
            callback: The callback to be called when the search finished.
            error_cb: The callback to be called when an error occurred.
            selector: The CSS selector to search for.
            only_visible: Only show elements which are visible on screen.
        """
        raise NotImplementedError

    def find_id(self, elem_id: str, callback: _SingleCallback) -> None:
        """Find the HTML element with the given ID async.

        Args:
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
            elem_id: The ID to search for.
        """
        raise NotImplementedError

    def find_focused(self, callback: _SingleCallback) -> None:
        """Find the focused element on the page async.

        Args:
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
        """
        raise NotImplementedError

    def find_at_pos(self, pos: QPoint, callback: _SingleCallback) -> None:
        """Find the element at the given position async.

        This is also called "hit test" elsewhere.

        Args:
            pos: The QPoint to get the element for.
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
        """
        raise NotImplementedError
class AbstractAudio(QObject):

    """Handling of audio/muting for this tab."""

    #: Emitted when the mute state of the tab changed (new state as bool).
    muted_changed = pyqtSignal(bool)
    #: Emitted when the "recently audible" state changed (new state as bool).
    recently_audible_changed = pyqtSignal(bool)

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None) -> None:
        super().__init__(parent)
        # Filled in by AbstractTab._set_widget once the real view exists.
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def set_muted(self, muted: bool, override: bool = False) -> None:
        """Set this tab as muted or not.

        Arguments:
            override: If set to True, muting/unmuting was done manually and
                      overrides future automatic mute/unmute changes based on
                      the URL.
        """
        raise NotImplementedError

    def is_muted(self) -> bool:
        """Whether this tab is currently muted."""
        raise NotImplementedError

    def is_recently_audible(self) -> bool:
        """Whether this tab has had audio playing recently."""
        raise NotImplementedError
class AbstractTabPrivate:

    """Tab-related methods which are only needed in the core.

    Those methods are not part of the API which is exposed to extensions, and
    should ideally be removed at some point in the future.
    """

    def __init__(self, mode_manager: modeman.ModeManager,
                 tab: 'AbstractTab') -> None:
        self._widget = typing.cast(QWidget, None)
        self._tab = tab
        self._mode_manager = mode_manager

    def event_target(self) -> QWidget:
        """Return the widget events should be sent to."""
        raise NotImplementedError

    def handle_auto_insert_mode(self, ok: bool) -> None:
        """Handle `input.insert_mode.auto_load` after loading finished."""
        if not ok or not config.cache['input.insert_mode.auto_load']:
            return
        if self._mode_manager.mode == usertypes.KeyMode.insert:
            # Already in insert mode, nothing to do.
            return

        def _on_focused_elem(
                elem: typing.Optional['webelem.AbstractWebElement']
        ) -> None:
            """Called from JS after finding the focused element."""
            if elem is None:
                log.webview.debug("No focused element!")
            elif elem.is_editable():
                modeman.enter(self._tab.win_id, usertypes.KeyMode.insert,
                              'load finished', only_if_normal=True)

        self._tab.elements.find_focused(_on_focused_elem)

    def clear_ssl_errors(self) -> None:
        raise NotImplementedError

    def networkaccessmanager(self) -> typing.Optional[QNetworkAccessManager]:
        """Get the QNetworkAccessManager for this tab.

        This is only implemented for QtWebKit.
        For QtWebEngine, always returns None.
        """
        raise NotImplementedError

    def shutdown(self) -> None:
        raise NotImplementedError
class AbstractTab(QWidget):

    """An adapter for QWebView/QWebEngineView representing a single tab."""

    #: Signal emitted when a website requests to close this tab.
    window_close_requested = pyqtSignal()
    #: Signal emitted when a link is hovered (the hover text)
    link_hovered = pyqtSignal(str)
    #: Signal emitted when a page started loading
    load_started = pyqtSignal()
    #: Signal emitted when a page is loading (progress percentage)
    load_progress = pyqtSignal(int)
    #: Signal emitted when a page finished loading (success as bool)
    load_finished = pyqtSignal(bool)
    #: Signal emitted when a page's favicon changed (icon as QIcon)
    icon_changed = pyqtSignal(QIcon)
    #: Signal emitted when a page's title changed (new title as str)
    title_changed = pyqtSignal(str)
    #: Signal emitted when a new tab should be opened (url as QUrl)
    new_tab_requested = pyqtSignal(QUrl)
    #: Signal emitted when a page's URL changed (url as QUrl)
    url_changed = pyqtSignal(QUrl)
    #: Signal emitted when a tab's content size changed
    #: (new size as QSizeF)
    contents_size_changed = pyqtSignal(QSizeF)
    #: Signal emitted when a page requested full-screen (bool)
    fullscreen_requested = pyqtSignal(bool)
    #: Signal emitted before load starts (URL as QUrl)
    before_load_started = pyqtSignal(QUrl)

    # Signal emitted when a page's load status changed
    # (argument: usertypes.LoadStatus)
    load_status_changed = pyqtSignal(usertypes.LoadStatus)
    # Signal emitted before shutting down
    shutting_down = pyqtSignal()
    # Signal emitted when a history item should be added
    history_item_triggered = pyqtSignal(QUrl, QUrl, str)
    # Signal emitted when the underlying renderer process terminated.
    # arg 0: A TerminationStatus member.
    # arg 1: The exit code.
    renderer_process_terminated = pyqtSignal(TerminationStatus, int)

    # Hosts for which a certificate error happened. Shared between all tabs.
    #
    # Note that we remember hosts here, without scheme/port:
    # QtWebEngine/Chromium also only remembers hostnames, and certificates are
    # for a given hostname anyways.
    _insecure_hosts = set()  # type: typing.Set[str]

    def __init__(self, *, win_id: int, private: bool,
                 parent: QWidget = None) -> None:
        self.is_private = private
        self.win_id = win_id
        self.tab_id = next(tab_id_gen)
        super().__init__(parent)

        self.registry = objreg.ObjectRegistry()
        tab_registry = objreg.get('tab-registry', scope='window',
                                  window=win_id)
        tab_registry[self.tab_id] = self
        objreg.register('tab', self, registry=self.registry)

        self.data = TabData()
        self._layout = miscwidgets.WrapperLayout(self)
        self._widget = typing.cast(QWidget, None)
        self._progress = 0
        self._load_status = usertypes.LoadStatus.none
        self._tab_event_filter = eventfilter.TabEventFilter(
            self, parent=self)
        self.backend = None  # type: typing.Optional[usertypes.Backend]

        # If true, this tab has been requested to be removed (or is removed).
        self.pending_removal = False
        self.shutting_down.connect(functools.partial(
            setattr, self, 'pending_removal', True))

        self.before_load_started.connect(self._on_before_load_started)

    def _set_widget(self, widget: QWidget) -> None:
        """Wire a freshly created backend widget into all helper objects."""
        # pylint: disable=protected-access
        self._widget = widget
        self._layout.wrap(self, widget)
        self.history._history = widget.history()
        self.history.private_api._history = widget.history()
        self.scroller._init_widget(widget)
        self.caret._widget = widget
        self.zoom._widget = widget
        self.search._widget = widget
        self.printing._widget = widget
        self.action._widget = widget
        self.elements._widget = widget
        self.audio._widget = widget
        self.private_api._widget = widget
        self.settings._settings = widget.settings()

        self._install_event_filter()
        self.zoom.apply_default()

    def _install_event_filter(self) -> None:
        raise NotImplementedError

    def _set_load_status(self, val: usertypes.LoadStatus) -> None:
        """Setter for load_status."""
        if not isinstance(val, usertypes.LoadStatus):
            raise TypeError("Type {} is no LoadStatus member!".format(val))
        log.webview.debug("load status for {}: {}".format(repr(self), val))
        self._load_status = val
        self.load_status_changed.emit(val)

    def send_event(self, evt: QEvent) -> None:
        """Send the given event to the underlying widget.

        The event will be sent via QApplication.postEvent.
        Note that a posted event must not be re-used in any way!
        """
        # This only gives us some mild protection against re-using events, but
        # it's certainly better than a segfault.
        if getattr(evt, 'posted', False):
            raise utils.Unreachable("Can't re-use an event which was already "
                                    "posted!")

        recipient = self.private_api.event_target()
        if recipient is None:
            # https://github.com/qutebrowser/qutebrowser/issues/3888
            log.webview.warning("Unable to find event target!")
            return

        evt.posted = True
        QApplication.postEvent(recipient, evt)

    def navigation_blocked(self) -> bool:
        """Test if navigation is allowed on the current tab."""
        return self.data.pinned and config.val.tabs.pinned.frozen

    @pyqtSlot(QUrl)
    def _on_before_load_started(self, url: QUrl) -> None:
        """Adjust the title if we are going to visit a URL soon."""
        qtutils.ensure_valid(url)
        url_string = url.toDisplayString()
        log.webview.debug("Going to start loading: {}".format(url_string))
        self.title_changed.emit(url_string)

    @pyqtSlot(QUrl)
    def _on_url_changed(self, url: QUrl) -> None:
        """Update title when URL has changed and no title is available."""
        if url.isValid() and not self.title():
            self.title_changed.emit(url.toDisplayString())
        self.url_changed.emit(url)

    @pyqtSlot()
    def _on_load_started(self) -> None:
        self._progress = 0
        self.data.viewing_source = False
        self._set_load_status(usertypes.LoadStatus.loading)
        self.load_started.emit()

    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(
            self,
            navigation: usertypes.NavigationRequest
    ) -> None:
        """Handle common acceptNavigationRequest code."""
        url = utils.elide(navigation.url.toDisplayString(), 100)
        log.webview.debug("navigation request: url {}, type {}, is_main_frame "
                          "{}".format(url,
                                      navigation.navigation_type,
                                      navigation.is_main_frame))

        if navigation.is_main_frame:
            self.data.last_navigation = navigation

        if not navigation.url.isValid():
            # Also a WORKAROUND for missing IDNA 2008 support in QUrl, see
            # https://bugreports.qt.io/browse/QTBUG-60364
            if navigation.navigation_type == navigation.Type.link_clicked:
                msg = urlutils.get_errstring(navigation.url,
                                             "Invalid link clicked")
                message.error(msg)
                self.data.open_target = usertypes.ClickTarget.normal
            log.webview.debug("Ignoring invalid URL {} in "
                              "acceptNavigationRequest: {}".format(
                                  navigation.url.toDisplayString(),
                                  navigation.url.errorString()))
            navigation.accepted = False

    @pyqtSlot(bool)
    def _on_load_finished(self, ok: bool) -> None:
        assert self._widget is not None
        if sip.isdeleted(self._widget):
            # https://github.com/qutebrowser/qutebrowser/issues/3498
            return

        if sessions.session_manager is not None:
            sessions.session_manager.save_autosave()

        self.load_finished.emit(ok)

        if not self.title():
            self.title_changed.emit(self.url().toDisplayString())

        self.zoom.reapply()

    def _update_load_status(self, ok: bool) -> None:
        """Update the load status after a page finished loading.

        Needs to be called by subclasses to trigger a load status update, e.g.
        as a response to a loadFinished signal.
        """
        # NOTE: The old implementation had an unreachable `elif ok:` branch
        # directly after `if ok:` - that dead code has been removed.
        if ok:
            if self.url().scheme() == 'https':
                if self.url().host() in self._insecure_hosts:
                    # A certificate error was seen for this host before, so
                    # the page loaded, but must not be shown as secure.
                    self._set_load_status(usertypes.LoadStatus.warn)
                else:
                    self._set_load_status(usertypes.LoadStatus.success_https)
            else:
                self._set_load_status(usertypes.LoadStatus.success)
        else:
            self._set_load_status(usertypes.LoadStatus.error)

    @pyqtSlot()
    def _on_history_trigger(self) -> None:
        """Emit history_item_triggered based on backend-specific signal."""
        raise NotImplementedError

    @pyqtSlot(int)
    def _on_load_progress(self, perc: int) -> None:
        self._progress = perc
        self.load_progress.emit(perc)

    def url(self, *, requested: bool = False) -> QUrl:
        raise NotImplementedError

    def progress(self) -> int:
        """Return the current load progress in percent."""
        return self._progress

    def load_status(self) -> usertypes.LoadStatus:
        """Return the current load status."""
        return self._load_status

    def _load_url_prepare(self, url: QUrl, *,
                          emit_before_load_started: bool = True) -> None:
        qtutils.ensure_valid(url)
        if emit_before_load_started:
            self.before_load_started.emit(url)

    def load_url(self, url: QUrl, *,
                 emit_before_load_started: bool = True) -> None:
        raise NotImplementedError

    def reload(self, *, force: bool = False) -> None:
        raise NotImplementedError

    def stop(self) -> None:
        raise NotImplementedError

    def fake_key_press(self,
                       key: Qt.Key,
                       modifier: Qt.KeyboardModifier = Qt.NoModifier) -> None:
        """Send a fake key event to this tab."""
        press_evt = QKeyEvent(QEvent.KeyPress, key, modifier, 0, 0, 0)
        release_evt = QKeyEvent(QEvent.KeyRelease, key, modifier,
                                0, 0, 0)
        self.send_event(press_evt)
        self.send_event(release_evt)

    def dump_async(self,
                   callback: typing.Callable[[str], None], *,
                   plain: bool = False) -> None:
        """Dump the current page's html asynchronously.

        The given callback will be called with the result when dumping is
        complete.
        """
        raise NotImplementedError

    def run_js_async(
            self,
            code: str,
            callback: typing.Callable[[typing.Any], None] = None, *,
            world: typing.Union[usertypes.JsWorld, int] = None
    ) -> None:
        """Run javascript async.

        The given callback will be called with the result when running JS is
        complete.

        Args:
            code: The javascript code to run.
            callback: The callback to call with the result, or None.
            world: A world ID (int or usertypes.JsWorld member) to run the JS
                   in the main world or in another isolated world.
        """
        raise NotImplementedError

    def title(self) -> str:
        raise NotImplementedError

    def icon(self) -> None:
        raise NotImplementedError

    def set_html(self, html: str, base_url: QUrl = QUrl()) -> None:
        raise NotImplementedError

    def __repr__(self) -> str:
        try:
            qurl = self.url()
            url = qurl.toDisplayString(QUrl.EncodeUnicode)  # type: ignore
        except (AttributeError, RuntimeError) as exc:
            url = '<{}>'.format(exc.__class__.__name__)
        else:
            url = utils.elide(url, 100)
        return utils.get_repr(self, tab_id=self.tab_id, url=url)

    def is_deleted(self) -> bool:
        """Whether the underlying C++ widget object was already deleted."""
        assert self._widget is not None
        return sip.isdeleted(self._widget)
| ./CrossVul/dataset_final_sorted/CWE-684/py/good_3920_0 |
crossvul-python_data_good_3921_2 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Wrapper over our (QtWebKit) WebView."""
import re
import functools
import xml.etree.ElementTree
from PyQt5.QtCore import (pyqtSlot, Qt, QEvent, QUrl, QPoint, QTimer, QSizeF,
QSize)
from PyQt5.QtGui import QKeyEvent, QIcon
from PyQt5.QtWebKitWidgets import QWebPage, QWebFrame
from PyQt5.QtWebKit import QWebSettings
from PyQt5.QtPrintSupport import QPrinter
from qutebrowser.browser import browsertab, shared
from qutebrowser.browser.webkit import (webview, tabhistory, webkitelem,
webkitsettings)
from qutebrowser.utils import qtutils, usertypes, utils, log, debug
from qutebrowser.qt import sip
class WebKitAction(browsertab.AbstractAction):

    """QtWebKit implementations related to web actions."""

    # Used by the base class to map action names to QWebPage web actions.
    action_class = QWebPage
    action_base = QWebPage.WebAction

    def exit_fullscreen(self):
        # There's no fullscreen handling with the QtWebKit backend.
        raise browsertab.UnsupportedOperationError

    def save_page(self):
        """Save the current page."""
        # Not available with QtWebKit.
        raise browsertab.UnsupportedOperationError

    def show_source(self, pygments=False):
        # QtWebKit has no built-in source viewer, so the pygments-based
        # fallback is used unconditionally - the `pygments` argument is
        # accepted for interface compatibility but ignored here.
        self._show_source_pygments()
class WebKitPrinting(browsertab.AbstractPrinting):

    """QtWebKit implementations related to printing."""

    def check_pdf_support(self):
        # Printing to PDF is always possible with QtWebKit.
        pass

    def check_printer_support(self):
        # Printing to a printer is always possible with QtWebKit.
        pass

    def check_preview_support(self):
        # Print previews are always possible with QtWebKit.
        pass

    def to_pdf(self, filename):
        """Print the page to the given PDF file."""
        pdf_printer = QPrinter()
        pdf_printer.setOutputFileName(filename)
        self.to_printer(pdf_printer)

    def to_printer(self, printer, callback=None):
        """Print the page on the given printer, then invoke callback."""
        self._widget.print(printer)
        # QtWebKit gives us no success/failure feedback, so we have to
        # unconditionally report success.
        if callback is not None:
            callback(True)
class WebKitSearch(browsertab.AbstractSearch):

    """QtWebKit implementations related to searching on the page."""

    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        # Find flags used for the last search; rebuilt in search().
        self._flags = QWebPage.FindFlags(0)

    def _call_cb(self, callback, found, text, flags, caller):
        """Call the given callback if it's non-None.

        Delays the call via a QTimer so the website is re-rendered in between.

        Args:
            callback: What to call
            found: If the text was found
            text: The text searched for
            flags: The flags searched with
            caller: Name of the caller.
        """
        found_text = 'found' if found else "didn't find"
        # Removing FindWrapsAroundDocument to get the same logging as with
        # QtWebEngine
        debug_flags = debug.qflags_key(
            QWebPage, flags & ~QWebPage.FindWrapsAroundDocument,
            klass=QWebPage.FindFlag)
        if debug_flags != '0x0000':
            flag_text = 'with flags {}'.format(debug_flags)
        else:
            flag_text = ''
        log.webview.debug(' '.join([caller, found_text, text, flag_text])
                          .strip())
        if callback is not None:
            # Defer the callback so the page gets re-rendered first.
            QTimer.singleShot(0, functools.partial(callback, found))
        self.finished.emit(found)

    def clear(self):
        """Remove the current search mark and all highlights."""
        if self.search_displayed:
            self.cleared.emit()
        self.search_displayed = False
        # We first clear the marked text, then the highlights
        self._widget.findText('')
        self._widget.findText('', QWebPage.HighlightAllOccurrences)

    def search(self, text, *, ignore_case='never', reverse=False,
               result_cb=None):
        """Search for the given text, highlighting and marking matches."""
        # Don't go to next entry on duplicate search
        if self.text == text and self.search_displayed:
            log.webview.debug("Ignoring duplicate search request"
                              " for {}".format(text))
            return

        # Clear old search results, this is done automatically on QtWebEngine.
        self.clear()

        self.text = text
        self.search_displayed = True
        self._flags = QWebPage.FindWrapsAroundDocument
        if self._is_case_sensitive(ignore_case):
            self._flags |= QWebPage.FindCaseSensitively
        if reverse:
            self._flags |= QWebPage.FindBackward
        # We actually search *twice* - once to highlight everything, then again
        # to get a mark so we can navigate.
        found = self._widget.findText(text, self._flags)
        self._widget.findText(text,
                              self._flags | QWebPage.HighlightAllOccurrences)
        self._call_cb(result_cb, found, text, self._flags, 'search')

    def next_result(self, *, result_cb=None):
        """Jump to the next match, re-using the flags from the last search."""
        self.search_displayed = True
        found = self._widget.findText(self.text, self._flags)
        self._call_cb(result_cb, found, self.text, self._flags, 'next_result')

    def prev_result(self, *, result_cb=None):
        """Jump to the previous match by inverting the search direction."""
        self.search_displayed = True
        # The int() here makes sure we get a copy of the flags.
        flags = QWebPage.FindFlags(int(self._flags))
        if flags & QWebPage.FindBackward:
            flags &= ~QWebPage.FindBackward
        else:
            flags |= QWebPage.FindBackward
        found = self._widget.findText(self.text, flags)
        self._call_cb(result_cb, found, self.text, flags, 'prev_result')
class WebKitCaret(browsertab.AbstractCaret):
"""QtWebKit implementations related to moving the cursor/selection."""
@pyqtSlot(usertypes.KeyMode)
def _on_mode_entered(self, mode):
if mode != usertypes.KeyMode.caret:
return
self.selection_enabled = self._widget.hasSelection()
self.selection_toggled.emit(self.selection_enabled)
settings = self._widget.settings()
settings.setAttribute(QWebSettings.CaretBrowsingEnabled, True)
if self._widget.isVisible():
# Sometimes the caret isn't immediately visible, but unfocusing
# and refocusing it fixes that.
self._widget.clearFocus()
self._widget.setFocus(Qt.OtherFocusReason)
# Move the caret to the first element in the viewport if there
# isn't any text which is already selected.
#
# Note: We can't use hasSelection() here, as that's always
# true in caret mode.
if not self.selection_enabled:
self._widget.page().currentFrame().evaluateJavaScript(
utils.read_file('javascript/position_caret.js'))
@pyqtSlot(usertypes.KeyMode)
def _on_mode_left(self, _mode):
settings = self._widget.settings()
if settings.testAttribute(QWebSettings.CaretBrowsingEnabled):
if self.selection_enabled and self._widget.hasSelection():
# Remove selection if it exists
self._widget.triggerPageAction(QWebPage.MoveToNextChar)
settings.setAttribute(QWebSettings.CaretBrowsingEnabled, False)
self.selection_enabled = False
def move_to_next_line(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToNextLine
else:
act = QWebPage.SelectNextLine
for _ in range(count):
self._widget.triggerPageAction(act)
def move_to_prev_line(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToPreviousLine
else:
act = QWebPage.SelectPreviousLine
for _ in range(count):
self._widget.triggerPageAction(act)
def move_to_next_char(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToNextChar
else:
act = QWebPage.SelectNextChar
for _ in range(count):
self._widget.triggerPageAction(act)
def move_to_prev_char(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToPreviousChar
else:
act = QWebPage.SelectPreviousChar
for _ in range(count):
self._widget.triggerPageAction(act)
def move_to_end_of_word(self, count=1):
if not self.selection_enabled:
act = [QWebPage.MoveToNextWord]
if utils.is_windows: # pragma: no cover
act.append(QWebPage.MoveToPreviousChar)
else:
act = [QWebPage.SelectNextWord]
if utils.is_windows: # pragma: no cover
act.append(QWebPage.SelectPreviousChar)
for _ in range(count):
for a in act:
self._widget.triggerPageAction(a)
def move_to_next_word(self, count=1):
if not self.selection_enabled:
act = [QWebPage.MoveToNextWord]
if not utils.is_windows: # pragma: no branch
act.append(QWebPage.MoveToNextChar)
else:
act = [QWebPage.SelectNextWord]
if not utils.is_windows: # pragma: no branch
act.append(QWebPage.SelectNextChar)
for _ in range(count):
for a in act:
self._widget.triggerPageAction(a)
def move_to_prev_word(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToPreviousWord
else:
act = QWebPage.SelectPreviousWord
for _ in range(count):
self._widget.triggerPageAction(act)
def move_to_start_of_line(self):
if not self.selection_enabled:
act = QWebPage.MoveToStartOfLine
else:
act = QWebPage.SelectStartOfLine
self._widget.triggerPageAction(act)
def move_to_end_of_line(self):
if not self.selection_enabled:
act = QWebPage.MoveToEndOfLine
else:
act = QWebPage.SelectEndOfLine
self._widget.triggerPageAction(act)
def move_to_start_of_next_block(self, count=1):
if not self.selection_enabled:
act = [QWebPage.MoveToNextLine,
QWebPage.MoveToStartOfBlock]
else:
act = [QWebPage.SelectNextLine,
QWebPage.SelectStartOfBlock]
for _ in range(count):
for a in act:
self._widget.triggerPageAction(a)
def move_to_start_of_prev_block(self, count=1):
if not self.selection_enabled:
act = [QWebPage.MoveToPreviousLine,
QWebPage.MoveToStartOfBlock]
else:
act = [QWebPage.SelectPreviousLine,
QWebPage.SelectStartOfBlock]
for _ in range(count):
for a in act:
self._widget.triggerPageAction(a)
def move_to_end_of_next_block(self, count=1):
if not self.selection_enabled:
act = [QWebPage.MoveToNextLine,
QWebPage.MoveToEndOfBlock]
else:
act = [QWebPage.SelectNextLine,
QWebPage.SelectEndOfBlock]
for _ in range(count):
for a in act:
self._widget.triggerPageAction(a)
def move_to_end_of_prev_block(self, count=1):
if not self.selection_enabled:
act = [QWebPage.MoveToPreviousLine, QWebPage.MoveToEndOfBlock]
else:
act = [QWebPage.SelectPreviousLine, QWebPage.SelectEndOfBlock]
for _ in range(count):
for a in act:
self._widget.triggerPageAction(a)
def move_to_start_of_document(self):
if not self.selection_enabled:
act = QWebPage.MoveToStartOfDocument
else:
act = QWebPage.SelectStartOfDocument
self._widget.triggerPageAction(act)
def move_to_end_of_document(self):
if not self.selection_enabled:
act = QWebPage.MoveToEndOfDocument
else:
act = QWebPage.SelectEndOfDocument
self._widget.triggerPageAction(act)
def toggle_selection(self):
self.selection_enabled = not self.selection_enabled
self.selection_toggled.emit(self.selection_enabled)
    def drop_selection(self):
        """Drop the current selection.

        Triggering a plain one-character caret move collapses any active
        selection in QtWebKit.
        """
        self._widget.triggerPageAction(QWebPage.MoveToNextChar)
    def selection(self, callback):
        """Invoke the callback with the currently selected text (a str)."""
        callback(self._widget.selectedText())
    def _follow_selected(self, *, tab=False):
        """Follow the currently selected element.

        Args:
            tab: Open the link in a new tab instead of the current one.

        Raises:
            browsertab.WebTabError: If the selected HTML can't be parsed or
                                    the anchor element has no href.
        """
        if QWebSettings.globalSettings().testAttribute(
                QWebSettings.JavascriptEnabled):
            # With JS available, simply click the selection's parent node (or
            # the focused element) inside the page.
            if tab:
                self._tab.data.override_target = usertypes.ClickTarget.tab
            self._tab.run_js_async("""
                const aElm = document.activeElement;
                if (window.getSelection().anchorNode) {
                    window.getSelection().anchorNode.parentNode.click();
                } else if (aElm && aElm !== document.body) {
                    aElm.click();
                }
            """)
        else:
            # JS disabled: fall back to parsing the selected HTML for an <a>
            # element and navigating to its href manually.
            selection = self._widget.selectedHtml()
            if not selection:
                # Getting here may mean we crashed, but we can't do anything
                # about that until this commit is released:
                # https://github.com/annulen/webkit/commit/0e75f3272d149bc64899c161f150eb341a2417af
                # TODO find a way to check if something is focused
                self._follow_enter(tab)
                return
            try:
                selected_element = xml.etree.ElementTree.fromstring(
                    '<html>{}</html>'.format(selection)).find('a')
            except xml.etree.ElementTree.ParseError:
                raise browsertab.WebTabError('Could not parse selected '
                                             'element!')
            if selected_element is not None:
                try:
                    url = selected_element.attrib['href']
                except KeyError:
                    raise browsertab.WebTabError('Anchor element without '
                                                 'href!')
                # Resolve relative hrefs against the current page URL.
                url = self._tab.url().resolved(QUrl(url))
                if tab:
                    self._tab.new_tab_requested.emit(url)
                else:
                    self._tab.openurl(url)
    def follow_selected(self, *, tab=False):
        """Follow the selected link, always emitting follow_selected_done.

        Args:
            tab: Open the link in a new tab instead of the current one.
        """
        try:
            self._follow_selected(tab=tab)
        finally:
            # Emit even when following raised, so callers can clean up.
            self.follow_selected_done.emit()
class WebKitZoom(browsertab.AbstractZoom):
    """QtWebKit implementations related to zooming."""
    def _set_factor_internal(self, factor):
        """Apply the zoom factor directly on the underlying web view."""
        self._widget.setZoomFactor(factor)
class WebKitScroller(browsertab.AbstractScroller):
    """QtWebKit implementations related to scrolling."""
    # FIXME:qtwebengine When to use the main frame, when the current one?
    def pos_px(self):
        """Return the scroll position in pixels as a QPoint."""
        return self._widget.page().mainFrame().scrollPosition()
    def pos_perc(self):
        """Return the scroll position as percentages."""
        return self._widget.scroll_pos
    def to_point(self, point):
        """Scroll to the given pixel position (QPoint)."""
        self._widget.page().mainFrame().setScrollPosition(point)
    def to_anchor(self, name):
        """Scroll to the anchor with the given name on the page."""
        self._widget.page().mainFrame().scrollToAnchor(name)
    def delta(self, x=0, y=0):
        """Scroll by the given amount of pixels.

        Args:
            x/y: Integer pixel deltas; overflow-checked before being passed
                 to QWebFrame.scroll(), which takes C ints.
        """
        qtutils.check_overflow(x, 'int')
        qtutils.check_overflow(y, 'int')
        self._widget.page().mainFrame().scroll(x, y)
    def delta_page(self, x=0.0, y=0.0):
        """Scroll by the given fraction of page widths/heights.

        Whole-number vertical deltas are handled via page_up/page_down key
        presses; fractional remainders are converted to pixel deltas.
        """
        if y.is_integer():
            y = int(y)
            if y == 0:
                pass
            elif y < 0:
                self.page_up(count=-y)
            elif y > 0:
                self.page_down(count=y)
            y = 0
        if x == 0 and y == 0:
            return
        size = self._widget.page().mainFrame().geometry()
        # delta() (and ultimately QWebFrame.scroll()) takes ints, so truncate
        # the fractional pixel offsets instead of passing floats through,
        # which would fail the 'int' overflow check above.
        self.delta(int(x * size.width()), int(y * size.height()))
    def to_perc(self, x=None, y=None):
        """Scroll to the given percentage positions (None = leave axis)."""
        if x is None and y == 0:
            self.top()
        elif x is None and y == 100:
            self.bottom()
        else:
            for val, orientation in [(x, Qt.Horizontal), (y, Qt.Vertical)]:
                if val is not None:
                    frame = self._widget.page().mainFrame()
                    maximum = frame.scrollBarMaximum(orientation)
                    if maximum == 0:
                        continue
                    pos = int(maximum * val / 100)
                    pos = qtutils.check_overflow(pos, 'int', fatal=False)
                    frame.setScrollBarValue(orientation, pos)
    def _key_press(self, key, count=1, getter_name=None, direction=None):
        """Send fake key presses to scroll, stopping at the scroll limits.

        Args:
            key: The Qt.Key to press.
            count: How many times to press it (capped at 5000).
            getter_name: Name of the QWebFrame method returning the scroll
                         limit for `direction`, used to abort early.
            direction: Qt.Horizontal or Qt.Vertical.
        """
        frame = self._widget.page().mainFrame()
        getter = None if getter_name is None else getattr(frame, getter_name)
        # FIXME:qtwebengine needed?
        # self._widget.setFocus()
        for _ in range(min(count, 5000)):
            # Abort scrolling if the minimum/maximum was reached.
            if (getter is not None and
                    frame.scrollBarValue(direction) == getter(direction)):
                return
            self._tab.key_press(key)
    def up(self, count=1):
        """Scroll up by count steps."""
        self._key_press(Qt.Key_Up, count, 'scrollBarMinimum', Qt.Vertical)
    def down(self, count=1):
        """Scroll down by count steps."""
        self._key_press(Qt.Key_Down, count, 'scrollBarMaximum', Qt.Vertical)
    def left(self, count=1):
        """Scroll left by count steps."""
        self._key_press(Qt.Key_Left, count, 'scrollBarMinimum', Qt.Horizontal)
    def right(self, count=1):
        """Scroll right by count steps."""
        self._key_press(Qt.Key_Right, count, 'scrollBarMaximum', Qt.Horizontal)
    def top(self):
        """Scroll to the top of the page."""
        self._key_press(Qt.Key_Home)
    def bottom(self):
        """Scroll to the bottom of the page."""
        self._key_press(Qt.Key_End)
    def page_up(self, count=1):
        """Scroll up by count pages."""
        self._key_press(Qt.Key_PageUp, count, 'scrollBarMinimum', Qt.Vertical)
    def page_down(self, count=1):
        """Scroll down by count pages."""
        self._key_press(Qt.Key_PageDown, count, 'scrollBarMaximum',
                        Qt.Vertical)
    def at_top(self):
        """Return True if scrolled to the very top."""
        return self.pos_px().y() == 0
    def at_bottom(self):
        """Return True if scrolled to the very bottom."""
        frame = self._widget.page().currentFrame()
        return self.pos_px().y() >= frame.scrollBarMaximum(Qt.Vertical)
class WebKitHistory(browsertab.AbstractHistory):
    """QtWebKit implementations related to page history."""
    def current_idx(self):
        """Return the index of the current history item."""
        return self._history.currentItemIndex()
    def can_go_back(self):
        """Return True if there is a history item to go back to."""
        return self._history.canGoBack()
    def can_go_forward(self):
        """Return True if there is a history item to go forward to."""
        return self._history.canGoForward()
    def _item_at(self, i):
        """Return the history item at the given index."""
        return self._history.itemAt(i)
    def _go_to_item(self, item):
        """Navigate to the given history item, announcing it beforehand."""
        self._tab.predicted_navigation.emit(item.url())
        self._history.goToItem(item)
    def serialize(self):
        """Serialize the underlying QWebHistory to bytes."""
        return qtutils.serialize(self._history)
    def deserialize(self, data):
        """Restore the underlying QWebHistory from serialized bytes."""
        return qtutils.deserialize(data, self._history)
    def load_items(self, items):
        """Load the given TabHistoryItems into the history.

        Also restores per-item user data and, for the current item, the
        saved zoom factor and scroll position.
        """
        if items:
            self._tab.predicted_navigation.emit(items[-1].url)
        stream, _data, user_data = tabhistory.serialize(items)
        qtutils.deserialize_stream(stream, self._history)
        for i, data in enumerate(user_data):
            self._history.itemAt(i).setUserData(data)
        cur_data = self._history.currentItem().userData()
        if cur_data is not None:
            if 'zoom' in cur_data:
                self._tab.zoom.set_factor(cur_data['zoom'])
            # Only restore the scroll position if nothing scrolled yet;
            # deferred via QTimer so the page had a chance to render.
            if ('scroll-pos' in cur_data and
                    self._tab.scroller.pos_px() == QPoint(0, 0)):
                QTimer.singleShot(0, functools.partial(
                    self._tab.scroller.to_point, cur_data['scroll-pos']))
class WebKitElements(browsertab.AbstractElements):
    """QtWebKit implemementations related to elements on the page."""
    def find_css(self, selector, callback, *, only_visible=False):
        """Find all elements matching the CSS selector in all frames.

        Args:
            selector: The CSS selector to search for.
            callback: Called with the list of WebKitElements found.
            only_visible: Only include elements visible in the main frame.
        """
        mainframe = self._widget.page().mainFrame()
        if mainframe is None:
            raise browsertab.WebTabError("No frame focused!")
        elems = []
        frames = webkitelem.get_child_frames(mainframe)
        for f in frames:
            for elem in f.findAllElements(selector):
                elems.append(webkitelem.WebKitElement(elem, tab=self._tab))
        if only_visible:
            # pylint: disable=protected-access
            elems = [e for e in elems if e._is_visible(mainframe)]
            # pylint: enable=protected-access
        callback(elems)
    def find_id(self, elem_id, callback):
        """Find the element with the given ID, calling callback with it.

        The callback receives None when no element matches.
        """
        def find_id_cb(elems):
            """Call the real callback with the found elements."""
            if not elems:
                callback(None)
            else:
                callback(elems[0])
        # Escape non-alphanumeric characters in the selector
        # https://www.w3.org/TR/CSS2/syndata.html#value-def-identifier
        elem_id = re.sub(r'[^a-zA-Z0-9_-]', r'\\\g<0>', elem_id)
        self.find_css('#' + elem_id, find_id_cb)
    def find_focused(self, callback):
        """Call the callback with the currently focused element (or None)."""
        frame = self._widget.page().currentFrame()
        if frame is None:
            callback(None)
            return
        elem = frame.findFirstElement('*:focus')
        if elem.isNull():
            callback(None)
        else:
            callback(webkitelem.WebKitElement(elem, tab=self._tab))
    def find_at_pos(self, pos, callback):
        """Call the callback with the element at the given position (or None).

        Args:
            pos: A QPoint with non-negative coordinates, relative to the view.
        """
        assert pos.x() >= 0
        assert pos.y() >= 0
        frame = self._widget.page().frameAt(pos)
        if frame is None:
            # This happens when we click inside the webview, but not actually
            # on the QWebPage - for example when clicking the scrollbar
            # sometimes.
            log.webview.debug("Hit test at {} but frame is None!".format(pos))
            callback(None)
            return
        # You'd think we have to subtract frame.geometry().topLeft() from the
        # position, but it seems QWebFrame::hitTestContent wants a position
        # relative to the QWebView, not to the frame. This makes no sense to
        # me, but it works this way.
        hitresult = frame.hitTestContent(pos)
        if hitresult.isNull():
            # For some reason, the whole hit result can be null sometimes (e.g.
            # on doodle menu links).
            log.webview.debug("Hit test result is null!")
            callback(None)
            return
        try:
            elem = webkitelem.WebKitElement(hitresult.element(), tab=self._tab)
        except webkitelem.IsNullError:
            # For some reason, the hit result element can be a null element
            # sometimes (e.g. when clicking the timetable fields on
            # http://www.sbb.ch/ ).
            log.webview.debug("Hit test result element is null!")
            callback(None)
            return
        callback(elem)
class WebKitAudio(browsertab.AbstractAudio):
    """Dummy handling of audio status for QtWebKit."""
    def set_muted(self, muted: bool, override: bool = False):
        """Always raise, as QtWebKit can't mute tabs."""
        raise browsertab.WebTabError('Muting is not supported on QtWebKit!')
    def is_muted(self):
        """Always False: QtWebKit has no mute support."""
        return False
    def is_recently_audible(self):
        """Always False: QtWebKit can't report audio activity."""
        return False
class WebKitTab(browsertab.AbstractTab):
    """A QtWebKit tab in the browser."""
    def __init__(self, *, win_id, mode_manager, private, parent=None):
        super().__init__(win_id=win_id, mode_manager=mode_manager,
                         private=private, parent=parent)
        widget = webview.WebView(win_id=win_id, tab_id=self.tab_id,
                                 private=private, tab=self)
        if private:
            self._make_private(widget)
        # Per-feature sub-objects implementing the AbstractTab API.
        self.history = WebKitHistory(tab=self)
        self.scroller = WebKitScroller(tab=self, parent=self)
        self.caret = WebKitCaret(mode_manager=mode_manager,
                                 tab=self, parent=self)
        self.zoom = WebKitZoom(tab=self, parent=self)
        self.search = WebKitSearch(tab=self, parent=self)
        self.printing = WebKitPrinting(tab=self)
        self.elements = WebKitElements(tab=self)
        self.action = WebKitAction(tab=self)
        self.audio = WebKitAudio(tab=self, parent=self)
        # We're assigning settings in _set_widget
        self.settings = webkitsettings.WebKitSettings(settings=None)
        self._set_widget(widget)
        self._connect_signals()
        self.backend = usertypes.Backend.QtWebKit
    def _install_event_filter(self):
        """Route mouse events on the widget through our event filter."""
        self._widget.installEventFilter(self._mouse_event_filter)
    def _make_private(self, widget):
        """Turn on private browsing for the given widget's settings."""
        settings = widget.settings()
        settings.setAttribute(QWebSettings.PrivateBrowsingEnabled, True)
    def openurl(self, url, *, predict=True):
        """Open the given URL in this tab."""
        self._openurl_prepare(url, predict=predict)
        self._widget.openurl(url)
    def url(self, requested=False):
        """Return the current (or originally requested) URL."""
        frame = self._widget.page().mainFrame()
        if requested:
            return frame.requestedUrl()
        else:
            return frame.url()
    def dump_async(self, callback, *, plain=False):
        """Call the callback with the page's HTML (or plain text) content."""
        frame = self._widget.page().mainFrame()
        if plain:
            callback(frame.toPlainText())
        else:
            callback(frame.toHtml())
    def run_js_async(self, code, callback=None, *, world=None):
        """Run JavaScript in the page, optionally passing the result on.

        QtWebKit has no JS world support, so any non-jseval world is ignored
        with a warning.
        """
        if world is not None and world != usertypes.JsWorld.jseval:
            log.webview.warning("Ignoring world ID {}".format(world))
        document_element = self._widget.page().mainFrame().documentElement()
        result = document_element.evaluateJavaScript(code)
        if callback is not None:
            callback(result)
    def icon(self):
        """Return the page's favicon as a QIcon."""
        return self._widget.icon()
    def shutdown(self):
        """Shut the underlying widget down."""
        self._widget.shutdown()
    def reload(self, *, force=False):
        """Reload the page, bypassing the cache if force is given."""
        if force:
            action = QWebPage.ReloadAndBypassCache
        else:
            action = QWebPage.Reload
        self._widget.triggerPageAction(action)
    def stop(self):
        """Stop loading the current page."""
        self._widget.stop()
    def title(self):
        """Return the current page title."""
        return self._widget.title()
    def clear_ssl_errors(self):
        """Forget all remembered SSL error exemptions."""
        self.networkaccessmanager().clear_all_ssl_errors()
    def key_press(self, key, modifier=Qt.NoModifier):
        """Send a fake key press + release event pair to the tab."""
        press_evt = QKeyEvent(QEvent.KeyPress, key, modifier, 0, 0, 0)
        release_evt = QKeyEvent(QEvent.KeyRelease, key, modifier,
                                0, 0, 0)
        self.send_event(press_evt)
        self.send_event(release_evt)
    @pyqtSlot()
    def _on_history_trigger(self):
        """Emit add_history_item with the current URL/title."""
        url = self.url()
        requested_url = self.url(requested=True)
        self.add_history_item.emit(url, requested_url, self.title())
    def set_html(self, html, base_url=QUrl()):
        """Set the page content to the given HTML string."""
        self._widget.setHtml(html, base_url)
    def networkaccessmanager(self):
        """Return the page's QNetworkAccessManager."""
        return self._widget.page().networkAccessManager()
    def user_agent(self):
        """Return the user agent used for the current URL."""
        page = self._widget.page()
        return page.userAgentForUrl(self.url())
    @pyqtSlot()
    def _on_load_started(self):
        super()._on_load_started()
        # Reset per-load netrc state so credentials can be re-read.
        self.networkaccessmanager().netrc_used = False
        # Make sure the icon is cleared when navigating to a page without one.
        self.icon_changed.emit(QIcon())
    @pyqtSlot()
    def _on_frame_load_finished(self):
        """Make sure we emit an appropriate status when loading finished.

        While Qt has a bool "ok" attribute for loadFinished, it always is True
        when using error pages... See
        https://github.com/qutebrowser/qutebrowser/issues/84
        """
        self._on_load_finished(not self._widget.page().error_occurred)
    @pyqtSlot()
    def _on_webkit_icon_changed(self):
        """Emit iconChanged with a QIcon like QWebEngineView does."""
        if sip.isdeleted(self._widget):
            log.webview.debug("Got _on_webkit_icon_changed for deleted view!")
            return
        self.icon_changed.emit(self._widget.icon())
    @pyqtSlot(QWebFrame)
    def _on_frame_created(self, frame):
        """Connect the contentsSizeChanged signal of each frame."""
        # FIXME:qtwebengine those could theoretically regress:
        # https://github.com/qutebrowser/qutebrowser/issues/152
        # https://github.com/qutebrowser/qutebrowser/issues/263
        frame.contentsSizeChanged.connect(self._on_contents_size_changed)
    @pyqtSlot(QSize)
    def _on_contents_size_changed(self, size):
        """Re-emit the frame's size change as a QSizeF."""
        self.contents_size_changed.emit(QSizeF(size))
    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(self, navigation):
        """Redirect link clicks to the right tab based on the click target."""
        super()._on_navigation_request(navigation)
        if not navigation.accepted:
            return
        log.webview.debug("target {} override {}".format(
            self.data.open_target, self.data.override_target))
        # A one-shot override (e.g. from hinting) takes precedence.
        if self.data.override_target is not None:
            target = self.data.override_target
            self.data.override_target = None
        else:
            target = self.data.open_target
        if (navigation.navigation_type == navigation.Type.link_clicked and
                target != usertypes.ClickTarget.normal):
            # Cancel navigation here and open the URL in the target tab.
            tab = shared.get_tab(self.win_id, target)
            tab.openurl(navigation.url)
            self.data.open_target = usertypes.ClickTarget.normal
            navigation.accepted = False
        if navigation.is_main_frame:
            self.settings.update_for_url(navigation.url)
    @pyqtSlot('QNetworkReply*')
    def _on_ssl_errors(self, reply):
        """Remember hosts which produced SSL errors."""
        self._insecure_hosts.add(reply.url().host())
    def _connect_signals(self):
        """Wire up QWebView/QWebPage/QWebFrame signals to our slots."""
        view = self._widget
        page = view.page()
        frame = page.mainFrame()
        page.windowCloseRequested.connect(self.window_close_requested)
        page.linkHovered.connect(self.link_hovered)
        page.loadProgress.connect(self._on_load_progress)
        frame.loadStarted.connect(self._on_load_started)
        view.scroll_pos_changed.connect(self.scroller.perc_changed)
        view.titleChanged.connect(self.title_changed)
        view.urlChanged.connect(self._on_url_changed)
        view.shutting_down.connect(self.shutting_down)
        page.networkAccessManager().sslErrors.connect(self._on_ssl_errors)
        frame.loadFinished.connect(self._on_frame_load_finished)
        view.iconChanged.connect(self._on_webkit_icon_changed)
        page.frameCreated.connect(self._on_frame_created)
        frame.contentsSizeChanged.connect(self._on_contents_size_changed)
        frame.initialLayoutCompleted.connect(self._on_history_trigger)
        page.navigation_request.connect(self._on_navigation_request)
    def event_target(self):
        """Return the widget which should receive sent events."""
        return self._widget
| ./CrossVul/dataset_final_sorted/CWE-684/py/good_3921_2 |
crossvul-python_data_bad_3922_2 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Wrapper over our (QtWebKit) WebView."""
import re
import functools
import xml.etree.ElementTree
from PyQt5.QtCore import pyqtSlot, Qt, QUrl, QPoint, QTimer, QSizeF, QSize
from PyQt5.QtGui import QIcon
from PyQt5.QtWebKitWidgets import QWebPage, QWebFrame
from PyQt5.QtWebKit import QWebSettings
from PyQt5.QtPrintSupport import QPrinter
from qutebrowser.browser import browsertab, shared
from qutebrowser.browser.webkit import (webview, tabhistory, webkitelem,
webkitsettings)
from qutebrowser.utils import qtutils, usertypes, utils, log, debug
from qutebrowser.qt import sip
class WebKitAction(browsertab.AbstractAction):
    """QtWebKit implementations related to web actions."""
    # Class/enum used by the AbstractAction machinery to resolve actions.
    action_class = QWebPage
    action_base = QWebPage.WebAction
    def exit_fullscreen(self):
        """Not available with QtWebKit."""
        raise browsertab.UnsupportedOperationError
    def save_page(self):
        """Save the current page."""
        raise browsertab.UnsupportedOperationError
    def show_source(self, pygments=False):
        """Show the page source.

        The pygments argument is ignored here: QtWebKit always uses the
        pygments-based fallback.
        """
        self._show_source_pygments()
class WebKitPrinting(browsertab.AbstractPrinting):
    """QtWebKit implementations related to printing."""
    def check_pdf_support(self):
        """PDF printing is always supported with QtWebKit."""
        pass
    def check_printer_support(self):
        """Printer printing is always supported with QtWebKit."""
        pass
    def check_preview_support(self):
        """Print preview is always supported with QtWebKit."""
        pass
    def to_pdf(self, filename):
        """Print the page to a PDF file with the given filename."""
        printer = QPrinter()
        printer.setOutputFileName(filename)
        self.to_printer(printer)
    def to_printer(self, printer, callback=None):
        """Print the page to the given QPrinter."""
        self._widget.print(printer)
        # Can't find out whether there was an error...
        if callback is not None:
            callback(True)
class WebKitSearch(browsertab.AbstractSearch):
    """QtWebKit implementations related to searching on the page."""
    def __init__(self, tab, parent=None):
        super().__init__(tab, parent)
        # Flags used by the most recent search; reused for next/prev.
        self._flags = QWebPage.FindFlags(0)  # type: ignore
    def _call_cb(self, callback, found, text, flags, caller):
        """Call the given callback if it's non-None.

        Delays the call via a QTimer so the website is re-rendered in between.

        Args:
            callback: What to call
            found: If the text was found
            text: The text searched for
            flags: The flags searched with
            caller: Name of the caller.
        """
        found_text = 'found' if found else "didn't find"
        # Removing FindWrapsAroundDocument to get the same logging as with
        # QtWebEngine
        debug_flags = debug.qflags_key(
            QWebPage, flags & ~QWebPage.FindWrapsAroundDocument,
            klass=QWebPage.FindFlag)
        if debug_flags != '0x0000':
            flag_text = 'with flags {}'.format(debug_flags)
        else:
            flag_text = ''
        log.webview.debug(' '.join([caller, found_text, text, flag_text])
                          .strip())
        if callback is not None:
            QTimer.singleShot(0, functools.partial(callback, found))
        self.finished.emit(found)
    def clear(self):
        """Clear the current search and its highlights."""
        if self.search_displayed:
            self.cleared.emit()
        self.search_displayed = False
        # We first clear the marked text, then the highlights
        self._widget.findText('')
        self._widget.findText('', QWebPage.HighlightAllOccurrences)
    def search(self, text, *, ignore_case=usertypes.IgnoreCase.never,
               reverse=False, result_cb=None):
        """Search for the given text on the page.

        Args:
            text: The text to search for.
            ignore_case: A usertypes.IgnoreCase member.
            reverse: Search backwards.
            result_cb: Called with True/False depending on whether the text
                       was found.
        """
        # Don't go to next entry on duplicate search
        if self.text == text and self.search_displayed:
            log.webview.debug("Ignoring duplicate search request"
                              " for {}".format(text))
            return
        # Clear old search results, this is done automatically on QtWebEngine.
        self.clear()
        self.text = text
        self.search_displayed = True
        self._flags = QWebPage.FindWrapsAroundDocument
        if self._is_case_sensitive(ignore_case):
            self._flags |= QWebPage.FindCaseSensitively
        if reverse:
            self._flags |= QWebPage.FindBackward
        # We actually search *twice* - once to highlight everything, then again
        # to get a mark so we can navigate.
        found = self._widget.findText(text, self._flags)
        self._widget.findText(text,
                              self._flags | QWebPage.HighlightAllOccurrences)
        self._call_cb(result_cb, found, text, self._flags, 'search')
    def next_result(self, *, result_cb=None):
        """Go to the next result of the current search."""
        self.search_displayed = True
        found = self._widget.findText(self.text, self._flags)
        self._call_cb(result_cb, found, self.text, self._flags, 'next_result')
    def prev_result(self, *, result_cb=None):
        """Go to the previous result of the current search."""
        self.search_displayed = True
        # The int() here makes sure we get a copy of the flags.
        flags = QWebPage.FindFlags(int(self._flags))  # type: ignore
        if flags & QWebPage.FindBackward:
            flags &= ~QWebPage.FindBackward
        else:
            flags |= QWebPage.FindBackward
        found = self._widget.findText(self.text, flags)
        self._call_cb(result_cb, found, self.text, flags, 'prev_result')
class WebKitCaret(browsertab.AbstractCaret):
    """QtWebKit implementations related to moving the cursor/selection."""
    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_entered(self, mode):
        """Turn on caret browsing when caret mode is entered."""
        if mode != usertypes.KeyMode.caret:
            return
        self.selection_enabled = self._widget.hasSelection()
        self.selection_toggled.emit(self.selection_enabled)
        settings = self._widget.settings()
        settings.setAttribute(QWebSettings.CaretBrowsingEnabled, True)
        if self._widget.isVisible():
            # Sometimes the caret isn't immediately visible, but unfocusing
            # and refocusing it fixes that.
            self._widget.clearFocus()
            self._widget.setFocus(Qt.OtherFocusReason)
            # Move the caret to the first element in the viewport if there
            # isn't any text which is already selected.
            #
            # Note: We can't use hasSelection() here, as that's always
            # true in caret mode.
            if not self.selection_enabled:
                self._widget.page().currentFrame().evaluateJavaScript(
                    utils.read_file('javascript/position_caret.js'))
    @pyqtSlot(usertypes.KeyMode)
    def _on_mode_left(self, _mode):
        """Turn caret browsing off again when leaving caret mode."""
        settings = self._widget.settings()
        if settings.testAttribute(QWebSettings.CaretBrowsingEnabled):
            if self.selection_enabled and self._widget.hasSelection():
                # Remove selection if it exists
                self._widget.triggerPageAction(QWebPage.MoveToNextChar)
            settings.setAttribute(QWebSettings.CaretBrowsingEnabled, False)
            self.selection_enabled = False
    def move_to_next_line(self, count=1):
        """Move (or select) to the next line, count times."""
        if not self.selection_enabled:
            act = QWebPage.MoveToNextLine
        else:
            act = QWebPage.SelectNextLine
        for _ in range(count):
            self._widget.triggerPageAction(act)
    def move_to_prev_line(self, count=1):
        """Move (or select) to the previous line, count times."""
        if not self.selection_enabled:
            act = QWebPage.MoveToPreviousLine
        else:
            act = QWebPage.SelectPreviousLine
        for _ in range(count):
            self._widget.triggerPageAction(act)
    def move_to_next_char(self, count=1):
        """Move (or select) to the next character, count times."""
        if not self.selection_enabled:
            act = QWebPage.MoveToNextChar
        else:
            act = QWebPage.SelectNextChar
        for _ in range(count):
            self._widget.triggerPageAction(act)
    def move_to_prev_char(self, count=1):
        """Move (or select) to the previous character, count times."""
        if not self.selection_enabled:
            act = QWebPage.MoveToPreviousChar
        else:
            act = QWebPage.SelectPreviousChar
        for _ in range(count):
            self._widget.triggerPageAction(act)
    def move_to_end_of_word(self, count=1):
        """Move (or select) to the end of the word, count times."""
        if not self.selection_enabled:
            act = [QWebPage.MoveToNextWord]
            # On Windows, an extra backwards character step follows.
            if utils.is_windows:  # pragma: no cover
                act.append(QWebPage.MoveToPreviousChar)
        else:
            act = [QWebPage.SelectNextWord]
            if utils.is_windows:  # pragma: no cover
                act.append(QWebPage.SelectPreviousChar)
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)
    def move_to_next_word(self, count=1):
        """Move (or select) to the next word, count times."""
        if not self.selection_enabled:
            act = [QWebPage.MoveToNextWord]
            # On non-Windows platforms, an extra forward character step follows.
            if not utils.is_windows:  # pragma: no branch
                act.append(QWebPage.MoveToNextChar)
        else:
            act = [QWebPage.SelectNextWord]
            if not utils.is_windows:  # pragma: no branch
                act.append(QWebPage.SelectNextChar)
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)
    def move_to_prev_word(self, count=1):
        """Move (or select) to the previous word, count times."""
        if not self.selection_enabled:
            act = QWebPage.MoveToPreviousWord
        else:
            act = QWebPage.SelectPreviousWord
        for _ in range(count):
            self._widget.triggerPageAction(act)
    def move_to_start_of_line(self):
        """Move (or select) to the start of the current line."""
        if not self.selection_enabled:
            act = QWebPage.MoveToStartOfLine
        else:
            act = QWebPage.SelectStartOfLine
        self._widget.triggerPageAction(act)
    def move_to_end_of_line(self):
        """Move (or select) to the end of the current line."""
        if not self.selection_enabled:
            act = QWebPage.MoveToEndOfLine
        else:
            act = QWebPage.SelectEndOfLine
        self._widget.triggerPageAction(act)
    def move_to_start_of_next_block(self, count=1):
        """Move (or select) to the start of the next block, count times."""
        if not self.selection_enabled:
            act = [QWebPage.MoveToNextLine,
                   QWebPage.MoveToStartOfBlock]
        else:
            act = [QWebPage.SelectNextLine,
                   QWebPage.SelectStartOfBlock]
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)
    def move_to_start_of_prev_block(self, count=1):
        """Move (or select) to the start of the previous block, count times."""
        if not self.selection_enabled:
            act = [QWebPage.MoveToPreviousLine,
                   QWebPage.MoveToStartOfBlock]
        else:
            act = [QWebPage.SelectPreviousLine,
                   QWebPage.SelectStartOfBlock]
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)
    def move_to_end_of_next_block(self, count=1):
        """Move (or select) to the end of the next block, count times."""
        if not self.selection_enabled:
            act = [QWebPage.MoveToNextLine,
                   QWebPage.MoveToEndOfBlock]
        else:
            act = [QWebPage.SelectNextLine,
                   QWebPage.SelectEndOfBlock]
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)
    def move_to_end_of_prev_block(self, count=1):
        """Move (or select) to the end of the previous block, count times."""
        if not self.selection_enabled:
            act = [QWebPage.MoveToPreviousLine, QWebPage.MoveToEndOfBlock]
        else:
            act = [QWebPage.SelectPreviousLine, QWebPage.SelectEndOfBlock]
        for _ in range(count):
            for a in act:
                self._widget.triggerPageAction(a)
    def move_to_start_of_document(self):
        """Move (or select) to the start of the document."""
        if not self.selection_enabled:
            act = QWebPage.MoveToStartOfDocument
        else:
            act = QWebPage.SelectStartOfDocument
        self._widget.triggerPageAction(act)
    def move_to_end_of_document(self):
        """Move (or select) to the end of the document."""
        if not self.selection_enabled:
            act = QWebPage.MoveToEndOfDocument
        else:
            act = QWebPage.SelectEndOfDocument
        self._widget.triggerPageAction(act)
    def toggle_selection(self):
        """Flip selection mode and notify listeners."""
        self.selection_enabled = not self.selection_enabled
        self.selection_toggled.emit(self.selection_enabled)
    def drop_selection(self):
        """Drop the current selection (caret move collapses it)."""
        self._widget.triggerPageAction(QWebPage.MoveToNextChar)
    def selection(self, callback):
        """Call the callback with the currently selected text."""
        callback(self._widget.selectedText())
    def reverse_selection(self):
        """Swap the anchor and focus ends of the current selection."""
        self._tab.run_js_async("""{
            const sel = window.getSelection();
            sel.setBaseAndExtent(
                sel.extentNode, sel.extentOffset, sel.baseNode,
                sel.baseOffset
            );
        }""")
    def _follow_selected(self, *, tab=False):
        """Follow the currently selected element.

        Args:
            tab: Open the link in a new tab instead of the current one.
        """
        if QWebSettings.globalSettings().testAttribute(
                QWebSettings.JavascriptEnabled):
            # With JS available, click the selection's parent node (or the
            # focused element) inside the page.
            if tab:
                self._tab.data.override_target = usertypes.ClickTarget.tab
            self._tab.run_js_async("""
                const aElm = document.activeElement;
                if (window.getSelection().anchorNode) {
                    window.getSelection().anchorNode.parentNode.click();
                } else if (aElm && aElm !== document.body) {
                    aElm.click();
                }
            """)
        else:
            # JS disabled: parse the selected HTML for an <a> element and
            # navigate to its href manually.
            selection = self._widget.selectedHtml()
            if not selection:
                # Getting here may mean we crashed, but we can't do anything
                # about that until this commit is released:
                # https://github.com/annulen/webkit/commit/0e75f3272d149bc64899c161f150eb341a2417af
                # TODO find a way to check if something is focused
                self._follow_enter(tab)
                return
            try:
                selected_element = xml.etree.ElementTree.fromstring(
                    '<html>{}</html>'.format(selection)).find('a')
            except xml.etree.ElementTree.ParseError:
                raise browsertab.WebTabError('Could not parse selected '
                                             'element!')
            if selected_element is not None:
                try:
                    url = selected_element.attrib['href']
                except KeyError:
                    raise browsertab.WebTabError('Anchor element without '
                                                 'href!')
                # Resolve relative hrefs against the current page URL.
                url = self._tab.url().resolved(QUrl(url))
                if tab:
                    self._tab.new_tab_requested.emit(url)
                else:
                    self._tab.load_url(url)
    def follow_selected(self, *, tab=False):
        """Follow the selected link, always emitting follow_selected_done."""
        try:
            self._follow_selected(tab=tab)
        finally:
            self.follow_selected_done.emit()
class WebKitZoom(browsertab.AbstractZoom):
    """QtWebKit implementations related to zooming."""
    def _set_factor_internal(self, factor):
        """Apply the zoom factor directly on the underlying web view."""
        self._widget.setZoomFactor(factor)
class WebKitScroller(browsertab.AbstractScroller):
    """QtWebKit implementations related to scrolling."""
    # FIXME:qtwebengine When to use the main frame, when the current one?
    def pos_px(self):
        """Return the scroll position in pixels as a QPoint."""
        return self._widget.page().mainFrame().scrollPosition()
    def pos_perc(self):
        """Return the scroll position as percentages."""
        return self._widget.scroll_pos
    def to_point(self, point):
        """Scroll to the given pixel position (QPoint)."""
        self._widget.page().mainFrame().setScrollPosition(point)
    def to_anchor(self, name):
        """Scroll to the anchor with the given name on the page."""
        self._widget.page().mainFrame().scrollToAnchor(name)
    def delta(self, x: int = 0, y: int = 0) -> None:
        """Scroll by the given amount of pixels (overflow-checked ints)."""
        qtutils.check_overflow(x, 'int')
        qtutils.check_overflow(y, 'int')
        self._widget.page().mainFrame().scroll(x, y)
    def delta_page(self, x: float = 0.0, y: float = 0.0) -> None:
        """Scroll by the given fraction of page widths/heights.

        Whole-number vertical deltas are handled via page_up/page_down;
        fractional remainders are converted to truncated pixel deltas.
        """
        if y.is_integer():
            y = int(y)
            if y == 0:
                pass
            elif y < 0:
                self.page_up(count=-y)
            elif y > 0:
                self.page_down(count=y)
            y = 0
        if x == 0 and y == 0:
            return
        size = self._widget.page().mainFrame().geometry()
        # delta() takes ints, so truncate the fractional pixel offsets.
        self.delta(int(x * size.width()), int(y * size.height()))
    def to_perc(self, x=None, y=None):
        """Scroll to the given percentage positions (None = leave axis)."""
        if x is None and y == 0:
            self.top()
        elif x is None and y == 100:
            self.bottom()
        else:
            for val, orientation in [(x, Qt.Horizontal), (y, Qt.Vertical)]:
                if val is not None:
                    frame = self._widget.page().mainFrame()
                    maximum = frame.scrollBarMaximum(orientation)
                    if maximum == 0:
                        continue
                    pos = int(maximum * val / 100)
                    pos = qtutils.check_overflow(pos, 'int', fatal=False)
                    frame.setScrollBarValue(orientation, pos)
    def _key_press(self, key, count=1, getter_name=None, direction=None):
        """Send fake key presses to scroll, stopping at the scroll limits.

        Args:
            key: The Qt.Key to press.
            count: How many times to press it (capped at 5000).
            getter_name: Name of the QWebFrame method returning the scroll
                         limit for `direction`, used to abort early.
            direction: Qt.Horizontal or Qt.Vertical.
        """
        frame = self._widget.page().mainFrame()
        getter = None if getter_name is None else getattr(frame, getter_name)
        # FIXME:qtwebengine needed?
        # self._widget.setFocus()
        for _ in range(min(count, 5000)):
            # Abort scrolling if the minimum/maximum was reached.
            if (getter is not None and
                    frame.scrollBarValue(direction) == getter(direction)):
                return
            self._tab.fake_key_press(key)
    def up(self, count=1):
        """Scroll up by count steps."""
        self._key_press(Qt.Key_Up, count, 'scrollBarMinimum', Qt.Vertical)
    def down(self, count=1):
        """Scroll down by count steps."""
        self._key_press(Qt.Key_Down, count, 'scrollBarMaximum', Qt.Vertical)
    def left(self, count=1):
        """Scroll left by count steps."""
        self._key_press(Qt.Key_Left, count, 'scrollBarMinimum', Qt.Horizontal)
    def right(self, count=1):
        """Scroll right by count steps."""
        self._key_press(Qt.Key_Right, count, 'scrollBarMaximum', Qt.Horizontal)
    def top(self):
        """Scroll to the top of the page."""
        self._key_press(Qt.Key_Home)
    def bottom(self):
        """Scroll to the bottom of the page."""
        self._key_press(Qt.Key_End)
    def page_up(self, count=1):
        """Scroll up by count pages."""
        self._key_press(Qt.Key_PageUp, count, 'scrollBarMinimum', Qt.Vertical)
    def page_down(self, count=1):
        """Scroll down by count pages."""
        self._key_press(Qt.Key_PageDown, count, 'scrollBarMaximum',
                        Qt.Vertical)
    def at_top(self):
        """Return True if scrolled to the very top."""
        return self.pos_px().y() == 0
    def at_bottom(self):
        """Return True if scrolled to the very bottom."""
        frame = self._widget.page().currentFrame()
        return self.pos_px().y() >= frame.scrollBarMaximum(Qt.Vertical)
class WebKitHistoryPrivate(browsertab.AbstractHistoryPrivate):
    """History-related methods which are not part of the extension API."""
    def serialize(self):
        """Serialize the underlying QWebHistory to bytes."""
        return qtutils.serialize(self._history)
    def deserialize(self, data):
        """Restore the underlying QWebHistory from serialized bytes."""
        qtutils.deserialize(data, self._history)
    def load_items(self, items):
        """Load the given TabHistoryItems into the history.

        Also restores per-item user data and, for the current item, the
        saved zoom factor and scroll position.
        """
        if items:
            self._tab.before_load_started.emit(items[-1].url)
        stream, _data, user_data = tabhistory.serialize(items)
        qtutils.deserialize_stream(stream, self._history)
        for i, data in enumerate(user_data):
            self._history.itemAt(i).setUserData(data)
        cur_data = self._history.currentItem().userData()
        if cur_data is not None:
            if 'zoom' in cur_data:
                self._tab.zoom.set_factor(cur_data['zoom'])
            # Only restore the scroll position if nothing scrolled yet;
            # deferred via QTimer so the page had a chance to render.
            if ('scroll-pos' in cur_data and
                    self._tab.scroller.pos_px() == QPoint(0, 0)):
                QTimer.singleShot(0, functools.partial(
                    self._tab.scroller.to_point, cur_data['scroll-pos']))
class WebKitHistory(browsertab.AbstractHistory):
    """QtWebKit implementations related to page history."""
    def __init__(self, tab):
        super().__init__(tab)
        # Non-extension-API operations live on a separate private object.
        self.private_api = WebKitHistoryPrivate(tab)
    def __len__(self):
        return len(self._history)
    def __iter__(self):
        return iter(self._history.items())
    def current_idx(self):
        """Return the index of the current history item."""
        return self._history.currentItemIndex()
    def can_go_back(self):
        """Return True if there is a history item to go back to."""
        return self._history.canGoBack()
    def can_go_forward(self):
        """Return True if there is a history item to go forward to."""
        return self._history.canGoForward()
    def _item_at(self, i):
        """Return the history item at the given index."""
        return self._history.itemAt(i)
    def _go_to_item(self, item):
        """Navigate to the given history item, announcing it beforehand."""
        self._tab.before_load_started.emit(item.url())
        self._history.goToItem(item)
class WebKitElements(browsertab.AbstractElements):

    """QtWebKit implementations related to elements on the page."""

    def find_css(self, selector, callback, error_cb, *, only_visible=False):
        """Find all elements matching the CSS selector, across all frames."""
        utils.unused(error_cb)
        mainframe = self._widget.page().mainFrame()
        if mainframe is None:
            raise browsertab.WebTabError("No frame focused!")

        found = [
            webkitelem.WebKitElement(raw, tab=self._tab)
            for frame in webkitelem.get_child_frames(mainframe)
            for raw in frame.findAllElements(selector)
        ]

        if only_visible:
            # pylint: disable=protected-access
            found = [e for e in found if e._is_visible(mainframe)]
            # pylint: enable=protected-access

        callback(found)

    def find_id(self, elem_id, callback):
        """Find the element with the given ID (or None) async."""

        def single_cb(elems):
            """Forward the first match (or None) to the real callback."""
            callback(elems[0] if elems else None)

        # Escape non-alphanumeric characters in the selector
        # https://www.w3.org/TR/CSS2/syndata.html#value-def-identifier
        elem_id = re.sub(r'[^a-zA-Z0-9_-]', r'\\\g<0>', elem_id)
        self.find_css('#' + elem_id, single_cb, error_cb=lambda exc: None)

    def find_focused(self, callback):
        """Find the currently focused element (or None) async."""
        frame = self._widget.page().currentFrame()
        if frame is None:
            callback(None)
            return

        focused = frame.findFirstElement('*:focus')
        callback(None if focused.isNull()
                 else webkitelem.WebKitElement(focused, tab=self._tab))

    def find_at_pos(self, pos, callback):
        """Find the element at the given view position (or None) async."""
        assert pos.x() >= 0
        assert pos.y() >= 0
        frame = self._widget.page().frameAt(pos)
        if frame is None:
            # This happens when we click inside the webview, but not actually
            # on the QWebPage - for example when clicking the scrollbar
            # sometimes.
            log.webview.debug("Hit test at {} but frame is None!".format(pos))
            callback(None)
            return

        # You'd think we have to subtract frame.geometry().topLeft() from the
        # position, but it seems QWebFrame::hitTestContent wants a position
        # relative to the QWebView, not to the frame. This makes no sense to
        # me, but it works this way.
        hitresult = frame.hitTestContent(pos)
        if hitresult.isNull():
            # For some reason, the whole hit result can be null sometimes
            # (e.g. on doodle menu links).
            log.webview.debug("Hit test result is null!")
            callback(None)
            return

        try:
            wrapped = webkitelem.WebKitElement(hitresult.element(),
                                               tab=self._tab)
        except webkitelem.IsNullError:
            # The hit result element can be a null element sometimes (e.g.
            # when clicking the timetable fields on http://www.sbb.ch/ ).
            log.webview.debug("Hit test result element is null!")
            callback(None)
            return

        callback(wrapped)
class WebKitAudio(browsertab.AbstractAudio):

    """Dummy handling of audio status for QtWebKit."""

    def set_muted(self, muted: bool, override: bool = False) -> None:
        # QtWebKit offers no per-tab mute support at all.
        raise browsertab.WebTabError('Muting is not supported on QtWebKit!')

    def is_muted(self):
        # Tabs can never be muted with QtWebKit.
        return False

    def is_recently_audible(self):
        # Audio state is not tracked with QtWebKit.
        return False
class WebKitTabPrivate(browsertab.AbstractTabPrivate):

    """QtWebKit-related methods which aren't part of the public API."""

    def networkaccessmanager(self):
        """Return the QNetworkAccessManager of this tab's page."""
        return self._widget.page().networkAccessManager()

    def clear_ssl_errors(self):
        """Forget all SSL errors remembered by this tab's NAM."""
        self.networkaccessmanager().clear_all_ssl_errors()

    def event_target(self):
        """Return the widget events should be sent to."""
        return self._widget

    def shutdown(self):
        """Shut down the underlying webview widget."""
        self._widget.shutdown()
class WebKitTab(browsertab.AbstractTab):

    """A QtWebKit tab in the browser."""

    def __init__(self, *, win_id, mode_manager, private, parent=None):
        super().__init__(win_id=win_id, private=private, parent=parent)
        widget = webview.WebView(win_id=win_id, tab_id=self.tab_id,
                                 private=private, tab=self)
        if private:
            self._make_private(widget)
        # Per-tab sub-APIs (history, scrolling, caret, zoom, ...).
        self.history = WebKitHistory(tab=self)
        self.scroller = WebKitScroller(tab=self, parent=self)
        self.caret = WebKitCaret(mode_manager=mode_manager,
                                 tab=self, parent=self)
        self.zoom = WebKitZoom(tab=self, parent=self)
        self.search = WebKitSearch(tab=self, parent=self)
        self.printing = WebKitPrinting(tab=self)
        self.elements = WebKitElements(tab=self)
        self.action = WebKitAction(tab=self)
        self.audio = WebKitAudio(tab=self, parent=self)
        self.private_api = WebKitTabPrivate(mode_manager=mode_manager,
                                           tab=self)
        # We're assigning settings in _set_widget
        self.settings = webkitsettings.WebKitSettings(settings=None)
        self._set_widget(widget)
        self._connect_signals()
        self.backend = usertypes.Backend.QtWebKit

    def _install_event_filter(self):
        """Install the tab's event filter on the webview widget."""
        self._widget.installEventFilter(self._tab_event_filter)

    def _make_private(self, widget):
        """Enable QtWebKit private browsing mode for the given widget."""
        settings = widget.settings()
        settings.setAttribute(QWebSettings.PrivateBrowsingEnabled, True)

    def load_url(self, url, *, emit_before_load_started=True):
        """Load the given QUrl in this tab."""
        self._load_url_prepare(
            url, emit_before_load_started=emit_before_load_started)
        self._widget.load(url)

    def url(self, *, requested=False):
        """Get the current (or, if requested=True, the requested) URL."""
        frame = self._widget.page().mainFrame()
        if requested:
            return frame.requestedUrl()
        else:
            return frame.url()

    def dump_async(self, callback, *, plain=False):
        """Pass the page source (HTML, or plain text) to the callback."""
        frame = self._widget.page().mainFrame()
        if plain:
            callback(frame.toPlainText())
        else:
            callback(frame.toHtml())

    def run_js_async(self, code, callback=None, *, world=None):
        """Run JavaScript in the main frame, optionally with a result callback.

        QtWebKit has no JS world support, so any world other than jseval is
        ignored (with a warning).
        """
        if world is not None and world != usertypes.JsWorld.jseval:
            log.webview.warning("Ignoring world ID {}".format(world))
        document_element = self._widget.page().mainFrame().documentElement()
        result = document_element.evaluateJavaScript(code)
        if callback is not None:
            callback(result)

    def icon(self):
        """Get the current favicon as a QIcon."""
        return self._widget.icon()

    def reload(self, *, force=False):
        """Reload the page, bypassing the cache if force is True."""
        if force:
            action = QWebPage.ReloadAndBypassCache
        else:
            action = QWebPage.Reload
        self._widget.triggerPageAction(action)

    def stop(self):
        """Stop loading the current page."""
        self._widget.stop()

    def title(self):
        """Get the current page title."""
        return self._widget.title()

    @pyqtSlot()
    def _on_history_trigger(self):
        """Emit history_item_triggered with the current URL/title."""
        url = self.url()
        requested_url = self.url(requested=True)
        self.history_item_triggered.emit(url, requested_url, self.title())

    def set_html(self, html, base_url=QUrl()):
        """Set the page content to the given HTML, with base_url for links."""
        self._widget.setHtml(html, base_url)

    @pyqtSlot()
    def _on_load_started(self):
        super()._on_load_started()
        # A new load begins: reset whether netrc authentication was used.
        nam = self._widget.page().networkAccessManager()
        nam.netrc_used = False
        # Make sure the icon is cleared when navigating to a page without one.
        self.icon_changed.emit(QIcon())

    @pyqtSlot(bool)
    def _on_load_finished(self, ok: bool) -> None:
        super()._on_load_finished(ok)
        self._update_load_status(ok)

    @pyqtSlot()
    def _on_frame_load_finished(self):
        """Make sure we emit an appropriate status when loading finished.

        While Qt has a bool "ok" attribute for loadFinished, it always is True
        when using error pages... See
        https://github.com/qutebrowser/qutebrowser/issues/84
        """
        self._on_load_finished(not self._widget.page().error_occurred)

    @pyqtSlot()
    def _on_webkit_icon_changed(self):
        """Emit iconChanged with a QIcon like QWebEngineView does."""
        if sip.isdeleted(self._widget):
            log.webview.debug("Got _on_webkit_icon_changed for deleted view!")
            return
        self.icon_changed.emit(self._widget.icon())

    @pyqtSlot(QWebFrame)
    def _on_frame_created(self, frame):
        """Connect the contentsSizeChanged signal of each frame."""
        # FIXME:qtwebengine those could theoretically regress:
        # https://github.com/qutebrowser/qutebrowser/issues/152
        # https://github.com/qutebrowser/qutebrowser/issues/263
        frame.contentsSizeChanged.connect(self._on_contents_size_changed)

    @pyqtSlot(QSize)
    def _on_contents_size_changed(self, size):
        """Re-emit the new contents size as a QSizeF."""
        self.contents_size_changed.emit(QSizeF(size))

    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(self, navigation):
        """Handle a navigation request, honoring the current open target.

        If a link click should go to another tab/window (set via
        data.open_target or the one-shot data.override_target), load the URL
        there instead and reject this navigation.
        """
        super()._on_navigation_request(navigation)
        if not navigation.accepted:
            return
        log.webview.debug("target {} override {}".format(
            self.data.open_target, self.data.override_target))
        if self.data.override_target is not None:
            # The override is consumed by this navigation.
            target = self.data.override_target
            self.data.override_target = None
        else:
            target = self.data.open_target
        if (navigation.navigation_type == navigation.Type.link_clicked and
                target != usertypes.ClickTarget.normal):
            tab = shared.get_tab(self.win_id, target)
            tab.load_url(navigation.url)
            self.data.open_target = usertypes.ClickTarget.normal
            navigation.accepted = False
        if navigation.is_main_frame:
            # Per-URL settings only apply to main frame navigations.
            self.settings.update_for_url(navigation.url)

    @pyqtSlot()
    def _on_ssl_errors(self):
        # Remember that SSL errors occurred for this tab.
        self._has_ssl_errors = True

    def _connect_signals(self):
        """Wire the Qt view/page/frame signals up to our handlers."""
        view = self._widget
        page = view.page()
        frame = page.mainFrame()
        page.windowCloseRequested.connect(self.window_close_requested)
        page.linkHovered.connect(self.link_hovered)
        page.loadProgress.connect(self._on_load_progress)
        frame.loadStarted.connect(self._on_load_started)
        view.scroll_pos_changed.connect(self.scroller.perc_changed)
        view.titleChanged.connect(self.title_changed)
        view.urlChanged.connect(self._on_url_changed)
        view.shutting_down.connect(self.shutting_down)
        page.networkAccessManager().sslErrors.connect(self._on_ssl_errors)
        frame.loadFinished.connect(self._on_frame_load_finished)
        view.iconChanged.connect(self._on_webkit_icon_changed)
        page.frameCreated.connect(self._on_frame_created)
        frame.contentsSizeChanged.connect(self._on_contents_size_changed)
        frame.initialLayoutCompleted.connect(self._on_history_trigger)
        page.navigation_request.connect(self._on_navigation_request)
# ./CrossVul/dataset_final_sorted/CWE-684/py/bad_3922_2 (dataset artifact)
# crossvul-python_data_good_3917_0
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2019 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Base class for a wrapper over QWebView/QWebEngineView."""
import enum
import itertools
import typing
import attr
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, QUrl, QObject, QSizeF, Qt,
QEvent, QPoint)
from PyQt5.QtGui import QKeyEvent, QIcon
from PyQt5.QtWidgets import QWidget, QApplication, QDialog
from PyQt5.QtPrintSupport import QPrintDialog, QPrinter
from PyQt5.QtNetwork import QNetworkAccessManager
import pygments
import pygments.lexers
import pygments.formatters
from qutebrowser.keyinput import modeman
from qutebrowser.config import config
from qutebrowser.utils import (utils, objreg, usertypes, log, qtutils,
urlutils, message)
from qutebrowser.misc import miscwidgets, objects
from qutebrowser.browser import eventfilter, hints
from qutebrowser.qt import sip
if typing.TYPE_CHECKING:
# pylint: disable=unused-import,useless-suppression
from qutebrowser.browser import webelem
from qutebrowser.browser.inspector import AbstractWebInspector
tab_id_gen = itertools.count(0)
def create(win_id: int,
           private: bool,
           parent: QWidget = None) -> 'AbstractTab':
    """Get a QtWebKit/QtWebEngine tab object.

    Args:
        win_id: The window ID where the tab will be shown.
        private: Whether the tab is a private/off the record tab.
        parent: The Qt parent to set.
    """
    # The backend module is imported lazily so we don't depend on
    # QtWebEngine without the argument and to avoid circular imports.
    mode_manager = modeman.instance(win_id)
    if objects.backend == usertypes.Backend.QtWebEngine:
        from qutebrowser.browser.webengine import webenginetab
        klass = webenginetab.WebEngineTab
    else:
        from qutebrowser.browser.webkit import webkittab
        klass = webkittab.WebKitTab
    return klass(win_id=win_id, mode_manager=mode_manager,
                 private=private, parent=parent)
def init() -> None:
    """Initialize backend-specific modules."""
    # Only the QtWebEngine backend needs extra initialization.
    if objects.backend == usertypes.Backend.QtWebEngine:
        from qutebrowser.browser.webengine import webenginetab
        webenginetab.init()
class WebTabError(Exception):

    """Base class for various errors raised by tab implementations."""
class UnsupportedOperationError(WebTabError):

    """Raised when an operation is not supported with the given backend."""
class TerminationStatus(enum.Enum):

    """How the underlying renderer process terminated.

    Idiom fix: converted from the functional ``enum.Enum(...)`` API to a
    class-based enum (same member names and the same 1-based values), which
    allows docstrings and plays better with static analysis.
    """

    #: Process exited normally.
    normal = 1
    #: Process exited with a non-zero exit status.
    abnormal = 2
    #: Process crashed (e.g. segfault).
    crashed = 3
    #: Process was killed.
    killed = 4
    #: Termination status is unknown.
    unknown = 5
@attr.s
class TabData:

    """A simple namespace with a fixed set of attributes.

    Attributes:
        keep_icon: Whether the (e.g. cloned) icon should not be cleared on page
                   load.
        inspector: The QWebInspector used for this webview.
        viewing_source: Set if we're currently showing a source view.
                        Only used when sources are shown via pygments.
        open_target: Where to open the next link.
                     Only used for QtWebKit.
        override_target: Override for open_target for fake clicks (like hints).
                         Only used for QtWebKit.
        pinned: Flag to pin the tab.
        fullscreen: Whether the tab has a video shown fullscreen currently.
        netrc_used: Whether netrc authentication was performed.
        input_mode: current input mode for the tab.
    """

    keep_icon = attr.ib(False)  # type: bool
    viewing_source = attr.ib(False)  # type: bool
    inspector = attr.ib(None)  # type: typing.Optional[AbstractWebInspector]
    open_target = attr.ib(
        usertypes.ClickTarget.normal)  # type: usertypes.ClickTarget
    # None unless a fake click set a one-shot override; was typed as a plain
    # ClickTarget although the default is None.
    override_target = attr.ib(None)  # type: typing.Optional[usertypes.ClickTarget]
    pinned = attr.ib(False)  # type: bool
    fullscreen = attr.ib(False)  # type: bool
    netrc_used = attr.ib(False)  # type: bool
    input_mode = attr.ib(usertypes.KeyMode.normal)  # type: usertypes.KeyMode

    def should_show_icon(self) -> bool:
        """Whether to show a favicon: always, or for pinned tabs if pinned."""
        return (config.val.tabs.favicons.show == 'always' or
                config.val.tabs.favicons.show == 'pinned' and self.pinned)
class AbstractAction:

    """Attribute ``action`` of AbstractTab for Qt WebActions."""

    # The class actions are defined on (QWeb{Engine,}Page)
    action_class = None  # type: type
    # The type of the actions (QWeb{Engine,}Page.WebAction)
    action_base = None  # type: type

    def __init__(self, tab: 'AbstractTab') -> None:
        # Set later by the owning tab; cast keeps mypy happy.
        self._widget = typing.cast(QWidget, None)
        self._tab = tab

    def exit_fullscreen(self) -> None:
        """Exit the fullscreen mode."""
        raise NotImplementedError

    def save_page(self) -> None:
        """Save the current page."""
        raise NotImplementedError

    def run_string(self, name: str) -> None:
        """Run a webaction based on its name.

        Raises:
            WebTabError: If name is not a valid WebAction member.
        """
        member = getattr(self.action_class, name, None)
        if not isinstance(member, self.action_base):
            raise WebTabError("{} is not a valid web action!".format(name))
        self._widget.triggerPageAction(member)

    def show_source(
            self,
            pygments: bool = False  # pylint: disable=redefined-outer-name
    ) -> None:
        """Show the source of the current page in a new tab."""
        raise NotImplementedError

    def _show_source_pygments(self) -> None:
        # Dump the page HTML asynchronously and open a pygments-highlighted
        # rendering of it in a new related tab.

        def show_source_cb(source: str) -> None:
            """Show source as soon as it's ready."""
            # WORKAROUND for https://github.com/PyCQA/pylint/issues/491
            # pylint: disable=no-member
            lexer = pygments.lexers.HtmlLexer()
            formatter = pygments.formatters.HtmlFormatter(
                full=True, linenos='table')
            # pylint: enable=no-member
            highlighted = pygments.highlight(source, lexer, formatter)
            tb = objreg.get('tabbed-browser', scope='window',
                            window=self._tab.win_id)
            new_tab = tb.tabopen(background=False, related=True)
            new_tab.set_html(highlighted, self._tab.url())
            new_tab.data.viewing_source = True

        self._tab.dump_async(show_source_cb)
class AbstractPrinting:

    """Attribute ``printing`` of AbstractTab for printing the page."""

    def __init__(self, tab: 'AbstractTab') -> None:
        self._widget = None
        self._tab = tab

    def check_pdf_support(self) -> None:
        """Check whether writing to PDFs is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def check_printer_support(self) -> None:
        """Check whether writing to a printer is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def check_preview_support(self) -> None:
        """Check whether showing a print preview is supported.

        If it's not supported (by the current Qt version), a WebTabError is
        raised.
        """
        raise NotImplementedError

    def to_pdf(self, filename: str) -> bool:
        """Print the tab to a PDF with the given filename."""
        raise NotImplementedError

    def to_printer(self, printer: QPrinter,
                   callback: typing.Callable[[bool], None] = None) -> None:
        """Print the tab.

        Args:
            printer: The QPrinter to print to.
            callback: Called with a boolean
                      (True if printing succeeded, False otherwise)
        """
        raise NotImplementedError

    def show_dialog(self) -> None:
        """Print with a QPrintDialog."""
        self.check_printer_support()

        def print_callback(ok: bool) -> None:
            """Called when printing finished."""
            if not ok:
                message.error("Printing failed!")
            # The dialog is only needed while printing is in flight.
            diag.deleteLater()

        def do_print() -> None:
            """Called when the dialog was closed."""
            self.to_printer(diag.printer(), print_callback)

        diag = QPrintDialog(self._tab)
        if utils.is_mac:
            # For some reason we get a segfault when using open() on macOS
            ret = diag.exec_()
            if ret == QDialog.Accepted:
                do_print()
        else:
            # Non-blocking; do_print runs once the dialog is accepted.
            diag.open(do_print)
class AbstractSearch(QObject):

    """Attribute ``search`` of AbstractTab for doing searches.

    Attributes:
        text: The last thing this view was searched for.
        search_displayed: Whether we're currently displaying search results in
                          this view.
        _flags: The flags of the last search (needs to be set by subclasses).
        _widget: The underlying WebView widget.
    """

    #: Signal emitted when a search was finished
    #: (True if the text was found, False otherwise)
    finished = pyqtSignal(bool)
    #: Signal emitted when an existing search was cleared.
    cleared = pyqtSignal()

    #: Callback type shared by search/prev_result/next_result.
    _Callback = typing.Callable[[bool], None]

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None):
        super().__init__(parent)
        self._tab = tab
        self._widget = None
        self.text = None  # type: typing.Optional[str]
        self.search_displayed = False

    def _is_case_sensitive(self, ignore_case: usertypes.IgnoreCase) -> bool:
        """Check if case-sensitivity should be used.

        This assumes self.text is already set properly.

        Arguments:
            ignore_case: The ignore_case value from the config.
        """
        assert self.text is not None
        mapping = {
            # "smart": only case-sensitive if the query has uppercase chars.
            usertypes.IgnoreCase.smart: not self.text.islower(),
            usertypes.IgnoreCase.never: True,
            usertypes.IgnoreCase.always: False,
        }
        return mapping[ignore_case]

    def search(self, text: str, *,
               ignore_case: usertypes.IgnoreCase = usertypes.IgnoreCase.never,
               reverse: bool = False,
               result_cb: _Callback = None) -> None:
        """Find the given text on the page.

        Args:
            text: The text to search for.
            ignore_case: Search case-insensitively.
            reverse: Reverse search direction.
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def clear(self) -> None:
        """Clear the current search."""
        raise NotImplementedError

    def prev_result(self, *, result_cb: _Callback = None) -> None:
        """Go to the previous result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError

    def next_result(self, *, result_cb: _Callback = None) -> None:
        """Go to the next result of the current search.

        Args:
            result_cb: Called with a bool indicating whether a match was found.
        """
        raise NotImplementedError
class AbstractZoom(QObject):

    """Attribute ``zoom`` of AbstractTab for controlling zoom."""

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None) -> None:
        super().__init__(parent)
        self._tab = tab
        self._widget = None
        # Whether zoom was changed from the default.
        self._default_zoom_changed = False
        self._init_neighborlist()
        config.instance.changed.connect(self._on_config_changed)
        self._zoom_factor = float(config.val.zoom.default) / 100

    @pyqtSlot(str)
    def _on_config_changed(self, option: str) -> None:
        """Apply a new default zoom if the user didn't zoom manually."""
        if option in ['zoom.levels', 'zoom.default']:
            if not self._default_zoom_changed:
                factor = float(config.val.zoom.default) / 100
                self.set_factor(factor)
            self._init_neighborlist()

    def _init_neighborlist(self) -> None:
        """Initialize self._neighborlist.

        It is a NeighborList with the zoom levels."""
        levels = config.val.zoom.levels
        self._neighborlist = usertypes.NeighborList(
            levels, mode=usertypes.NeighborList.Modes.edge)
        self._neighborlist.fuzzyval = config.val.zoom.default

    def apply_offset(self, offset: int) -> int:
        """Increase/Decrease the zoom level by the given offset.

        FIX: was annotated ``-> None`` although the new level is returned
        (and documented as such below).

        Args:
            offset: The offset in the zoom level list.

        Return:
            The new zoom percentage.
        """
        level = self._neighborlist.getitem(offset)
        self.set_factor(float(level) / 100, fuzzyval=False)
        return level

    def _set_factor_internal(self, factor: float) -> None:
        """Backend-specific zoom factor application."""
        raise NotImplementedError

    def set_factor(self, factor: float, *, fuzzyval: bool = True) -> None:
        """Zoom to a given zoom factor.

        Args:
            factor: The zoom factor as float.
            fuzzyval: Whether to set the NeighborLists fuzzyval.

        Raises:
            ValueError: If factor is negative.
        """
        if fuzzyval:
            self._neighborlist.fuzzyval = int(factor * 100)
        if factor < 0:
            raise ValueError("Can't zoom to factor {}!".format(factor))

        default_zoom_factor = float(config.val.zoom.default) / 100
        self._default_zoom_changed = (factor != default_zoom_factor)

        self._zoom_factor = factor
        self._set_factor_internal(factor)

    def factor(self) -> float:
        """Get the current zoom factor."""
        return self._zoom_factor

    def apply_default(self) -> None:
        """Reset the zoom to the configured default."""
        self._set_factor_internal(float(config.val.zoom.default) / 100)

    def reapply(self) -> None:
        """Re-apply the current zoom factor (e.g. after a page load)."""
        self._set_factor_internal(self._zoom_factor)
class AbstractCaret(QObject):

    """Attribute ``caret`` of AbstractTab for caret browsing."""

    #: Signal emitted when the selection was toggled.
    #: (argument - whether the selection is now active)
    selection_toggled = pyqtSignal(bool)
    #: Emitted when a ``follow_selection`` action is done.
    follow_selected_done = pyqtSignal()

    def __init__(self,
                 tab: 'AbstractTab',
                 mode_manager: modeman.ModeManager,
                 parent: QWidget = None) -> None:
        super().__init__(parent)
        self._tab = tab
        self._widget = None
        # Whether text selection (as opposed to pure caret movement) is on.
        self.selection_enabled = False
        self._mode_manager = mode_manager
        mode_manager.entered.connect(self._on_mode_entered)
        mode_manager.left.connect(self._on_mode_left)

    def _on_mode_entered(self, mode: usertypes.KeyMode) -> None:
        """Called when a key mode was entered (e.g. to start caret mode)."""
        raise NotImplementedError

    def _on_mode_left(self, mode: usertypes.KeyMode) -> None:
        """Called when a key mode was left (e.g. to stop caret mode)."""
        raise NotImplementedError

    # Caret movement API below - all implemented by the backends.

    def move_to_next_line(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_line(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_next_char(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_char(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_next_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_prev_word(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_line(self) -> None:
        raise NotImplementedError

    def move_to_end_of_line(self) -> None:
        raise NotImplementedError

    def move_to_start_of_next_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_prev_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_next_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_end_of_prev_block(self, count: int = 1) -> None:
        raise NotImplementedError

    def move_to_start_of_document(self) -> None:
        raise NotImplementedError

    def move_to_end_of_document(self) -> None:
        raise NotImplementedError

    def toggle_selection(self) -> None:
        raise NotImplementedError

    def drop_selection(self) -> None:
        raise NotImplementedError

    def selection(self, callback: typing.Callable[[str], None]) -> None:
        """Pass the currently selected text to the callback."""
        raise NotImplementedError

    def reverse_selection(self) -> None:
        raise NotImplementedError

    def _follow_enter(self, tab: bool) -> None:
        """Follow a link by faking an enter press."""
        if tab:
            self._tab.fake_key_press(Qt.Key_Enter, modifier=Qt.ControlModifier)
        else:
            self._tab.fake_key_press(Qt.Key_Enter)

    def follow_selected(self, *, tab: bool = False) -> None:
        """Follow the currently selected link (in a new tab if tab=True)."""
        raise NotImplementedError
class AbstractScroller(QObject):

    """Attribute ``scroller`` of AbstractTab to manage scroll position."""

    #: Signal emitted when the scroll position changed (int, int)
    perc_changed = pyqtSignal(int, int)
    #: Signal emitted before the user requested a jump.
    #: Used to set the special ' mark so the user can return.
    before_jump_requested = pyqtSignal()

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None):
        super().__init__(parent)
        self._tab = tab
        self._widget = None  # type: typing.Optional[QWidget]
        if 'log-scroll-pos' in objects.debug_flags:
            self.perc_changed.connect(self._log_scroll_pos_change)

    @pyqtSlot()
    def _log_scroll_pos_change(self) -> None:
        """Log new scroll positions (enabled via --debug-flag)."""
        log.webview.vdebug(  # type: ignore
            "Scroll position changed to {}".format(self.pos_px()))

    def _init_widget(self, widget: QWidget) -> None:
        """Set the widget this scroller operates on."""
        self._widget = widget

    def pos_px(self) -> QPoint:
        """Get the scroll position in pixels.

        FIX: was annotated ``-> int``, but implementations return a QPoint
        (callers do ``pos_px().y()`` and compare against ``QPoint(0, 0)``).
        """
        raise NotImplementedError

    def pos_perc(self) -> int:
        # NOTE(review): given perc_changed(int, int), implementations may
        # return an (x, y) percentage pair - verify annotation against
        # subclasses before relying on it.
        raise NotImplementedError

    def to_perc(self, x: int = None, y: int = None) -> None:
        """Scroll to the given horizontal/vertical percentage."""
        raise NotImplementedError

    def to_point(self, point: QPoint) -> None:
        """Scroll to the given pixel position."""
        raise NotImplementedError

    def to_anchor(self, name: str) -> None:
        """Scroll to the anchor with the given name."""
        raise NotImplementedError

    def delta(self, x: int = 0, y: int = 0) -> None:
        """Scroll by the given pixel deltas."""
        raise NotImplementedError

    def delta_page(self, x: float = 0, y: float = 0) -> None:
        """Scroll by the given page-sized deltas."""
        raise NotImplementedError

    def up(self, count: int = 1) -> None:
        raise NotImplementedError

    def down(self, count: int = 1) -> None:
        raise NotImplementedError

    def left(self, count: int = 1) -> None:
        raise NotImplementedError

    def right(self, count: int = 1) -> None:
        raise NotImplementedError

    def top(self) -> None:
        raise NotImplementedError

    def bottom(self) -> None:
        raise NotImplementedError

    def page_up(self, count: int = 1) -> None:
        raise NotImplementedError

    def page_down(self, count: int = 1) -> None:
        raise NotImplementedError

    def at_top(self) -> bool:
        raise NotImplementedError

    def at_bottom(self) -> bool:
        raise NotImplementedError
class AbstractHistoryPrivate:

    """Private API related to the history."""

    def __init__(self, tab: 'AbstractTab'):
        self._tab = tab
        # Set by the backend to the underlying Qt history object.
        self._history = None

    def serialize(self) -> bytes:
        """Serialize into an opaque format understood by self.deserialize."""
        raise NotImplementedError

    def deserialize(self, data: bytes) -> None:
        """Deserialize from a format produced by self.serialize."""
        raise NotImplementedError

    def load_items(self, items: typing.Sequence) -> None:
        """Deserialize from a list of WebHistoryItems."""
        raise NotImplementedError
class AbstractHistory:

    """The history attribute of a AbstractTab."""

    def __init__(self, tab: 'AbstractTab') -> None:
        self._tab = tab
        self._history = None
        self.private_api = AbstractHistoryPrivate(tab)

    def __len__(self) -> int:
        raise NotImplementedError

    def __iter__(self) -> typing.Iterable:
        raise NotImplementedError

    def _check_count(self, count: int) -> None:
        """Raise WebTabError if count is negative."""
        if count < 0:
            raise WebTabError("count needs to be positive!")

    def current_idx(self) -> int:
        raise NotImplementedError

    def back(self, count: int = 1) -> None:
        """Go back in the tab's history.

        If fewer than count entries exist, go back as far as possible and
        raise WebTabError afterwards.
        """
        self._check_count(count)
        new_idx = self.current_idx() - count
        if new_idx < 0:
            self._go_to_item(self._item_at(0))
            raise WebTabError("At beginning of history.")
        self._go_to_item(self._item_at(new_idx))

    def forward(self, count: int = 1) -> None:
        """Go forward in the tab's history.

        If fewer than count entries exist, go forward as far as possible and
        raise WebTabError afterwards.
        """
        self._check_count(count)
        new_idx = self.current_idx() + count
        if new_idx >= len(self):
            self._go_to_item(self._item_at(len(self) - 1))
            raise WebTabError("At end of history.")
        self._go_to_item(self._item_at(new_idx))

    def can_go_back(self) -> bool:
        raise NotImplementedError

    def can_go_forward(self) -> bool:
        raise NotImplementedError

    def _item_at(self, i: int) -> typing.Any:
        raise NotImplementedError

    def _go_to_item(self, item: typing.Any) -> None:
        raise NotImplementedError
class AbstractElements:

    """Finding and handling of elements on the page."""

    #: Callback receiving all matched elements.
    _MultiCallback = typing.Callable[
        [typing.Sequence['webelem.AbstractWebElement']], None]
    #: Callback receiving a single element or None.
    _SingleCallback = typing.Callable[
        [typing.Optional['webelem.AbstractWebElement']], None]
    #: Callback receiving an exception on errors.
    _ErrorCallback = typing.Callable[[Exception], None]

    def __init__(self, tab: 'AbstractTab') -> None:
        self._widget = None
        self._tab = tab

    def find_css(self, selector: str,
                 callback: _MultiCallback,
                 error_cb: _ErrorCallback, *,
                 only_visible: bool = False) -> None:
        """Find all HTML elements matching a given selector async.

        If there's an error, the callback is called with a webelem.Error
        instance.

        Args:
            callback: The callback to be called when the search finished.
            error_cb: The callback to be called when an error occurred.
            selector: The CSS selector to search for.
            only_visible: Only show elements which are visible on screen.
        """
        raise NotImplementedError

    def find_id(self, elem_id: str, callback: _SingleCallback) -> None:
        """Find the HTML element with the given ID async.

        Args:
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
            elem_id: The ID to search for.
        """
        raise NotImplementedError

    def find_focused(self, callback: _SingleCallback) -> None:
        """Find the focused element on the page async.

        Args:
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
        """
        raise NotImplementedError

    def find_at_pos(self, pos: QPoint, callback: _SingleCallback) -> None:
        """Find the element at the given position async.

        This is also called "hit test" elsewhere.

        Args:
            pos: The QPoint to get the element for.
            callback: The callback to be called when the search finished.
                      Called with a WebEngineElement or None.
        """
        raise NotImplementedError
class AbstractAudio(QObject):

    """Handling of audio/muting for this tab."""

    #: Emitted when the mute state changed (new state as bool).
    muted_changed = pyqtSignal(bool)
    #: Emitted when the recently-audible state changed (new state as bool).
    recently_audible_changed = pyqtSignal(bool)

    def __init__(self, tab: 'AbstractTab', parent: QWidget = None) -> None:
        super().__init__(parent)
        self._widget = None  # type: typing.Optional[QWidget]
        self._tab = tab

    def set_muted(self, muted: bool, override: bool = False) -> None:
        """Set this tab as muted or not.

        Arguments:
            override: If set to True, muting/unmuting was done manually and
                      overrides future automatic mute/unmute changes based on
                      the URL.
        """
        raise NotImplementedError

    def is_muted(self) -> bool:
        """Whether this tab is currently muted."""
        raise NotImplementedError

    def is_recently_audible(self) -> bool:
        """Whether this tab has had audio playing recently."""
        raise NotImplementedError
class AbstractTabPrivate:

    """Tab-related methods which are only needed in the core.

    Those methods are not part of the API which is exposed to extensions, and
    should ideally be removed at some point in the future.
    """

    def __init__(self, mode_manager: modeman.ModeManager,
                 tab: 'AbstractTab') -> None:
        self._widget = None  # type: typing.Optional[QWidget]
        self._tab = tab
        self._mode_manager = mode_manager

    def event_target(self) -> QWidget:
        """Return the widget events should be sent to."""
        raise NotImplementedError

    def handle_auto_insert_mode(self, ok: bool) -> None:
        """Handle `input.insert_mode.auto_load` after loading finished."""
        # Only act on successful loads with the setting enabled, and not if
        # we're already in insert mode.
        if not ok or not config.cache['input.insert_mode.auto_load']:
            return

        cur_mode = self._mode_manager.mode
        if cur_mode == usertypes.KeyMode.insert:
            return

        def _auto_insert_mode_cb(
                elem: typing.Optional['webelem.AbstractWebElement']
        ) -> None:
            """Called from JS after finding the focused element."""
            if elem is None:
                log.webview.debug("No focused element!")
                return
            if elem.is_editable():
                modeman.enter(self._tab.win_id, usertypes.KeyMode.insert,
                              'load finished', only_if_normal=True)

        # Enter insert mode if an editable element has focus after the load.
        self._tab.elements.find_focused(_auto_insert_mode_cb)

    def clear_ssl_errors(self) -> None:
        """Forget remembered SSL errors for this tab."""
        raise NotImplementedError

    def networkaccessmanager(self) -> typing.Optional[QNetworkAccessManager]:
        """Get the QNetworkAccessManager for this tab.

        This is only implemented for QtWebKit.
        For QtWebEngine, always returns None.
        """
        raise NotImplementedError

    def user_agent(self) -> typing.Optional[str]:
        """Get the user agent for this tab.

        This is only implemented for QtWebKit.
        For QtWebEngine, always returns None.
        """
        raise NotImplementedError

    def shutdown(self) -> None:
        """Shut down the tab's underlying resources."""
        raise NotImplementedError
class AbstractTab(QWidget):

    """An adapter for QWebView/QWebEngineView representing a single tab."""

    #: Signal emitted when a website requests to close this tab.
    window_close_requested = pyqtSignal()
    #: Signal emitted when a link is hovered (the hover text)
    link_hovered = pyqtSignal(str)
    #: Signal emitted when a page started loading
    load_started = pyqtSignal()
    #: Signal emitted when a page is loading (progress percentage)
    load_progress = pyqtSignal(int)
    #: Signal emitted when a page finished loading (success as bool)
    load_finished = pyqtSignal(bool)
    #: Signal emitted when a page's favicon changed (icon as QIcon)
    icon_changed = pyqtSignal(QIcon)
    #: Signal emitted when a page's title changed (new title as str)
    title_changed = pyqtSignal(str)
    #: Signal emitted when a new tab should be opened (url as QUrl)
    new_tab_requested = pyqtSignal(QUrl)
    #: Signal emitted when a page's URL changed (url as QUrl)
    url_changed = pyqtSignal(QUrl)
    #: Signal emitted when a tab's content size changed
    #: (new size as QSizeF)
    contents_size_changed = pyqtSignal(QSizeF)
    #: Signal emitted when a page requested full-screen (bool)
    fullscreen_requested = pyqtSignal(bool)
    #: Signal emitted before load starts (URL as QUrl)
    before_load_started = pyqtSignal(QUrl)

    # Signal emitted when a page's load status changed
    # (argument: usertypes.LoadStatus)
    load_status_changed = pyqtSignal(usertypes.LoadStatus)
    # Signal emitted before shutting down
    shutting_down = pyqtSignal()
    # Signal emitted when a history item should be added
    history_item_triggered = pyqtSignal(QUrl, QUrl, str)
    # Signal emitted when the underlying renderer process terminated.
    # arg 0: A TerminationStatus member.
    # arg 1: The exit code.
    renderer_process_terminated = pyqtSignal(TerminationStatus, int)

    # Hosts for which a certificate error happened. Shared between all tabs.
    #
    # Note that we remember hosts here, without scheme/port:
    # QtWebEngine/Chromium also only remembers hostnames, and certificates are
    # for a given hostname anyways.
    _insecure_hosts = set()  # type: typing.Set[str]

    def __init__(self, *, win_id: int, private: bool,
                 parent: QWidget = None) -> None:
        self.is_private = private
        self.win_id = win_id
        self.tab_id = next(tab_id_gen)
        super().__init__(parent)

        self.registry = objreg.ObjectRegistry()
        tab_registry = objreg.get('tab-registry', scope='window',
                                  window=win_id)
        tab_registry[self.tab_id] = self
        objreg.register('tab', self, registry=self.registry)

        self.data = TabData()
        self._layout = miscwidgets.WrapperLayout(self)
        self._widget = None  # type: typing.Optional[QWidget]
        self._progress = 0
        self._load_status = usertypes.LoadStatus.none
        self._tab_event_filter = eventfilter.TabEventFilter(
            self, parent=self)
        self.backend = None

        # FIXME:qtwebengine  Should this be public api via self.hints?
        # Also, should we get it out of objreg?
        hintmanager = hints.HintManager(win_id, self.tab_id, parent=self)
        objreg.register('hintmanager', hintmanager, scope='tab',
                        window=self.win_id, tab=self.tab_id)

        self.before_load_started.connect(self._on_before_load_started)

    def _set_widget(self, widget: QWidget) -> None:
        """Wrap the backend widget and point all tab sub-APIs at it."""
        # pylint: disable=protected-access
        self._widget = widget
        self._layout.wrap(self, widget)
        self.history._history = widget.history()
        self.history.private_api._history = widget.history()
        self.scroller._init_widget(widget)
        self.caret._widget = widget
        self.zoom._widget = widget
        self.search._widget = widget
        self.printing._widget = widget
        self.action._widget = widget
        self.elements._widget = widget
        self.audio._widget = widget
        self.private_api._widget = widget
        self.settings._settings = widget.settings()
        self._install_event_filter()
        self.zoom.apply_default()

    def _install_event_filter(self) -> None:
        raise NotImplementedError

    def _set_load_status(self, val: usertypes.LoadStatus) -> None:
        """Setter for load_status; emits load_status_changed."""
        if not isinstance(val, usertypes.LoadStatus):
            raise TypeError("Type {} is no LoadStatus member!".format(val))
        log.webview.debug("load status for {}: {}".format(repr(self), val))
        self._load_status = val
        self.load_status_changed.emit(val)

    def send_event(self, evt: QEvent) -> None:
        """Send the given event to the underlying widget.

        The event will be sent via QApplication.postEvent.
        Note that a posted event must not be re-used in any way!
        """
        # This only gives us some mild protection against re-using events, but
        # it's certainly better than a segfault.
        if getattr(evt, 'posted', False):
            raise utils.Unreachable("Can't re-use an event which was already "
                                    "posted!")

        recipient = self.private_api.event_target()
        if recipient is None:
            # https://github.com/qutebrowser/qutebrowser/issues/3888
            log.webview.warning("Unable to find event target!")
            return

        evt.posted = True
        QApplication.postEvent(recipient, evt)

    def navigation_blocked(self) -> bool:
        """Test if navigation is allowed on the current tab."""
        return self.data.pinned and config.val.tabs.pinned.frozen

    @pyqtSlot(QUrl)
    def _on_before_load_started(self, url: QUrl) -> None:
        """Adjust the title if we are going to visit a URL soon."""
        qtutils.ensure_valid(url)
        url_string = url.toDisplayString()
        log.webview.debug("Going to start loading: {}".format(url_string))
        self.title_changed.emit(url_string)

    @pyqtSlot(QUrl)
    def _on_url_changed(self, url: QUrl) -> None:
        """Update title when URL has changed and no title is available."""
        if url.isValid() and not self.title():
            self.title_changed.emit(url.toDisplayString())
        self.url_changed.emit(url)

    @pyqtSlot()
    def _on_load_started(self) -> None:
        self._progress = 0
        self.data.viewing_source = False
        self._set_load_status(usertypes.LoadStatus.loading)
        self.load_started.emit()

    @pyqtSlot(usertypes.NavigationRequest)
    def _on_navigation_request(
            self,
            navigation: usertypes.NavigationRequest
    ) -> None:
        """Handle common acceptNavigationRequest code."""
        url = utils.elide(navigation.url.toDisplayString(), 100)
        log.webview.debug("navigation request: url {}, type {}, is_main_frame "
                          "{}".format(url,
                                      navigation.navigation_type,
                                      navigation.is_main_frame))

        if not navigation.url.isValid():
            # Also a WORKAROUND for missing IDNA 2008 support in QUrl, see
            # https://bugreports.qt.io/browse/QTBUG-60364
            if navigation.navigation_type == navigation.Type.link_clicked:
                msg = urlutils.get_errstring(navigation.url,
                                             "Invalid link clicked")
                message.error(msg)
                self.data.open_target = usertypes.ClickTarget.normal

            log.webview.debug("Ignoring invalid URL {} in "
                              "acceptNavigationRequest: {}".format(
                                  navigation.url.toDisplayString(),
                                  navigation.url.errorString()))
            navigation.accepted = False

    @pyqtSlot(bool)
    def _on_load_finished(self, ok: bool) -> None:
        assert self._widget is not None
        if sip.isdeleted(self._widget):
            # https://github.com/qutebrowser/qutebrowser/issues/3498
            return

        try:
            sess_manager = objreg.get('session-manager')
        except KeyError:
            # https://github.com/qutebrowser/qutebrowser/issues/4311
            return

        sess_manager.save_autosave()
        self.load_finished.emit(ok)

        if not self.title():
            self.title_changed.emit(self.url().toDisplayString())

        self.zoom.reapply()

    def _update_load_status(self, ok: bool) -> None:
        """Update the load status after a page finished loading.

        Needs to be called by subclasses to trigger a load status update, e.g.
        as a response to a loadFinished signal.
        """
        # NOTE: The previous version had an unreachable `elif ok:` branch
        # after `if ok:` (dead code); it has been removed.
        if ok:
            if self.url().scheme() == 'https':
                if self.url().host() in self._insecure_hosts:
                    # This host had a certificate error before, so don't
                    # display it as fully secure.
                    self._set_load_status(usertypes.LoadStatus.warn)
                else:
                    self._set_load_status(usertypes.LoadStatus.success_https)
            else:
                self._set_load_status(usertypes.LoadStatus.success)
        else:
            self._set_load_status(usertypes.LoadStatus.error)

    @pyqtSlot()
    def _on_history_trigger(self) -> None:
        """Emit history_item_triggered based on backend-specific signal."""
        raise NotImplementedError

    @pyqtSlot(int)
    def _on_load_progress(self, perc: int) -> None:
        self._progress = perc
        self.load_progress.emit(perc)

    def url(self, *, requested: bool = False) -> QUrl:
        raise NotImplementedError

    def progress(self) -> int:
        """The current load progress as a percentage."""
        return self._progress

    def load_status(self) -> usertypes.LoadStatus:
        """The current load status as set via _set_load_status()."""
        return self._load_status

    def _load_url_prepare(self, url: QUrl, *,
                          emit_before_load_started: bool = True) -> None:
        """Validate the URL and announce the upcoming load."""
        qtutils.ensure_valid(url)
        if emit_before_load_started:
            self.before_load_started.emit(url)

    def load_url(self, url: QUrl, *,
                 emit_before_load_started: bool = True) -> None:
        raise NotImplementedError

    def reload(self, *, force: bool = False) -> None:
        raise NotImplementedError

    def stop(self) -> None:
        raise NotImplementedError

    def fake_key_press(self,
                       key: Qt.Key,
                       modifier: Qt.KeyboardModifier = Qt.NoModifier) -> None:
        """Send a fake key event to this tab."""
        press_evt = QKeyEvent(QEvent.KeyPress, key, modifier, 0, 0, 0)
        release_evt = QKeyEvent(QEvent.KeyRelease, key, modifier,
                                0, 0, 0)
        self.send_event(press_evt)
        self.send_event(release_evt)

    def dump_async(self,
                   callback: typing.Callable[[str], None], *,
                   plain: bool = False) -> None:
        """Dump the current page's html asynchronously.

        The given callback will be called with the result when dumping is
        complete.
        """
        raise NotImplementedError

    def run_js_async(
            self,
            code: str,
            callback: typing.Callable[[typing.Any], None] = None, *,
            world: typing.Union[usertypes.JsWorld, int] = None
    ) -> None:
        """Run javascript async.

        The given callback will be called with the result when running JS is
        complete.

        Args:
            code: The javascript code to run.
            callback: The callback to call with the result, or None.
            world: A world ID (int or usertypes.JsWorld member) to run the JS
                   in the main world or in another isolated world.
        """
        raise NotImplementedError

    def title(self) -> str:
        raise NotImplementedError

    def icon(self) -> None:
        raise NotImplementedError

    def set_html(self, html: str, base_url: QUrl = QUrl()) -> None:
        raise NotImplementedError

    def __repr__(self) -> str:
        try:
            qurl = self.url()
            url = qurl.toDisplayString(QUrl.EncodeUnicode)  # type: ignore
        except (AttributeError, RuntimeError) as exc:
            url = '<{}>'.format(exc.__class__.__name__)
        else:
            url = utils.elide(url, 100)
        return utils.get_repr(self, tab_id=self.tab_id, url=url)

    def is_deleted(self) -> bool:
        """Whether the wrapped widget was deleted on the C++ side."""
        assert self._widget is not None
        return sip.isdeleted(self._widget)
| ./CrossVul/dataset_final_sorted/CWE-684/py/good_3917_0 |
crossvul-python_data_good_2410_3 | #!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""``tornado.web`` provides a simple web framework with asynchronous
features that allow it to scale to large numbers of open connections,
making it ideal for `long polling
<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.
Here is a simple "Hello, world" example app::
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
application.listen(8888)
tornado.ioloop.IOLoop.instance().start()
See the :doc:`Tornado overview <overview>` for more details and a good getting
started guide.
Thread-safety notes
-------------------
In general, methods on `RequestHandler` and elsewhere in Tornado are
not thread-safe. In particular, methods such as
`~RequestHandler.write()`, `~RequestHandler.finish()`, and
`~RequestHandler.flush()` must only be called from the main thread. If
you use multiple threads it is important to use `.IOLoop.add_callback`
to transfer control back to the main thread before finishing the
request.
"""
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
import datetime
import email.utils
import functools
import gzip
import hashlib
import hmac
import mimetypes
import numbers
import os.path
import re
import stat
import sys
import threading
import time
import tornado
import traceback
import types
from tornado.concurrent import Future
from tornado import escape
from tornado import httputil
from tornado import locale
from tornado.log import access_log, app_log, gen_log
from tornado import stack_context
from tornado import template
from tornado.escape import utf8, _unicode
from tornado.util import bytes_type, import_object, ObjectDict, raise_exc_info, unicode_type, _websocket_mask
try:
from io import BytesIO # python 3
except ImportError:
from cStringIO import StringIO as BytesIO # python 2
try:
import Cookie # py2
except ImportError:
import http.cookies as Cookie # py3
try:
import urlparse # py2
except ImportError:
import urllib.parse as urlparse # py3
try:
from urllib import urlencode # py2
except ImportError:
from urllib.parse import urlencode # py3
MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1
"""The oldest signed value version supported by this version of Tornado.
Signed values older than this version cannot be decoded.
.. versionadded:: 3.2.1
"""
MAX_SUPPORTED_SIGNED_VALUE_VERSION = 2
"""The newest signed value version supported by this version of Tornado.
Signed values newer than this version cannot be decoded.
.. versionadded:: 3.2.1
"""
DEFAULT_SIGNED_VALUE_VERSION = 2
"""The signed value version produced by `.RequestHandler.create_signed_value`.
May be overridden by passing a ``version`` keyword argument.
.. versionadded:: 3.2.1
"""
DEFAULT_SIGNED_VALUE_MIN_VERSION = 1
"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`.
May be overrided by passing a ``min_version`` keyword argument.
.. versionadded:: 3.2.1
"""
class RequestHandler(object):
    """Subclass this class and define `get()` or `post()` to make a handler.
    If you want to support more methods than the standard GET/HEAD/POST, you
    should override the class variable ``SUPPORTED_METHODS`` in your
    `RequestHandler` subclass.
    """
    SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
                         "OPTIONS")
    # Cache of template loaders, shared by all handler instances in the
    # process.
    _template_loaders = {}  # {path: template.BaseLoader}
    # Guards lazy creation of entries in _template_loaders.
    _template_loader_lock = threading.Lock()
    # C0 control characters except \t \n \v \f \r; occurrences in decoded
    # request arguments are replaced with spaces in _get_arguments().
    _remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
    def __init__(self, application, request, **kwargs):
        """Bind the handler to *application* and *request*.
        ``kwargs`` (from the url spec) are forwarded to `initialize()`,
        which subclasses should override instead of ``__init__``.
        """
        super(RequestHandler, self).__init__()
        self.application = application
        self.request = request
        self._headers_written = False
        self._finished = False
        self._auto_finish = True
        self._transforms = None  # will be set in _execute
        self.path_args = None
        self.path_kwargs = None
        self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
                             application.ui_methods.items())
        # UIModules are available as both `modules` and `_tt_modules` in the
        # template namespace.  Historically only `modules` was available
        # but could be clobbered by user additions to the namespace.
        # The template {% module %} directive looks in `_tt_modules` to avoid
        # possible conflicts.
        self.ui["_tt_modules"] = _UIModuleNamespace(self,
                                                    application.ui_modules)
        self.ui["modules"] = self.ui["_tt_modules"]
        # Sets up default headers/status before any user code runs.
        self.clear()
        # Check since connection is not available in WSGI
        if getattr(self.request, "connection", None):
            self.request.connection.set_close_callback(
                self.on_connection_close)
        self.initialize(**kwargs)
    def initialize(self):
        """Hook for subclass initialization.
        Called once at the end of ``__init__``.  A dictionary passed as the
        third argument of a url spec will be supplied as keyword arguments
        to initialize().
        Example::
            class ProfileHandler(RequestHandler):
                def initialize(self, database):
                    self.database = database
                def get(self, username):
                    ...
            app = Application([
                (r'/user/(.*)', ProfileHandler, dict(database=database)),
                ])
        """
        pass
    @property
    def settings(self):
        """An alias for `self.application.settings <Application.settings>`."""
        # Returns the Application's settings object itself (not a copy).
        return self.application.settings
    def head(self, *args, **kwargs):
        # Default: HEAD not supported; subclasses override to handle it.
        raise HTTPError(405)
    def get(self, *args, **kwargs):
        # Default: GET not supported; subclasses override to handle it.
        raise HTTPError(405)
    def post(self, *args, **kwargs):
        # Default: POST not supported; subclasses override to handle it.
        raise HTTPError(405)
    def delete(self, *args, **kwargs):
        # Default: DELETE not supported; subclasses override to handle it.
        raise HTTPError(405)
    def patch(self, *args, **kwargs):
        # Default: PATCH not supported; subclasses override to handle it.
        raise HTTPError(405)
    def put(self, *args, **kwargs):
        # Default: PUT not supported; subclasses override to handle it.
        raise HTTPError(405)
    def options(self, *args, **kwargs):
        # Default: OPTIONS not supported; subclasses override to handle it.
        raise HTTPError(405)
    def prepare(self):
        """Called at the beginning of a request before  `get`/`post`/etc.
        Override this method to perform common initialization regardless
        of the request method.
        Asynchronous support: Decorate this method with `.gen.coroutine`
        or `.return_future` to make it asynchronous (the
        `asynchronous` decorator cannot be used on `prepare`).
        If this method returns a `.Future` execution will not proceed
        until the `.Future` is done.
        .. versionadded:: 3.1
           Asynchronous support.
        """
        # No-op by default.
        pass
    def on_finish(self):
        """Called after the end of a request.
        Override this method to perform cleanup, logging, etc.
        This method is a counterpart to `prepare`.  ``on_finish`` may
        not produce any output, as it is called after the response
        has been sent to the client.
        """
        # No-op by default.
        pass
    def on_connection_close(self):
        """Called in async handlers if the client closed the connection.
        Override this to clean up resources associated with
        long-lived connections.  Note that this method is called only if
        the connection was closed during asynchronous processing; if you
        need to do cleanup after every request override `on_finish`
        instead.
        Proxies may keep a connection open for a time (perhaps
        indefinitely) after the client has gone away, so this method
        may not be called promptly after the end user closes their
        connection.
        """
        # No-op by default; registered via set_close_callback in __init__.
        pass
def clear(self):
"""Resets all headers and content for this response."""
self._headers = httputil.HTTPHeaders({
"Server": "TornadoServer/%s" % tornado.version,
"Content-Type": "text/html; charset=UTF-8",
"Date": httputil.format_timestamp(time.time()),
})
self.set_default_headers()
if (not self.request.supports_http_1_1() and
getattr(self.request, 'connection', None) and
not self.request.connection.no_keep_alive):
conn_header = self.request.headers.get("Connection")
if conn_header and (conn_header.lower() == "keep-alive"):
self._headers["Connection"] = "Keep-Alive"
self._write_buffer = []
self._status_code = 200
self._reason = httputil.responses[200]
    def set_default_headers(self):
        """Override this to set HTTP headers at the beginning of the request.
        For example, this is the place to set a custom ``Server`` header.
        Note that setting such headers in the normal flow of request
        processing may not do what you want, since headers may be reset
        during error handling.
        """
        # No-op by default; called from clear().
        pass
def set_status(self, status_code, reason=None):
"""Sets the status code for our response.
:arg int status_code: Response status code. If ``reason`` is ``None``,
it must be present in `httplib.responses <http.client.responses>`.
:arg string reason: Human-readable reason phrase describing the status
code. If ``None``, it will be filled in from
`httplib.responses <http.client.responses>`.
"""
self._status_code = status_code
if reason is not None:
self._reason = escape.native_str(reason)
else:
try:
self._reason = httputil.responses[status_code]
except KeyError:
raise ValueError("unknown status code %d", status_code)
    def get_status(self):
        """Returns the status code for our response."""
        # Reflects the most recent set_status() call (200 after clear()).
        return self._status_code
    def set_header(self, name, value):
        """Sets the given response header name and value.
        If a datetime is given, we automatically format it according to the
        HTTP specification. If the value is not a string, we convert it to
        a string. All header values are then encoded as UTF-8.
        """
        # _convert_header_value enforces type and character safety.
        self._headers[name] = self._convert_header_value(value)
    def add_header(self, name, value):
        """Adds the given response header and value.
        Unlike `set_header`, `add_header` may be called multiple times
        to return multiple values for the same header.
        """
        # _convert_header_value enforces type and character safety.
        self._headers.add(name, self._convert_header_value(value))
def clear_header(self, name):
"""Clears an outgoing header, undoing a previous `set_header` call.
Note that this method does not apply to multi-valued headers
set by `add_header`.
"""
if name in self._headers:
del self._headers[name]
_INVALID_HEADER_CHAR_RE = re.compile(br"[\x00-\x1f]")
def _convert_header_value(self, value):
if isinstance(value, bytes_type):
pass
elif isinstance(value, unicode_type):
value = value.encode('utf-8')
elif isinstance(value, numbers.Integral):
# return immediately since we know the converted value will be safe
return str(value)
elif isinstance(value, datetime.datetime):
return httputil.format_timestamp(value)
else:
raise TypeError("Unsupported header value %r" % value)
# If \n is allowed into the header, it is possible to inject
# additional headers or split the request. Also cap length to
# prevent obviously erroneous values.
if (len(value) > 4000 or
RequestHandler._INVALID_HEADER_CHAR_RE.search(value)):
raise ValueError("Unsafe header value %r", value)
return value
    # Sentinel default: lets callers pass default=None while still letting
    # _get_argument detect "no default given" via an identity check.
    _ARG_DEFAULT = []
    def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name.
        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.
        If the argument appears in the url more than once, we return the
        last value.
        The returned value is always unicode.
        """
        return self._get_argument(name, default, self.request.arguments, strip)
    def get_arguments(self, name, strip=True):
        """Returns a list of the arguments with the given name.
        If the argument is not present, returns an empty list.
        The returned values are always unicode.
        """
        # Combined query + body arguments, per self.request.arguments.
        return self._get_arguments(name, self.request.arguments, strip)
    def get_body_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name
        from the request body.
        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.
        If the argument appears in the url more than once, we return the
        last value.
        The returned value is always unicode.
        .. versionadded:: 3.2
        """
        return self._get_argument(name, default, self.request.body_arguments, strip)
    def get_body_arguments(self, name, strip=True):
        """Returns a list of the body arguments with the given name.
        If the argument is not present, returns an empty list.
        The returned values are always unicode.
        .. versionadded:: 3.2
        """
        return self._get_arguments(name, self.request.body_arguments, strip)
    def get_query_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name
        from the request query string.
        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.
        If the argument appears in the url more than once, we return the
        last value.
        The returned value is always unicode.
        .. versionadded:: 3.2
        """
        return self._get_argument(name, default, self.request.query_arguments, strip)
    def get_query_arguments(self, name, strip=True):
        """Returns a list of the query arguments with the given name.
        If the argument is not present, returns an empty list.
        The returned values are always unicode.
        .. versionadded:: 3.2
        """
        return self._get_arguments(name, self.request.query_arguments, strip)
def _get_argument(self, name, default, source, strip=True):
args = self._get_arguments(name, source, strip=strip)
if not args:
if default is self._ARG_DEFAULT:
raise MissingArgumentError(name)
return default
return args[-1]
def _get_arguments(self, name, source, strip=True):
values = []
for v in source.get(name, []):
v = self.decode_argument(v, name=name)
if isinstance(v, unicode_type):
# Get rid of any weird control chars (unless decoding gave
# us bytes, in which case leave it alone)
v = RequestHandler._remove_control_chars_regex.sub(" ", v)
if strip:
v = v.strip()
values.append(v)
return values
    def decode_argument(self, value, name=None):
        """Decodes an argument from the request.
        The argument has been percent-decoded and is now a byte string.
        By default, this method decodes the argument as utf-8 and returns
        a unicode string, but this may be overridden in subclasses.
        This method is used as a filter for both `get_argument()` and for
        values extracted from the url and passed to `get()`/`post()`/etc.
        The name of the argument is provided if known, but may be None
        (e.g. for unnamed groups in the url regex).
        """
        try:
            return _unicode(value)
        except UnicodeDecodeError:
            # Malformed UTF-8 is a client error (400 Bad Request).
            raise HTTPError(400, "Invalid unicode in %s: %r" %
                            (name or "url", value[:40]))
    @property
    def cookies(self):
        """An alias for `self.request.cookies <.httpserver.HTTPRequest.cookies>`."""
        # Incoming (request) cookies; outgoing ones live in _new_cookie.
        return self.request.cookies
def get_cookie(self, name, default=None):
"""Gets the value of the cookie with the given name, else default."""
if self.request.cookies is not None and name in self.request.cookies:
return self.request.cookies[name].value
return default
def set_cookie(self, name, value, domain=None, expires=None, path="/",
expires_days=None, **kwargs):
"""Sets the given cookie name/value with the given options.
Additional keyword arguments are set on the Cookie.Morsel
directly.
See http://docs.python.org/library/cookie.html#morsel-objects
for available attributes.
"""
# The cookie library only accepts type str, in both python 2 and 3
name = escape.native_str(name)
value = escape.native_str(value)
if re.search(r"[\x00-\x20]", name + value):
# Don't let us accidentally inject bad stuff
raise ValueError("Invalid cookie %r: %r" % (name, value))
if not hasattr(self, "_new_cookie"):
self._new_cookie = Cookie.SimpleCookie()
if name in self._new_cookie:
del self._new_cookie[name]
self._new_cookie[name] = value
morsel = self._new_cookie[name]
if domain:
morsel["domain"] = domain
if expires_days is not None and not expires:
expires = datetime.datetime.utcnow() + datetime.timedelta(
days=expires_days)
if expires:
morsel["expires"] = httputil.format_timestamp(expires)
if path:
morsel["path"] = path
for k, v in kwargs.items():
if k == 'max_age':
k = 'max-age'
morsel[k] = v
def clear_cookie(self, name, path="/", domain=None):
"""Deletes the cookie with the given name.
Due to limitations of the cookie protocol, you must pass the same
path and domain to clear a cookie as were used when that cookie
was set (but there is no way to find out on the server side
which values were used for a given cookie).
"""
expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
self.set_cookie(name, value="", path=path, expires=expires,
domain=domain)
    def clear_all_cookies(self, path="/", domain=None):
        """Deletes all the cookies the user sent with this request.
        See `clear_cookie` for more information on the path and domain
        parameters.
        .. versionchanged:: 3.2
           Added the ``path`` and ``domain`` parameters.
        """
        # Only cookies present on this request can be cleared.
        for name in self.request.cookies:
            self.clear_cookie(name, path=path, domain=domain)
    def set_secure_cookie(self, name, value, expires_days=30, version=None,
                          **kwargs):
        """Signs and timestamps a cookie so it cannot be forged.
        You must specify the ``cookie_secret`` setting in your Application
        to use this method. It should be a long, random sequence of bytes
        to be used as the HMAC secret for the signature.
        To read a cookie set with this method, use `get_secure_cookie()`.
        Note that the ``expires_days`` parameter sets the lifetime of the
        cookie in the browser, but is independent of the ``max_age_days``
        parameter to `get_secure_cookie`.
        Secure cookies may contain arbitrary byte values, not just unicode
        strings (unlike regular cookies)
        .. versionchanged:: 3.2.1
           Added the ``version`` argument.  Introduced cookie version 2
           and made it the default.
        """
        # Signing happens in create_signed_value; storage via set_cookie.
        self.set_cookie(name, self.create_signed_value(name, value,
                                                       version=version),
                        expires_days=expires_days, **kwargs)
    def create_signed_value(self, name, value, version=None):
        """Signs and timestamps a string so it cannot be forged.
        Normally used via set_secure_cookie, but provided as a separate
        method for non-cookie uses.  To decode a value not stored
        as a cookie use the optional value argument to get_secure_cookie.
        .. versionchanged:: 3.2.1
           Added the ``version`` argument.  Introduced cookie version 2
           and made it the default.
        """
        self.require_setting("cookie_secret", "secure cookies")
        # Delegates to the module-level create_signed_value with the
        # application's HMAC secret.
        return create_signed_value(self.application.settings["cookie_secret"],
                                   name, value, version=version)
    def get_secure_cookie(self, name, value=None, max_age_days=31,
                          min_version=None):
        """Returns the given signed cookie if it validates, or None.
        The decoded cookie value is returned as a byte string (unlike
        `get_cookie`).
        .. versionchanged:: 3.2.1
           Added the ``min_version`` argument.  Introduced cookie version 2;
           both versions 1 and 2 are accepted by default.
        """
        self.require_setting("cookie_secret", "secure cookies")
        if value is None:
            value = self.get_cookie(name)
        # Signature/timestamp validation is done by decode_signed_value.
        return decode_signed_value(self.application.settings["cookie_secret"],
                                   name, value, max_age_days=max_age_days,
                                   min_version=min_version)
    def redirect(self, url, permanent=False, status=None):
        """Sends a redirect to the given (optionally relative) URL.
        If the ``status`` argument is specified, that value is used as the
        HTTP status code; otherwise either 301 (permanent) or 302
        (temporary) is chosen based on the ``permanent`` argument.
        The default is 302 (temporary).
        """
        if self._headers_written:
            raise Exception("Cannot redirect after headers have been written")
        if status is None:
            status = 301 if permanent else 302
        else:
            # An explicit status must be in the 3xx redirect class.
            assert isinstance(status, int) and 300 <= status <= 399
        self.set_status(status)
        # Resolve relative URLs against the current request URI.
        self.set_header("Location", urlparse.urljoin(utf8(self.request.uri),
                                                     utf8(url)))
        self.finish()
def write(self, chunk):
"""Writes the given chunk to the output buffer.
To write the output to the network, use the flush() method below.
If the given chunk is a dictionary, we write it as JSON and set
the Content-Type of the response to be ``application/json``.
(if you want to send JSON as a different ``Content-Type``, call
set_header *after* calling write()).
Note that lists are not converted to JSON because of a potential
cross-site security vulnerability. All JSON output should be
wrapped in a dictionary. More details at
http://haacked.com/archive/2008/11/20/anatomy-of-a-subtle-json-vulnerability.aspx
"""
if self._finished:
raise RuntimeError("Cannot write() after finish(). May be caused "
"by using async operations without the "
"@asynchronous decorator.")
if isinstance(chunk, dict):
chunk = escape.json_encode(chunk)
self.set_header("Content-Type", "application/json; charset=UTF-8")
chunk = utf8(chunk)
self._write_buffer.append(chunk)
    def render(self, template_name, **kwargs):
        """Renders the template with the given arguments as the response."""
        html = self.render_string(template_name, **kwargs)
        # Insert the additional JS and CSS added by the modules on the page
        js_embed = []
        js_files = []
        css_embed = []
        css_files = []
        html_heads = []
        html_bodies = []
        # Collect the assets each active UIModule contributed to this page.
        for module in getattr(self, "_active_modules", {}).values():
            embed_part = module.embedded_javascript()
            if embed_part:
                js_embed.append(utf8(embed_part))
            file_part = module.javascript_files()
            if file_part:
                if isinstance(file_part, (unicode_type, bytes_type)):
                    js_files.append(file_part)
                else:
                    js_files.extend(file_part)
            embed_part = module.embedded_css()
            if embed_part:
                css_embed.append(utf8(embed_part))
            file_part = module.css_files()
            if file_part:
                if isinstance(file_part, (unicode_type, bytes_type)):
                    css_files.append(file_part)
                else:
                    css_files.extend(file_part)
            head_part = module.html_head()
            if head_part:
                html_heads.append(utf8(head_part))
            body_part = module.html_body()
            if body_part:
                html_bodies.append(utf8(body_part))
        def is_absolute(path):
            # Paths not starting with these prefixes go through static_url().
            return any(path.startswith(x) for x in ["/", "http:", "https:"])
        if js_files:
            # Maintain order of JavaScript files given by modules
            paths = []
            unique_paths = set()
            for path in js_files:
                if not is_absolute(path):
                    path = self.static_url(path)
                if path not in unique_paths:
                    paths.append(path)
                    unique_paths.add(path)
            js = ''.join('<script src="' + escape.xhtml_escape(p) +
                         '" type="text/javascript"></script>'
                         for p in paths)
            # Script tags are spliced in just before the closing </body>.
            sloc = html.rindex(b'</body>')
            html = html[:sloc] + utf8(js) + b'\n' + html[sloc:]
        if js_embed:
            js = b'<script type="text/javascript">\n//<![CDATA[\n' + \
                b'\n'.join(js_embed) + b'\n//]]>\n</script>'
            sloc = html.rindex(b'</body>')
            html = html[:sloc] + js + b'\n' + html[sloc:]
        if css_files:
            paths = []
            unique_paths = set()
            for path in css_files:
                if not is_absolute(path):
                    path = self.static_url(path)
                if path not in unique_paths:
                    paths.append(path)
                    unique_paths.add(path)
            css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
                          'type="text/css" rel="stylesheet"/>'
                          for p in paths)
            # Stylesheets are spliced in just before the closing </head>.
            hloc = html.index(b'</head>')
            html = html[:hloc] + utf8(css) + b'\n' + html[hloc:]
        if css_embed:
            css = b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
                b'\n</style>'
            hloc = html.index(b'</head>')
            html = html[:hloc] + css + b'\n' + html[hloc:]
        if html_heads:
            hloc = html.index(b'</head>')
            html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:]
        if html_bodies:
            hloc = html.index(b'</body>')
            html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:]
        self.finish(html)
    def render_string(self, template_name, **kwargs):
        """Generate the given template with the given arguments.

        We return the generated byte string (in utf8). To generate and
        write a template as a response, use render() above.
        """
        # If no template_path is specified, use the path of the calling file
        template_path = self.get_template_path()
        if not template_path:
            # Walk up the stack past frames belonging to this module to
            # find the application file that triggered the render.
            frame = sys._getframe(0)
            web_file = frame.f_code.co_filename
            while frame.f_code.co_filename == web_file:
                frame = frame.f_back
            template_path = os.path.dirname(frame.f_code.co_filename)
        # Loaders are cached per template root on the class; the lock
        # guards the cache against concurrent renders.
        with RequestHandler._template_loader_lock:
            if template_path not in RequestHandler._template_loaders:
                loader = self.create_template_loader(template_path)
                RequestHandler._template_loaders[template_path] = loader
            else:
                loader = RequestHandler._template_loaders[template_path]
        t = loader.load(template_name)
        namespace = self.get_template_namespace()
        # Explicit render() arguments override the default namespace.
        namespace.update(kwargs)
        return t.generate(**namespace)
def get_template_namespace(self):
"""Returns a dictionary to be used as the default template namespace.
May be overridden by subclasses to add or modify values.
The results of this method will be combined with additional
defaults in the `tornado.template` module and keyword arguments
to `render` or `render_string`.
"""
namespace = dict(
handler=self,
request=self.request,
current_user=self.current_user,
locale=self.locale,
_=self.locale.translate,
static_url=self.static_url,
xsrf_form_html=self.xsrf_form_html,
reverse_url=self.reverse_url
)
namespace.update(self.ui)
return namespace
def create_template_loader(self, template_path):
"""Returns a new template loader for the given path.
May be overridden by subclasses. By default returns a
directory-based loader on the given path, using the
``autoescape`` application setting. If a ``template_loader``
application setting is supplied, uses that instead.
"""
settings = self.application.settings
if "template_loader" in settings:
return settings["template_loader"]
kwargs = {}
if "autoescape" in settings:
# autoescape=None means "no escaping", so we have to be sure
# to only pass this kwarg if the user asked for it.
kwargs["autoescape"] = settings["autoescape"]
return template.Loader(template_path, **kwargs)
    def flush(self, include_footers=False, callback=None):
        """Flushes the current output buffer to the network.

        The ``callback`` argument, if given, can be used for flow control:
        it will be run when all flushed data has been written to the socket.
        Note that only one flush callback can be outstanding at a time;
        if another flush occurs before the previous flush's callback
        has been run, the previous callback will be discarded.
        """
        if self.application._wsgi:
            # WSGI applications cannot usefully support flush, so just make
            # it a no-op (and run the callback immediately).
            if callback is not None:
                callback()
            return
        chunk = b"".join(self._write_buffer)
        self._write_buffer = []
        if not self._headers_written:
            self._headers_written = True
            # The first chunk also carries the status line and headers;
            # output transforms may rewrite status, headers, and body.
            for transform in self._transforms:
                self._status_code, self._headers, chunk = \
                    transform.transform_first_chunk(
                        self._status_code, self._headers, chunk, include_footers)
            headers = self._generate_headers()
        else:
            # Subsequent chunks only pass through the body transforms.
            for transform in self._transforms:
                chunk = transform.transform_chunk(chunk, include_footers)
            headers = b""

        # Ignore the chunk and only write the headers for HEAD requests
        if self.request.method == "HEAD":
            if headers:
                self.request.write(headers, callback=callback)
            return

        self.request.write(headers + chunk, callback=callback)
    def finish(self, chunk=None):
        """Finishes this response, ending the HTTP request.

        ``chunk``, if given, is passed to `write` before the response is
        flushed and the connection released.
        """
        if self._finished:
            raise RuntimeError("finish() called twice. May be caused "
                               "by using async operations without the "
                               "@asynchronous decorator.")
        if chunk is not None:
            self.write(chunk)

        # Automatically support ETags and add the Content-Length header if
        # we have not flushed any content yet.
        if not self._headers_written:
            if (self._status_code == 200 and
                self.request.method in ("GET", "HEAD") and
                    "Etag" not in self._headers):
                self.set_etag_header()
                if self.check_etag_header():
                    # Client already has this entity; drop the body.
                    self._write_buffer = []
                    self.set_status(304)
            if self._status_code == 304:
                assert not self._write_buffer, "Cannot send body with 304"
                self._clear_headers_for_304()
            elif "Content-Length" not in self._headers:
                content_length = sum(len(part) for part in self._write_buffer)
                self.set_header("Content-Length", content_length)

        if hasattr(self.request, "connection"):
            # Now that the request is finished, clear the callback we
            # set on the HTTPConnection (which would otherwise prevent the
            # garbage collection of the RequestHandler when there
            # are keepalive connections)
            self.request.connection.set_close_callback(None)

        if not self.application._wsgi:
            self.flush(include_footers=True)
            self.request.finish()
            self._log()
        self._finished = True
        self.on_finish()
        # Break up a reference cycle between this handler and the
        # _ui_module closures to allow for faster GC on CPython.
        self.ui = None
    def send_error(self, status_code=500, **kwargs):
        """Sends the given HTTP error code to the browser.

        If `flush()` has already been called, it is not possible to send
        an error, so this method will simply terminate the response.
        If output has been written but not yet flushed, it will be discarded
        and replaced with the error page.

        Override `write_error()` to customize the error page that is returned.
        Additional keyword arguments are passed through to `write_error`.
        """
        if self._headers_written:
            # Too late to change the response; just close it out.
            gen_log.error("Cannot send error response after headers written")
            if not self._finished:
                self.finish()
            return
        # Discard any buffered (unflushed) output and headers.
        self.clear()

        reason = None
        if 'exc_info' in kwargs:
            # Propagate a custom reason phrase from HTTPError, if any.
            exception = kwargs['exc_info'][1]
            if isinstance(exception, HTTPError) and exception.reason:
                reason = exception.reason
        self.set_status(status_code, reason=reason)
        try:
            self.write_error(status_code, **kwargs)
        except Exception:
            # A failure while producing the error page must not escape,
            # or the connection would be left hanging.
            app_log.error("Uncaught exception in write_error", exc_info=True)
        if not self._finished:
            self.finish()
    def write_error(self, status_code, **kwargs):
        """Override to implement custom error pages.

        ``write_error`` may call `write`, `render`, `set_header`, etc
        to produce output as usual.

        If this error was caused by an uncaught exception (including
        HTTPError), an ``exc_info`` triple will be available as
        ``kwargs["exc_info"]``. Note that this exception may not be
        the "current" exception for purposes of methods like
        ``sys.exc_info()`` or ``traceback.format_exc``.

        For historical reasons, if a method ``get_error_html`` exists,
        it will be used instead of the default ``write_error`` implementation.
        ``get_error_html`` returned a string instead of producing output
        normally, and had different semantics for exception handling.
        Users of ``get_error_html`` are encouraged to convert their code
        to override ``write_error`` instead.
        """
        if hasattr(self, 'get_error_html'):
            # Legacy path: delegate to the deprecated get_error_html hook.
            if 'exc_info' in kwargs:
                exc_info = kwargs.pop('exc_info')
                kwargs['exception'] = exc_info[1]
                try:
                    # Put the traceback into sys.exc_info()
                    raise_exc_info(exc_info)
                except Exception:
                    self.finish(self.get_error_html(status_code, **kwargs))
            else:
                self.finish(self.get_error_html(status_code, **kwargs))
            return
        if self.settings.get("serve_traceback") and "exc_info" in kwargs:
            # in debug mode, try to send a traceback
            self.set_header('Content-Type', 'text/plain')
            for line in traceback.format_exception(*kwargs["exc_info"]):
                self.write(line)
            self.finish()
        else:
            # Minimal default error page using the stored reason phrase.
            self.finish("<html><title>%(code)d: %(message)s</title>"
                        "<body>%(code)d: %(message)s</body></html>" % {
                            "code": status_code,
                            "message": self._reason,
                        })
    @property
    def locale(self):
        """The locale for the current session.

        Determined by either `get_user_locale`, which you can override to
        set the locale based on, e.g., a user preference stored in a
        database, or `get_browser_locale`, which uses the ``Accept-Language``
        header.
        """
        if not hasattr(self, "_locale"):
            # Prefer an application-provided user locale; fall back to the
            # Accept-Language header. The result is cached per request.
            self._locale = self.get_user_locale()
            if not self._locale:
                self._locale = self.get_browser_locale()
                assert self._locale
        return self._locale
def get_user_locale(self):
"""Override to determine the locale from the authenticated user.
If None is returned, we fall back to `get_browser_locale()`.
This method should return a `tornado.locale.Locale` object,
most likely obtained via a call like ``tornado.locale.get("en")``
"""
return None
def get_browser_locale(self, default="en_US"):
"""Determines the user's locale from ``Accept-Language`` header.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
"""
if "Accept-Language" in self.request.headers:
languages = self.request.headers["Accept-Language"].split(",")
locales = []
for language in languages:
parts = language.strip().split(";")
if len(parts) > 1 and parts[1].startswith("q="):
try:
score = float(parts[1][2:])
except (ValueError, TypeError):
score = 0.0
else:
score = 1.0
locales.append((parts[0], score))
if locales:
locales.sort(key=lambda pair: pair[1], reverse=True)
codes = [l[0] for l in locales]
return locale.get(*codes)
return locale.get(default)
    @property
    def current_user(self):
        """The authenticated user for this request.

        This is a cached version of `get_current_user`, which you can
        override to set the user based on, e.g., a cookie. If that
        method is not overridden, this method always returns None.

        We lazy-load the current user the first time this method is called
        and cache the result after that.
        """
        if not hasattr(self, "_current_user"):
            self._current_user = self.get_current_user()
        return self._current_user

    @current_user.setter
    def current_user(self, value):
        # Allows auth machinery to assign the user directly, bypassing
        # (and pre-populating) the lazy get_current_user() cache.
        self._current_user = value
def get_current_user(self):
"""Override to determine the current user from, e.g., a cookie."""
return None
def get_login_url(self):
"""Override to customize the login URL based on the request.
By default, we use the ``login_url`` application setting.
"""
self.require_setting("login_url", "@tornado.web.authenticated")
return self.application.settings["login_url"]
def get_template_path(self):
"""Override to customize template path for each handler.
By default, we use the ``template_path`` application setting.
Return None to load templates relative to the calling file.
"""
return self.application.settings.get("template_path")
    @property
    def xsrf_token(self):
        """The XSRF-prevention token for the current user/session.

        To prevent cross-site request forgery, we set an '_xsrf' cookie
        and include the same '_xsrf' value as an argument with all POST
        requests. If the two do not match, we reject the form submission
        as a potential forgery.

        See http://en.wikipedia.org/wiki/Cross-site_request_forgery
        """
        if not hasattr(self, "_xsrf_token"):
            version, token, timestamp = self._get_raw_xsrf_token()
            # NOTE(review): the raw token is XOR-masked with a fresh 4-byte
            # random value per request, so the visible token differs on
            # every request (presumably a compression side-channel
            # mitigation — confirm against upstream changelog).
            mask = os.urandom(4)
            self._xsrf_token = b"|".join([
                b"2",
                binascii.b2a_hex(mask),
                binascii.b2a_hex(_websocket_mask(mask, token)),
                utf8(str(int(timestamp)))])
            if version is None or version != 2:
                # Upgrade missing or legacy cookies to the v2 format.
                expires_days = 30 if self.current_user else None
                self.set_cookie("_xsrf", self._xsrf_token,
                                expires_days=expires_days)
        return self._xsrf_token
def _get_raw_xsrf_token(self):
if not hasattr(self, '_raw_xsrf_token'):
cookie = self.get_cookie("_xsrf")
if cookie:
version, token, timestamp = self._decode_xsrf_token(cookie)
else:
version, token, timestamp = None, None, None
if token is None:
version = None
token = os.urandom(16)
timestamp = time.time()
self._raw_xsrf_token = (version, token, timestamp)
return self._raw_xsrf_token
def _decode_xsrf_token(self, cookie):
m = _signed_value_version_re.match(utf8(cookie))
if m:
version = int(m.group(1))
if version == 2:
_, mask, masked_token, timestamp = cookie.split("|")
mask = binascii.a2b_hex(utf8(mask))
token = _websocket_mask(
mask, binascii.a2b_hex(utf8(masked_token)))
timestamp = int(timestamp)
return version, token, timestamp
else:
# Treat unknown versions as not present instead of failing.
return None, None, None
elif len(cookie) == 32:
version = 1
token = binascii.a2b_hex(cookie)
# We don't have a usable timestamp in older versions.
timestamp = int(time.time())
return (version, token, timestamp)
else:
return None, None, None
    def check_xsrf_cookie(self):
        """Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.

        To prevent cross-site request forgery, we set an ``_xsrf``
        cookie and include the same value as a non-cookie
        field with all ``POST`` requests. If the two do not match, we
        reject the form submission as a potential forgery.

        The ``_xsrf`` value may be set as either a form field named ``_xsrf``
        or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken``
        (the latter is accepted for compatibility with Django).

        See http://en.wikipedia.org/wiki/Cross-site_request_forgery

        Prior to release 1.1.1, this check was ignored if the HTTP header
        ``X-Requested-With: XMLHTTPRequest`` was present. This exception
        has been shown to be insecure and has been removed. For more
        information please see
        http://www.djangoproject.com/weblog/2011/feb/08/security/
        http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
        """
        # Accept the token from the form field or either custom header.
        token = (self.get_argument("_xsrf", None) or
                 self.request.headers.get("X-Xsrftoken") or
                 self.request.headers.get("X-Csrftoken"))
        if not token:
            raise HTTPError(403, "'_xsrf' argument missing from POST")
        # Unmask both sides to their raw token values before comparing.
        _, token, _ = self._decode_xsrf_token(token)
        _, expected_token, _ = self._get_raw_xsrf_token()
        # Constant-time comparison to avoid leaking the token via timing.
        if not _time_independent_equals(utf8(token), utf8(expected_token)):
            raise HTTPError(403, "XSRF cookie does not match POST argument")
def xsrf_form_html(self):
"""An HTML ``<input/>`` element to be included with all POST forms.
It defines the ``_xsrf`` input value, which we check on all POST
requests to prevent cross-site request forgery. If you have set
the ``xsrf_cookies`` application setting, you must include this
HTML within all of your HTML forms.
In a template, this method should be called with ``{% module
xsrf_form_html() %}``
See `check_xsrf_cookie()` above for more information.
"""
return '<input type="hidden" name="_xsrf" value="' + \
escape.xhtml_escape(self.xsrf_token) + '"/>'
    def static_url(self, path, include_host=None, **kwargs):
        """Returns a static URL for the given relative static file path.

        This method requires you set the ``static_path`` setting in your
        application (which specifies the root directory of your static
        files).

        This method returns a versioned url (by default appending
        ``?v=<signature>``), which allows the static files to be
        cached indefinitely. This can be disabled by passing
        ``include_version=False`` (in the default implementation;
        other static file implementations are not required to support
        this, but they may support other options).

        By default this method returns URLs relative to the current
        host, but if ``include_host`` is true the URL returned will be
        absolute. If this handler has an ``include_host`` attribute,
        that value will be used as the default for all `static_url`
        calls that do not pass ``include_host`` as a keyword argument.
        """
        self.require_setting("static_path", "static_url")
        # URL construction is delegated to the (possibly custom) static
        # handler class so alternative implementations can change it.
        get_url = self.settings.get("static_handler_class",
                                    StaticFileHandler).make_static_url
        if include_host is None:
            include_host = getattr(self, "include_host", False)

        if include_host:
            base = self.request.protocol + "://" + self.request.host
        else:
            base = ""
        return base + get_url(self.settings, path, **kwargs)
def async_callback(self, callback, *args, **kwargs):
"""Obsolete - catches exceptions from the wrapped function.
This function is unnecessary since Tornado 1.1.
"""
if callback is None:
return None
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
def wrapper(*args, **kwargs):
try:
return callback(*args, **kwargs)
except Exception as e:
if self._headers_written:
app_log.error("Exception after headers written",
exc_info=True)
else:
self._handle_request_exception(e)
return wrapper
def require_setting(self, name, feature="this feature"):
"""Raises an exception if the given app setting is not defined."""
if not self.application.settings.get(name):
raise Exception("You must define the '%s' setting in your "
"application to use %s" % (name, feature))
def reverse_url(self, name, *args):
"""Alias for `Application.reverse_url`."""
return self.application.reverse_url(name, *args)
def compute_etag(self):
"""Computes the etag header to be used for this request.
By default uses a hash of the content written so far.
May be overridden to provide custom etag implementations,
or may return None to disable tornado's default etag support.
"""
hasher = hashlib.sha1()
for part in self._write_buffer:
hasher.update(part)
return '"%s"' % hasher.hexdigest()
def set_etag_header(self):
"""Sets the response's Etag header using ``self.compute_etag()``.
Note: no header will be set if ``compute_etag()`` returns ``None``.
This method is called automatically when the request is finished.
"""
etag = self.compute_etag()
if etag is not None:
self.set_header("Etag", etag)
    def check_etag_header(self):
        """Checks the ``Etag`` header against requests's ``If-None-Match``.

        Returns ``True`` if the request's Etag matches and a 304 should be
        returned. For example::

            self.set_etag_header()
            if self.check_etag_header():
                self.set_status(304)
                return

        This method is called automatically when the request is finished,
        but may be called earlier for applications that override
        `compute_etag` and want to do an early check for ``If-None-Match``
        before completing the request. The ``Etag`` header should be set
        (perhaps with `set_etag_header`) before calling this method.
        """
        etag = self._headers.get("Etag")
        inm = utf8(self.request.headers.get("If-None-Match", ""))
        # NOTE(review): this is a substring search, not a token-wise
        # comparison — e.g. a weak validator W/"x" would also match "x";
        # confirm this looseness is intended before relying on it.
        return bool(etag and inm and inm.find(etag) >= 0)
    def _stack_context_handle_exception(self, type, value, traceback):
        """Stack-context exception handler: route uncaught exceptions from
        asynchronous callbacks into the normal error-page machinery.

        Always returns True to mark the exception as handled.
        """
        try:
            # For historical reasons _handle_request_exception only takes
            # the exception value instead of the full triple,
            # so re-raise the exception to ensure that it's in
            # sys.exc_info()
            raise_exc_info((type, value, traceback))
        except Exception:
            self._handle_request_exception(value)
        return True
    def _execute(self, transforms, *args, **kwargs):
        """Executes this request with the given output transforms."""
        self._transforms = transforms
        try:
            if self.request.method not in self.SUPPORTED_METHODS:
                raise HTTPError(405)
            # Path regex captures arrive encoded; decode them before they
            # are handed to the verb method.
            self.path_args = [self.decode_argument(arg) for arg in args]
            self.path_kwargs = dict((k, self.decode_argument(v, name=k))
                                    for (k, v) in kwargs.items())
            # If XSRF cookies are turned on, reject form submissions without
            # the proper cookie
            if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
                    self.application.settings.get("xsrf_cookies"):
                self.check_xsrf_cookie()
            # prepare() may be asynchronous; run the verb method once it
            # completes.
            self._when_complete(self.prepare(), self._execute_method)
        except Exception as e:
            self._handle_request_exception(e)
    def _when_complete(self, result, callback):
        """Run *callback* once *result* has completed.

        *result* must be either None (synchronous completion) or a
        `Future` that resolves to None; anything else is an error.
        """
        try:
            if result is None:
                callback()
            elif isinstance(result, Future):
                if result.done():
                    # Accessing .result() also re-raises any stored exception.
                    if result.result() is not None:
                        raise ValueError('Expected None, got %r' % result.result())
                    callback()
                else:
                    # Delayed import of IOLoop because it's not available
                    # on app engine
                    from tornado.ioloop import IOLoop
                    # Re-enter this method when the future resolves.
                    IOLoop.current().add_future(
                        result, functools.partial(self._when_complete,
                                                  callback=callback))
            else:
                raise ValueError("Expected Future or None, got %r" % result)
        except Exception as e:
            self._handle_request_exception(e)
def _execute_method(self):
if not self._finished:
method = getattr(self, self.request.method.lower())
self._when_complete(method(*self.path_args, **self.path_kwargs),
self._execute_finish)
def _execute_finish(self):
if self._auto_finish and not self._finished:
self.finish()
    def _generate_headers(self):
        """Serialize the status line, headers, and any new cookies into the
        byte string that precedes the response body on the wire.
        """
        reason = self._reason
        # Status line, e.g. b"HTTP/1.1 200 OK".
        lines = [utf8(self.request.version + " " +
                      str(self._status_code) +
                      " " + reason)]
        lines.extend([utf8(n) + b": " + utf8(v) for n, v in self._headers.get_all()])
        # NOTE(review): header values are not checked for CR/LF here;
        # confirm values are sanitized upstream, otherwise an attacker-
        # controlled value could inject extra headers.
        if hasattr(self, "_new_cookie"):
            for cookie in self._new_cookie.values():
                lines.append(utf8("Set-Cookie: " + cookie.OutputString(None)))
        return b"\r\n".join(lines) + b"\r\n\r\n"
    def _log(self):
        """Logs the current request.

        Sort of deprecated since this functionality was moved to the
        Application, but left in place for the benefit of existing apps
        that have overridden this method.
        """
        # Delegate to the application-level logging hook.
        self.application.log_request(self)
def _request_summary(self):
return self.request.method + " " + self.request.uri + \
" (" + self.request.remote_ip + ")"
    def _handle_request_exception(self, e):
        """Log *e* and turn it into an error response (if still possible)."""
        self.log_exception(*sys.exc_info())
        if self._finished:
            # Extra errors after the request has been finished should
            # be logged, but there is no reason to continue to try and
            # send a response.
            return
        if isinstance(e, HTTPError):
            if e.status_code not in httputil.responses and not e.reason:
                # An HTTPError with an unknown code and no reason phrase
                # cannot be rendered as-is; degrade to a plain 500.
                gen_log.error("Bad HTTP status code: %d", e.status_code)
                self.send_error(500, exc_info=sys.exc_info())
            else:
                self.send_error(e.status_code, exc_info=sys.exc_info())
        else:
            # Any non-HTTPError is an internal server error.
            self.send_error(500, exc_info=sys.exc_info())
    def log_exception(self, typ, value, tb):
        """Override to customize logging of uncaught exceptions.

        By default logs instances of `HTTPError` as warnings without
        stack traces (on the ``tornado.general`` logger), and all
        other exceptions as errors with stack traces (on the
        ``tornado.application`` logger).

        .. versionadded:: 3.1
        """
        if isinstance(value, HTTPError):
            # Expected errors: warn only when a log message was supplied,
            # and without a traceback.
            if value.log_message:
                format = "%d %s: " + value.log_message
                args = ([value.status_code, self._request_summary()] +
                        list(value.args))
                gen_log.warning(format, *args)
        else:
            # Unexpected errors: full traceback at error level.
            app_log.error("Uncaught exception %s\n%r", self._request_summary(),
                          self.request, exc_info=(typ, value, tb))
def _ui_module(self, name, module):
def render(*args, **kwargs):
if not hasattr(self, "_active_modules"):
self._active_modules = {}
if name not in self._active_modules:
self._active_modules[name] = module(self)
rendered = self._active_modules[name].render(*args, **kwargs)
return rendered
return render
def _ui_method(self, method):
return lambda *args, **kwargs: method(self, *args, **kwargs)
def _clear_headers_for_304(self):
# 304 responses should not contain entity headers (defined in
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
# not explicitly allowed by
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
headers = ["Allow", "Content-Encoding", "Content-Language",
"Content-Length", "Content-MD5", "Content-Range",
"Content-Type", "Last-Modified"]
for h in headers:
self.clear_header(h)
def asynchronous(method):
    """Wrap request handler methods with this if they are asynchronous.

    This decorator is unnecessary if the method is also decorated with
    ``@gen.coroutine`` (it is legal but unnecessary to use the two
    decorators together, in which case ``@asynchronous`` must be
    first).

    This decorator should only be applied to the :ref:`HTTP verb
    methods <verbs>`; its behavior is undefined for any other method.
    This decorator does not *make* a method asynchronous; it tells
    the framework that the method *is* asynchronous. For this decorator
    to be useful the method must (at least sometimes) do something
    asynchronous.

    If this decorator is given, the response is not finished when the
    method returns. It is up to the request handler to call
    `self.finish() <RequestHandler.finish>` to finish the HTTP
    request. Without this decorator, the request is automatically
    finished when the ``get()`` or ``post()`` method returns. Example::

        class MyRequestHandler(web.RequestHandler):
            @web.asynchronous
            def get(self):
                http = httpclient.AsyncHTTPClient()
                http.fetch("http://friendfeed.com/", self._on_download)

            def _on_download(self, response):
                self.write("Downloaded!")
                self.finish()

    .. versionadded:: 3.1
       The ability to use ``@gen.coroutine`` without ``@asynchronous``.
    """
    # Delay the IOLoop import because it's not available on app engine.
    from tornado.ioloop import IOLoop

    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if self.application._wsgi:
            raise Exception("@asynchronous is not supported for WSGI apps")
        # Disable the automatic finish() after the verb method returns.
        self._auto_finish = False
        # Route exceptions raised from later callbacks back into this
        # handler's error machinery.
        with stack_context.ExceptionStackContext(
                self._stack_context_handle_exception):
            result = method(self, *args, **kwargs)
            if isinstance(result, Future):
                # If @asynchronous is used with @gen.coroutine, (but
                # not @gen.engine), we can automatically finish the
                # request when the future resolves. Additionally,
                # the Future will swallow any exceptions so we need
                # to throw them back out to the stack context to finish
                # the request.
                def future_complete(f):
                    f.result()
                    if not self._finished:
                        self.finish()
                IOLoop.current().add_future(result, future_complete)
                # Once we have done this, hide the Future from our
                # caller (i.e. RequestHandler._when_complete), which
                # would otherwise set up its own callback and
                # exception handler (resulting in exceptions being
                # logged twice).
                return None
            return result
    return wrapper
def removeslash(method):
    """Use this decorator to remove trailing slashes from the request path.

    For example, a request to ``/foo/`` would redirect to ``/foo`` with this
    decorator. Your request handler mapping should use a regular expression
    like ``r'/foo/*'`` in conjunction with using the decorator.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        path = self.request.path
        if path.endswith("/"):
            # Only safe methods may be redirected; everything else is a 404.
            if self.request.method not in ("GET", "HEAD"):
                raise HTTPError(404)
            uri = path.rstrip("/")
            if uri:  # don't try to redirect '/' to ''
                if self.request.query:
                    uri += "?" + self.request.query
                self.redirect(uri, permanent=True)
                return
        return method(self, *args, **kwargs)
    return wrapper
def addslash(method):
    """Use this decorator to add a missing trailing slash to the request path.

    For example, a request to ``/foo`` would redirect to ``/foo/`` with this
    decorator. Your request handler mapping should use a regular expression
    like ``r'/foo/?'`` in conjunction with using the decorator.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if self.request.path.endswith("/"):
            # Already canonical; run the handler normally.
            return method(self, *args, **kwargs)
        # Missing slash: redirect safe methods, reject everything else.
        if self.request.method not in ("GET", "HEAD"):
            raise HTTPError(404)
        uri = self.request.path + "/"
        if self.request.query:
            uri += "?" + self.request.query
        self.redirect(uri, permanent=True)
    return wrapper
class Application(object):
"""A collection of request handlers that make up a web application.
Instances of this class are callable and can be passed directly to
HTTPServer to serve the application::
application = web.Application([
(r"/", MainPageHandler),
])
http_server = httpserver.HTTPServer(application)
http_server.listen(8080)
ioloop.IOLoop.instance().start()
The constructor for this class takes in a list of `URLSpec` objects
or (regexp, request_class) tuples. When we receive requests, we
iterate over the list in order and instantiate an instance of the
first request class whose regexp matches the request path.
The request class can be specified as either a class object or a
(fully-qualified) name.
Each tuple can contain additional elements, which correspond to the
    arguments to the `URLSpec` constructor. (Prior to Tornado 3.2,
    only tuples of two or three elements were allowed.)
A dictionary may be passed as the third element of the tuple,
which will be used as keyword arguments to the handler's
constructor and `~RequestHandler.initialize` method. This pattern
is used for the `StaticFileHandler` in this example (note that a
`StaticFileHandler` can be installed automatically with the
static_path setting described below)::
application = web.Application([
(r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
We support virtual hosts with the `add_handlers` method, which takes in
a host regular expression as the first argument::
application.add_handlers(r"www\.myhost\.com", [
(r"/article/([0-9]+)", ArticleHandler),
])
You can serve static files by sending the ``static_path`` setting
as a keyword argument. We will serve those files from the
``/static/`` URI (this is configurable with the
``static_url_prefix`` setting), and we will serve ``/favicon.ico``
and ``/robots.txt`` from the same directory. A custom subclass of
`StaticFileHandler` can be specified with the
``static_handler_class`` setting.
"""
    def __init__(self, handlers=None, default_host="", transforms=None,
                 wsgi=False, **settings):
        if transforms is None:
            self.transforms = []
            # Default output transforms; only applied when Tornado itself
            # writes the response (see flush()).
            if settings.get("gzip"):
                self.transforms.append(GZipContentEncoding)
            self.transforms.append(ChunkedTransferEncoding)
        else:
            self.transforms = transforms
        self.handlers = []
        self.named_handlers = {}
        self.default_host = default_host
        self.settings = settings
        # Built-in template UI modules; user-supplied ones are merged
        # in via _load_ui_modules below.
        self.ui_modules = {'linkify': _linkify,
                           'xsrf_form_html': _xsrf_form_html,
                           'Template': TemplateModule,
                           }
        self.ui_methods = {}
        self._wsgi = wsgi
        self._load_ui_modules(settings.get("ui_modules", {}))
        self._load_ui_methods(settings.get("ui_methods", {}))
        if self.settings.get("static_path"):
            # Install the default static-file routes (prefix, favicon,
            # robots.txt) ahead of the user handlers so they win.
            path = self.settings["static_path"]
            handlers = list(handlers or [])
            static_url_prefix = settings.get("static_url_prefix",
                                             "/static/")
            static_handler_class = settings.get("static_handler_class",
                                                StaticFileHandler)
            static_handler_args = settings.get("static_handler_args", {})
            static_handler_args['path'] = path
            for pattern in [re.escape(static_url_prefix) + r"(.*)",
                            r"/(favicon\.ico)", r"/(robots\.txt)"]:
                handlers.insert(0, (pattern, static_handler_class,
                                    static_handler_args))
        if handlers:
            self.add_handlers(".*$", handlers)

        if self.settings.get('debug'):
            # debug mode implies several development conveniences unless
            # each is explicitly overridden.
            self.settings.setdefault('autoreload', True)
            self.settings.setdefault('compiled_template_cache', False)
            self.settings.setdefault('static_hash_cache', False)
            self.settings.setdefault('serve_traceback', True)

        # Automatically reload modified modules
        if self.settings.get('autoreload') and not wsgi:
            from tornado import autoreload
            autoreload.start()
    def listen(self, port, address="", **kwargs):
        """Starts an HTTP server for this application on the given port.

        This is a convenience alias for creating an `.HTTPServer`
        object and calling its listen method. Keyword arguments not
        supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
        `.HTTPServer` constructor. For advanced uses
        (e.g. multi-process mode), do not use this method; create an
        `.HTTPServer` and call its
        `.TCPServer.bind`/`.TCPServer.start` methods directly.

        Note that after calling this method you still need to call
        ``IOLoop.instance().start()`` to start the server.
        """
        # import is here rather than top level because HTTPServer
        # is not importable on appengine
        from tornado.httpserver import HTTPServer
        server = HTTPServer(self, **kwargs)
        server.listen(port, address)
def add_handlers(self, host_pattern, host_handlers):
"""Appends the given handlers to our handler list.
Host patterns are processed sequentially in the order they were
added. All matching patterns will be considered.
"""
if not host_pattern.endswith("$"):
host_pattern += "$"
handlers = []
# The handlers with the wildcard host_pattern are a special
# case - they're added in the constructor but should have lower
# precedence than the more-precise handlers added later.
# If a wildcard handler group exists, it should always be last
# in the list, so insert new groups just before it.
if self.handlers and self.handlers[-1][0].pattern == '.*$':
self.handlers.insert(-1, (re.compile(host_pattern), handlers))
else:
self.handlers.append((re.compile(host_pattern), handlers))
for spec in host_handlers:
if isinstance(spec, (tuple, list)):
assert len(spec) in (2, 3, 4)
spec = URLSpec(*spec)
handlers.append(spec)
if spec.name:
if spec.name in self.named_handlers:
app_log.warning(
"Multiple handlers named %s; replacing previous value",
spec.name)
self.named_handlers[spec.name] = spec
    def add_transform(self, transform_class):
        """Registers an `OutputTransform` class; a fresh instance is
        created for every request (see ``__call__``)."""
        self.transforms.append(transform_class)
def _get_host_handlers(self, request):
host = request.host.lower().split(':')[0]
matches = []
for pattern, handlers in self.handlers:
if pattern.match(host):
matches.extend(handlers)
# Look for default host if not behind load balancer (for debugging)
if not matches and "X-Real-Ip" not in request.headers:
for pattern, handlers in self.handlers:
if pattern.match(self.default_host):
matches.extend(handlers)
return matches or None
def _load_ui_methods(self, methods):
if isinstance(methods, types.ModuleType):
self._load_ui_methods(dict((n, getattr(methods, n))
for n in dir(methods)))
elif isinstance(methods, list):
for m in methods:
self._load_ui_methods(m)
else:
for name, fn in methods.items():
if not name.startswith("_") and hasattr(fn, "__call__") \
and name[0].lower() == name[0]:
self.ui_methods[name] = fn
def _load_ui_modules(self, modules):
if isinstance(modules, types.ModuleType):
self._load_ui_modules(dict((n, getattr(modules, n))
for n in dir(modules)))
elif isinstance(modules, list):
for m in modules:
self._load_ui_modules(m)
else:
assert isinstance(modules, dict)
for name, cls in modules.items():
try:
if issubclass(cls, UIModule):
self.ui_modules[name] = cls
except TypeError:
pass
    def __call__(self, request):
        """Called by HTTPServer to execute the request.

        Dispatches ``request`` to the first URLSpec whose regex matches
        the path (for the matching host), constructs the handler with
        the spec's kwargs, runs it through the configured output
        transforms, and returns the handler instance.
        """
        # One fresh transform instance (gzip, chunked, ...) per request.
        transforms = [t(request) for t in self.transforms]
        handler = None
        args = []
        kwargs = {}
        handlers = self._get_host_handlers(request)
        if not handlers:
            # Unknown host: redirect to the default host rather than 404.
            handler = RedirectHandler(
                self, request, url="http://" + self.default_host + "/")
        else:
            for spec in handlers:
                match = spec.regex.match(request.path)
                if match:
                    handler = spec.handler_class(self, request, **spec.kwargs)
                    if spec.regex.groups:
                        # None-safe wrapper around url_unescape to handle
                        # unmatched optional groups correctly
                        def unquote(s):
                            if s is None:
                                return s
                            return escape.url_unescape(s, encoding=None,
                                                       plus=False)
                        # Pass matched groups to the handler.  Since
                        # match.groups() includes both named and unnamed groups,
                        # we want to use either groups or groupdict but not both.
                        # Note that args are passed as bytes so the handler can
                        # decide what encoding to use.
                        if spec.regex.groupindex:
                            kwargs = dict(
                                (str(k), unquote(v))
                                for (k, v) in match.groupdict().items())
                        else:
                            args = [unquote(s) for s in match.groups()]
                    break
            if not handler:
                # No pattern matched: use the configured default handler
                # class if any, otherwise a plain 404 ErrorHandler.
                if self.settings.get('default_handler_class'):
                    handler_class = self.settings['default_handler_class']
                    handler_args = self.settings.get(
                        'default_handler_args', {})
                else:
                    handler_class = ErrorHandler
                    handler_args = dict(status_code=404)
                handler = handler_class(self, request, **handler_args)
        # If template cache is disabled (usually in the debug mode),
        # re-compile templates and reload static files on every
        # request so you don't need to restart to see changes
        if not self.settings.get("compiled_template_cache", True):
            with RequestHandler._template_loader_lock:
                for loader in RequestHandler._template_loaders.values():
                    loader.reset()
        if not self.settings.get('static_hash_cache', True):
            StaticFileHandler.reset()
        handler._execute(transforms, *args, **kwargs)
        return handler
def reverse_url(self, name, *args):
"""Returns a URL path for handler named ``name``
The handler must be added to the application as a named `URLSpec`.
Args will be substituted for capturing groups in the `URLSpec` regex.
They will be converted to strings if necessary, encoded as utf8,
and url-escaped.
"""
if name in self.named_handlers:
return self.named_handlers[name].reverse(*args)
raise KeyError("%s not found in named urls" % name)
def log_request(self, handler):
"""Writes a completed HTTP request to the logs.
By default writes to the python root logger. To change
this behavior either subclass Application and override this method,
or pass a function in the application settings dictionary as
``log_function``.
"""
if "log_function" in self.settings:
self.settings["log_function"](handler)
return
if handler.get_status() < 400:
log_method = access_log.info
elif handler.get_status() < 500:
log_method = access_log.warning
else:
log_method = access_log.error
request_time = 1000.0 * handler.request.request_time()
log_method("%d %s %.2fms", handler.get_status(),
handler._request_summary(), request_time)
class HTTPError(Exception):
    """An exception that will turn into an HTTP error response.

    Raising an `HTTPError` is a convenient alternative to calling
    `RequestHandler.send_error` since it automatically ends the
    current function.

    :arg int status_code: HTTP status code.  Must be listed in
        `httplib.responses <http.client.responses>` unless the ``reason``
        keyword argument is given.
    :arg string log_message: Message to be written to the log for this error
        (will not be shown to the user unless the `Application` is in debug
        mode).  May contain ``%s``-style placeholders, which will be filled
        in with remaining positional parameters.
    :arg string reason: Keyword-only argument.  The HTTP "reason" phrase
        to pass in the status line along with ``status_code``.  Normally
        determined automatically from ``status_code``, but can be used
        to use a non-standard numeric code.
    """
    def __init__(self, status_code, log_message=None, *args, **kwargs):
        self.status_code = status_code
        self.log_message = log_message
        # Positional args feed the %s placeholders in log_message.
        self.args = args
        self.reason = kwargs.get('reason', None)

    def __str__(self):
        reason = self.reason or httputil.responses.get(self.status_code,
                                                       'Unknown')
        message = "HTTP %d: %s" % (self.status_code, reason)
        if not self.log_message:
            return message
        return "%s (%s)" % (message, self.log_message % self.args)
class MissingArgumentError(HTTPError):
    """Exception raised by `RequestHandler.get_argument`.

    This is a subclass of `HTTPError`, so if it is uncaught a 400 response
    code will be used instead of 500 (and a stack trace will not be logged).

    .. versionadded:: 3.1
    """
    def __init__(self, arg_name):
        message = 'Missing argument %s' % arg_name
        super(MissingArgumentError, self).__init__(400, message)
        # Kept so callers can tell which argument was absent.
        self.arg_name = arg_name
class ErrorHandler(RequestHandler):
    """Generates an error response with ``status_code`` for all requests."""
    def initialize(self, status_code):
        # Record the configured code; the error itself is raised in
        # prepare() so it applies uniformly to every HTTP method.
        self.set_status(status_code)
    def prepare(self):
        raise HTTPError(self._status_code)
    def check_xsrf_cookie(self):
        # POSTs to an ErrorHandler don't actually have side effects,
        # so we don't need to check the xsrf token. This allows POSTs
        # to the wrong url to return a 404 instead of 403.
        pass
class RedirectHandler(RequestHandler):
    """Redirects the client to the given URL for all GET requests.

    You should provide the keyword argument ``url`` to the handler, e.g.::

        application = web.Application([
            (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
        ])
    """
    def initialize(self, url, permanent=True):
        self._url = url
        # Forwarded to RequestHandler.redirect; presumably selects a
        # permanent (301) vs temporary redirect -- confirm against the
        # redirect() implementation.
        self._permanent = permanent
    def get(self):
        self.redirect(self._url, permanent=self._permanent)
class StaticFileHandler(RequestHandler):
    """A simple handler that can serve static content from a directory.

    A `StaticFileHandler` is configured automatically if you pass the
    ``static_path`` keyword argument to `Application`.  This handler
    can be customized with the ``static_url_prefix``, ``static_handler_class``,
    and ``static_handler_args`` settings.

    To map an additional path to this handler for a static data directory
    you would add a line to your application like::

        application = web.Application([
            (r"/content/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
        ])

    The handler constructor requires a ``path`` argument, which specifies the
    local root directory of the content to be served.

    Note that a capture group in the regex is required to parse the value for
    the ``path`` argument to the get() method (different than the constructor
    argument above); see `URLSpec` for details.

    To maximize the effectiveness of browser caching, this class supports
    versioned urls (by default using the argument ``?v=``).  If a version
    is given, we instruct the browser to cache this file indefinitely.
    `make_static_url` (also available as `RequestHandler.static_url`) can
    be used to construct a versioned url.

    This handler is intended primarily for use in development and light-duty
    file serving; for heavy traffic it will be more efficient to use
    a dedicated static file server (such as nginx or Apache).  We support
    the HTTP ``Accept-Ranges`` mechanism to return partial content (because
    some browsers require this functionality to be present to seek in
    HTML5 audio or video), but this handler should not be used with
    files that are too large to fit comfortably in memory.

    **Subclassing notes**

    This class is designed to be extensible by subclassing, but because
    of the way static urls are generated with class methods rather than
    instance methods, the inheritance patterns are somewhat unusual.
    Be sure to use the ``@classmethod`` decorator when overriding a
    class method.  Instance methods may use the attributes ``self.path``
    ``self.absolute_path``, and ``self.modified``.

    Subclasses should only override methods discussed in this section;
    overriding other methods is error-prone.  Overriding
    ``StaticFileHandler.get`` is particularly problematic due to the
    tight coupling with ``compute_etag`` and other methods.

    To change the way static urls are generated (e.g. to match the behavior
    of another server or CDN), override `make_static_url`, `parse_url_path`,
    `get_cache_time`, and/or `get_version`.

    To replace all interaction with the filesystem (e.g. to serve
    static content from a database), override `get_content`,
    `get_content_size`, `get_modified_time`, `get_absolute_path`, and
    `validate_absolute_path`.

    .. versionchanged:: 3.1
       Many of the methods for subclasses were added in Tornado 3.1.
    """
    CACHE_MAX_AGE = 86400 * 365 * 10  # 10 years

    _static_hashes = {}
    _lock = threading.Lock()  # protects _static_hashes

    def initialize(self, path, default_filename=None):
        self.root = path
        self.default_filename = default_filename

    @classmethod
    def reset(cls):
        # Drops the cached content hashes (used when static_hash_cache
        # is disabled, e.g. in debug mode).
        with cls._lock:
            cls._static_hashes = {}

    def head(self, path):
        # HEAD is GET without the body; get() computes Content-Length.
        self.get(path, include_body=False)

    def get(self, path, include_body=True):
        # Set up our path instance variables.
        self.path = self.parse_url_path(path)
        del path  # make sure we don't refer to path instead of self.path again
        absolute_path = self.get_absolute_path(self.root, self.path)
        self.absolute_path = self.validate_absolute_path(
            self.root, absolute_path)
        if self.absolute_path is None:
            # validate_absolute_path already redirected or raised.
            return

        self.modified = self.get_modified_time()
        self.set_headers()

        if self.should_return_304():
            self.set_status(304)
            return

        request_range = None
        range_header = self.request.headers.get("Range")
        if range_header:
            # As per RFC 2616 14.16, if an invalid Range header is specified,
            # the request will be treated as if the header didn't exist.
            request_range = httputil._parse_request_range(range_header)

        if request_range:
            start, end = request_range
            size = self.get_content_size()
            if (start is not None and start >= size) or end == 0:
                # As per RFC 2616 14.35.1, a range is not satisfiable only: if
                # the first requested byte is equal to or greater than the
                # content, or when a suffix with length 0 is specified
                self.set_status(416)  # Range Not Satisfiable
                self.set_header("Content-Type", "text/plain")
                self.set_header("Content-Range", "bytes */%s" % (size, ))
                return
            if start is not None and start < 0:
                start += size
            if end is not None and end > size:
                # Clients sometimes blindly use a large range to limit their
                # download size; cap the endpoint at the actual file size.
                end = size
            # Note: only return HTTP 206 if less than the entire range has been
            # requested. Not only is this semantically correct, but Chrome
            # refuses to play audio if it gets an HTTP 206 in response to
            # ``Range: bytes=0-``.
            if size != (end or size) - (start or 0):
                self.set_status(206)  # Partial Content
                self.set_header("Content-Range",
                                httputil._get_content_range(start, end, size))
        else:
            start = end = None
        content = self.get_content(self.absolute_path, start, end)
        if isinstance(content, bytes_type):
            content = [content]
        content_length = 0
        for chunk in content:
            if include_body:
                self.write(chunk)
            else:
                content_length += len(chunk)
        if not include_body:
            assert self.request.method == "HEAD"
            self.set_header("Content-Length", content_length)

    def compute_etag(self):
        """Sets the ``Etag`` header based on static url version.

        This allows efficient ``If-None-Match`` checks against cached
        versions, and sends the correct ``Etag`` for a partial response
        (i.e. the same ``Etag`` as the full file).

        .. versionadded:: 3.1
        """
        version_hash = self._get_cached_version(self.absolute_path)
        if not version_hash:
            return None
        return '"%s"' % (version_hash, )

    def set_headers(self):
        """Sets the content and caching headers on the response.

        .. versionadded:: 3.1
        """
        self.set_header("Accept-Ranges", "bytes")
        self.set_etag_header()

        if self.modified is not None:
            self.set_header("Last-Modified", self.modified)

        content_type = self.get_content_type()
        if content_type:
            self.set_header("Content-Type", content_type)

        cache_time = self.get_cache_time(self.path, self.modified, content_type)
        if cache_time > 0:
            self.set_header("Expires", datetime.datetime.utcnow() +
                            datetime.timedelta(seconds=cache_time))
            self.set_header("Cache-Control", "max-age=" + str(cache_time))

        self.set_extra_headers(self.path)

    def should_return_304(self):
        """Returns True if the headers indicate that we should return 304.

        .. versionadded:: 3.1
        """
        if self.check_etag_header():
            return True

        # Check the If-Modified-Since, and don't send the result if the
        # content has not been modified
        ims_value = self.request.headers.get("If-Modified-Since")
        if ims_value is not None:
            date_tuple = email.utils.parsedate(ims_value)
            if date_tuple is not None:
                if_since = datetime.datetime(*date_tuple[:6])
                if if_since >= self.modified:
                    return True

        return False

    @classmethod
    def get_absolute_path(cls, root, path):
        """Returns the absolute location of ``path`` relative to ``root``.

        ``root`` is the path configured for this `StaticFileHandler`
        (in most cases the ``static_path`` `Application` setting).

        This class method may be overridden in subclasses.  By default
        it returns a filesystem path, but other strings may be used
        as long as they are unique and understood by the subclass's
        overridden `get_content`.

        .. versionadded:: 3.1
        """
        abspath = os.path.abspath(os.path.join(root, path))
        return abspath

    def validate_absolute_path(self, root, absolute_path):
        """Validate and return the absolute path.

        ``root`` is the configured path for the `StaticFileHandler`,
        and ``path`` is the result of `get_absolute_path`

        This is an instance method called during request processing,
        so it may raise `HTTPError` or use methods like
        `RequestHandler.redirect` (return None after redirecting to
        halt further processing).  This is where 404 errors for missing files
        are generated.

        This method may modify the path before returning it, but note that
        any such modifications will not be understood by `make_static_url`.

        In instance methods, this method's result is available as
        ``self.absolute_path``.

        .. versionadded:: 3.1
        """
        root = os.path.abspath(root)
        if not root.endswith(os.path.sep):
            # os.path.abspath strips a trailing /.
            # SECURITY: the separator must be restored before the prefix
            # test below; otherwise a root of ``/static`` would also
            # accept sibling paths such as ``/static-private/secret``
            # (path traversal via prefix-match bypass).
            root += os.path.sep
        # The trailing slash also needs to be temporarily added back to
        # the requested path so a request for the root directory itself
        # still matches.
        if not (absolute_path + os.path.sep).startswith(root):
            raise HTTPError(403, "%s is not in root static directory",
                            self.path)
        if (os.path.isdir(absolute_path) and
                self.default_filename is not None):
            # need to look at the request.path here for when path is empty
            # but there is some prefix to the path that was already
            # trimmed by the routing
            if not self.request.path.endswith("/"):
                self.redirect(self.request.path + "/", permanent=True)
                return
            absolute_path = os.path.join(absolute_path, self.default_filename)
        if not os.path.exists(absolute_path):
            raise HTTPError(404)
        if not os.path.isfile(absolute_path):
            raise HTTPError(403, "%s is not a file", self.path)
        return absolute_path

    @classmethod
    def get_content(cls, abspath, start=None, end=None):
        """Retrieve the content of the requested resource which is located
        at the given absolute path.

        This class method may be overridden by subclasses.  Note that its
        signature is different from other overridable class methods
        (no ``settings`` argument); this is deliberate to ensure that
        ``abspath`` is able to stand on its own as a cache key.

        This method should either return a byte string or an iterator
        of byte strings.  The latter is preferred for large files
        as it helps reduce memory fragmentation.

        .. versionadded:: 3.1
        """
        with open(abspath, "rb") as file:
            if start is not None:
                file.seek(start)
            if end is not None:
                remaining = end - (start or 0)
            else:
                remaining = None
            while True:
                chunk_size = 64 * 1024
                if remaining is not None and remaining < chunk_size:
                    chunk_size = remaining
                chunk = file.read(chunk_size)
                if chunk:
                    if remaining is not None:
                        remaining -= len(chunk)
                    yield chunk
                else:
                    if remaining is not None:
                        assert remaining == 0
                    return

    @classmethod
    def get_content_version(cls, abspath):
        """Returns a version string for the resource at the given path.

        This class method may be overridden by subclasses.  The
        default implementation is a hash of the file's contents.

        .. versionadded:: 3.1
        """
        data = cls.get_content(abspath)
        hasher = hashlib.md5()
        if isinstance(data, bytes_type):
            hasher.update(data)
        else:
            for chunk in data:
                hasher.update(chunk)
        return hasher.hexdigest()

    def _stat(self):
        # Cache the os.stat result for the duration of the request.
        if not hasattr(self, '_stat_result'):
            self._stat_result = os.stat(self.absolute_path)
        return self._stat_result

    def get_content_size(self):
        """Retrieve the total size of the resource at the given path.

        This method may be overridden by subclasses.  It will only
        be called if a partial result is requested from `get_content`

        .. versionadded:: 3.1
        """
        stat_result = self._stat()
        return stat_result[stat.ST_SIZE]

    def get_modified_time(self):
        """Returns the time that ``self.absolute_path`` was last modified.

        May be overridden in subclasses.  Should return a `~datetime.datetime`
        object or None.

        .. versionadded:: 3.1
        """
        stat_result = self._stat()
        modified = datetime.datetime.utcfromtimestamp(stat_result[stat.ST_MTIME])
        return modified

    def get_content_type(self):
        """Returns the ``Content-Type`` header to be used for this request.

        .. versionadded:: 3.1
        """
        mime_type, encoding = mimetypes.guess_type(self.absolute_path)
        return mime_type

    def set_extra_headers(self, path):
        """For subclass to add extra headers to the response"""
        pass

    def get_cache_time(self, path, modified, mime_type):
        """Override to customize cache control behavior.

        Return a positive number of seconds to make the result
        cacheable for that amount of time or 0 to mark resource as
        cacheable for an unspecified amount of time (subject to
        browser heuristics).

        By default returns cache expiry of 10 years for resources requested
        with ``v`` argument.
        """
        return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0

    @classmethod
    def make_static_url(cls, settings, path, include_version=True):
        """Constructs a versioned url for the given path.

        This method may be overridden in subclasses (but note that it
        is a class method rather than an instance method).  Subclasses
        are only required to implement the signature
        ``make_static_url(cls, settings, path)``; other keyword
        arguments may be passed through `~RequestHandler.static_url`
        but are not standard.

        ``settings`` is the `Application.settings` dictionary.  ``path``
        is the static path being requested.  The url returned should be
        relative to the current host.

        ``include_version`` determines whether the generated URL should
        include the query string containing the version hash of the
        file corresponding to the given ``path``.
        """
        url = settings.get('static_url_prefix', '/static/') + path
        if not include_version:
            return url

        version_hash = cls.get_version(settings, path)
        if not version_hash:
            return url

        return '%s?v=%s' % (url, version_hash)

    def parse_url_path(self, url_path):
        """Converts a static URL path into a filesystem path.

        ``url_path`` is the path component of the URL with
        ``static_url_prefix`` removed.  The return value should be
        filesystem path relative to ``static_path``.

        This is the inverse of `make_static_url`.
        """
        if os.path.sep != "/":
            url_path = url_path.replace("/", os.path.sep)
        return url_path

    @classmethod
    def get_version(cls, settings, path):
        """Generate the version string to be used in static URLs.

        ``settings`` is the `Application.settings` dictionary and ``path``
        is the relative location of the requested asset on the filesystem.
        The returned value should be a string, or ``None`` if no version
        could be determined.

        .. versionchanged:: 3.1
           This method was previously recommended for subclasses to override;
           `get_content_version` is now preferred as it allows the base
           class to handle caching of the result.
        """
        abs_path = cls.get_absolute_path(settings['static_path'], path)
        return cls._get_cached_version(abs_path)

    @classmethod
    def _get_cached_version(cls, abs_path):
        # Hash each file at most once per process (unless reset() is
        # called); failures are cached as None so we don't retry on
        # every request.
        with cls._lock:
            hashes = cls._static_hashes
            if abs_path not in hashes:
                try:
                    hashes[abs_path] = cls.get_content_version(abs_path)
                except Exception:
                    gen_log.error("Could not open static file %r", abs_path)
                    hashes[abs_path] = None
            hsh = hashes.get(abs_path)
            if hsh:
                return hsh
        return None
class FallbackHandler(RequestHandler):
    """A `RequestHandler` that wraps another HTTP server callback.

    The fallback is a callable object that accepts an
    `~.httpserver.HTTPRequest`, such as an `Application` or
    `tornado.wsgi.WSGIContainer`.  This is most useful to use both
    Tornado ``RequestHandlers`` and WSGI in the same server.  Typical
    usage::

        wsgi_app = tornado.wsgi.WSGIContainer(
            django.core.handlers.wsgi.WSGIHandler())
        application = tornado.web.Application([
            (r"/foo", FooHandler),
            (r".*", FallbackHandler, dict(fallback=wsgi_app),
        ])
    """
    def initialize(self, fallback):
        # ``fallback`` is any callable that accepts the request object.
        self.fallback = fallback
    def prepare(self):
        # Hand the entire request to the wrapped application; marking
        # the handler finished stops tornado from writing its own
        # response on top of the fallback's.
        self.fallback(self.request)
        self._finished = True
class OutputTransform(object):
    """A transform modifies the result of an HTTP request (e.g., GZip encoding)

    A new transform instance is created for every request. See the
    ChunkedTransferEncoding example below if you want to implement a
    new Transform.
    """
    def __init__(self, request):
        # The base transform ignores the request entirely.
        pass
    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # Identity transform: status, headers and first body chunk all
        # pass through unchanged.  Subclasses may rewrite any of them.
        return status_code, headers, chunk
    def transform_chunk(self, chunk, finishing):
        # Identity transform for subsequent body chunks.
        return chunk
class GZipContentEncoding(OutputTransform):
    """Applies the gzip content encoding to the response.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
    """
    # Only these content types are compressed; binary formats (images,
    # archives, ...) are typically compressed already.
    CONTENT_TYPES = set([
        "text/plain", "text/html", "text/css", "text/xml", "application/javascript",
        "application/x-javascript", "application/xml", "application/atom+xml",
        "text/javascript", "application/json", "application/xhtml+xml"])
    # Bodies shorter than this are not worth the gzip header overhead.
    MIN_LENGTH = 5
    def __init__(self, request):
        # Gzip is only attempted for HTTP/1.1 clients that advertise it.
        self._gzipping = request.supports_http_1_1() and \
            "gzip" in request.headers.get("Accept-Encoding", "")
    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # The response varies by Accept-Encoding whether or not we end up
        # compressing this particular response, so caches stay correct.
        # NOTE(review): the appended value is a bytes literal while other
        # header values in this file are str -- confirm against the
        # HTTPHeaders implementation that this mixes safely.
        if 'Vary' in headers:
            headers['Vary'] += b', Accept-Encoding'
        else:
            headers['Vary'] = b'Accept-Encoding'
        if self._gzipping:
            ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
            # Compress only known-compressible types, bodies that are not
            # trivially small, and responses that don't already declare a
            # length (unless this is the only chunk) or an encoding.
            self._gzipping = (ctype in self.CONTENT_TYPES) and \
                (not finishing or len(chunk) >= self.MIN_LENGTH) and \
                (finishing or "Content-Length" not in headers) and \
                ("Content-Encoding" not in headers)
        if self._gzipping:
            headers["Content-Encoding"] = "gzip"
            self._gzip_value = BytesIO()
            self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
            chunk = self.transform_chunk(chunk, finishing)
            if "Content-Length" in headers:
                # The declared length must describe the compressed body.
                headers["Content-Length"] = str(len(chunk))
        return status_code, headers, chunk
    def transform_chunk(self, chunk, finishing):
        if self._gzipping:
            self._gzip_file.write(chunk)
            if finishing:
                self._gzip_file.close()
            else:
                self._gzip_file.flush()
            # Drain whatever the compressor has produced so far and reset
            # the in-memory buffer for the next chunk.
            chunk = self._gzip_value.getvalue()
            self._gzip_value.truncate(0)
            self._gzip_value.seek(0)
        return chunk
class ChunkedTransferEncoding(OutputTransform):
    """Applies the chunked transfer encoding to the response.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1
    """
    def __init__(self, request):
        # Chunked encoding requires HTTP/1.1.
        self._chunking = request.supports_http_1_1()

    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # 304 responses have no body (not even a zero-length body), and so
        # should not have either Content-Length or Transfer-Encoding headers.
        if self._chunking and status_code != 304:
            # No need to chunk the output if a Content-Length is specified
            # or another encoding is already in effect.
            if "Content-Length" in headers or "Transfer-Encoding" in headers:
                self._chunking = False
            else:
                headers["Transfer-Encoding"] = "chunked"
            chunk = self.transform_chunk(chunk, finishing)
        return status_code, headers, chunk

    def transform_chunk(self, block, finishing):
        if not self._chunking:
            return block
        # An empty chunk is never written out: a zero-length chunk means
        # END-OF-STREAM in chunked encoding.
        if block:
            block = utf8("%x" % len(block)) + b"\r\n" + block + b"\r\n"
        if finishing:
            block += b"0\r\n\r\n"
        return block
def authenticated(method):
    """Decorate methods with this to require that the user be logged in.

    If the user is not logged in, they will be redirected to the configured
    `login url <RequestHandler.get_login_url>`.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if self.current_user:
            return method(self, *args, **kwargs)
        if self.request.method in ("GET", "HEAD"):
            url = self.get_login_url()
            if "?" not in url:
                # Append a ?next= parameter so the login page can send
                # the user back where they came from.
                if urlparse.urlsplit(url).scheme:
                    # if login url is absolute, make next absolute too
                    next_url = self.request.full_url()
                else:
                    next_url = self.request.uri
                url += "?" + urlencode(dict(next=next_url))
            self.redirect(url)
            return
        # Non-idempotent methods can't be transparently redirected
        # through a login page, so they are refused outright.
        raise HTTPError(403)
    return wrapper
class UIModule(object):
    """A re-usable, modular UI unit on a page.

    UI modules often execute additional queries, and they can include
    additional CSS and JavaScript that will be included in the output
    page, which is automatically inserted on page render.
    """
    def __init__(self, handler):
        # Convenience references into the owning request handler.
        self.handler = handler
        self.request = handler.request
        self.ui = handler.ui
        self.locale = handler.locale
    @property
    def current_user(self):
        """The owning handler's current user."""
        return self.handler.current_user
    def render(self, *args, **kwargs):
        """Overridden in subclasses to return this module's output."""
        raise NotImplementedError()
    def embedded_javascript(self):
        """Returns a JavaScript string that will be embedded in the page."""
        return None
    def javascript_files(self):
        """Returns a list of JavaScript files required by this module."""
        return None
    def embedded_css(self):
        """Returns a CSS string that will be embedded in the page."""
        return None
    def css_files(self):
        """Returns a list of CSS files required by this module."""
        return None
    def html_head(self):
        """Returns a CSS string that will be put in the <head/> element"""
        return None
    def html_body(self):
        """Returns an HTML string that will be put in the <body/> element"""
        return None
    def render_string(self, path, **kwargs):
        """Renders a template and returns it as a string."""
        return self.handler.render_string(path, **kwargs)
class _linkify(UIModule):
    """Built-in UI module registered as ``linkify`` in Application.__init__."""
    def render(self, text, **kwargs):
        # Delegates to escape.linkify; keyword args are forwarded unchanged.
        return escape.linkify(text, **kwargs)
class _xsrf_form_html(UIModule):
    """Built-in UI module registered as ``xsrf_form_html`` in Application.__init__."""
    def render(self):
        # Delegates to the handler's xsrf_form_html() helper.
        return self.handler.xsrf_form_html()
class TemplateModule(UIModule):
    """UIModule that simply renders the given template.

    {% module Template("foo.html") %} is similar to {% include "foo.html" %},
    but the module version gets its own namespace (with kwargs passed to
    Template()) instead of inheriting the outer template's namespace.

    Templates rendered through this module also get access to UIModule's
    automatic javascript/css features.  Simply call set_resources
    inside the template and give it keyword arguments corresponding to
    the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
    Note that these resources are output once per template file, not once
    per instantiation of the template, so they must not depend on
    any arguments to the template.
    """
    def __init__(self, handler):
        super(TemplateModule, self).__init__(handler)
        # keep resources in both a list and a dict to preserve order
        self._resource_list = []
        self._resource_dict = {}

    def render(self, path, **kwargs):
        def set_resources(**resources):
            if path in self._resource_dict:
                # A template's resources must be argument-independent.
                if self._resource_dict[path] != resources:
                    raise ValueError("set_resources called with different "
                                     "resources for the same template")
            else:
                self._resource_list.append(resources)
                self._resource_dict[path] = resources
            return ""
        return self.render_string(path, set_resources=set_resources,
                                  **kwargs)

    def _get_resources(self, key):
        # Yields the values registered under ``key``, in first-seen
        # template order.
        return (entry[key] for entry in self._resource_list if key in entry)

    def _flatten_files(self, key):
        # A registered entry may be one filename or a list of filenames.
        files = []
        for entry in self._get_resources(key):
            if isinstance(entry, (unicode_type, bytes_type)):
                files.append(entry)
            else:
                files.extend(entry)
        return files

    def embedded_javascript(self):
        return "\n".join(self._get_resources("embedded_javascript"))

    def javascript_files(self):
        return self._flatten_files("javascript_files")

    def embedded_css(self):
        return "\n".join(self._get_resources("embedded_css"))

    def css_files(self):
        return self._flatten_files("css_files")

    def html_head(self):
        return "".join(self._get_resources("html_head"))

    def html_body(self):
        return "".join(self._get_resources("html_body"))
class _UIModuleNamespace(object):
    """Lazy namespace which creates UIModule proxies bound to a handler."""
    def __init__(self, handler, ui_modules):
        self.handler = handler
        self.ui_modules = ui_modules

    def __getitem__(self, key):
        # Resolution and proxy creation are delegated to the handler so
        # each proxy is bound to the current request.
        module_cls = self.ui_modules[key]
        return self.handler._ui_module(key, module_cls)

    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError as e:
            # Attribute access must raise AttributeError (not KeyError)
            # so hasattr() and template attribute lookups behave.
            raise AttributeError(str(e))
class URLSpec(object):
    """Specifies mappings between URLs and handlers."""
    def __init__(self, pattern, handler, kwargs=None, name=None):
        """Parameters:

        * ``pattern``: Regular expression to be matched.  Any groups
          in the regex will be passed in to the handler's get/post/etc
          methods as arguments.

        * ``handler_class``: `RequestHandler` subclass to be invoked.

        * ``kwargs`` (optional): A dictionary of additional arguments
          to be passed to the handler's constructor.

        * ``name`` (optional): A name for this handler.  Used by
          `Application.reverse_url`.
        """
        if not pattern.endswith('$'):
            pattern += '$'
        self.regex = re.compile(pattern)
        assert len(self.regex.groupindex) in (0, self.regex.groups), \
            ("groups in url regexes must either be all named or all "
             "positional: %r" % self.regex.pattern)
        if isinstance(handler, str):
            # A fully qualified dotted name (module.ClassName) is
            # resolved to the class it names.
            handler = import_object(handler)
        self.handler_class = handler
        self.kwargs = kwargs or {}
        self.name = name
        self._path, self._group_count = self._find_groups()

    def __repr__(self):
        return '%s(%r, %s, kwargs=%r, name=%r)' % \
            (self.__class__.__name__, self.regex.pattern,
             self.handler_class, self.kwargs, self.name)

    def _find_groups(self):
        """Returns a tuple (reverse string, group count) for a url.

        For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
        would return ('/%s/%s/', 2).
        """
        pattern = self.regex.pattern
        if pattern.startswith('^'):
            pattern = pattern[1:]
        if pattern.endswith('$'):
            pattern = pattern[:-1]
        if self.regex.groups != pattern.count('('):
            # The pattern is too complicated for our simplistic matching,
            # so we can't support reversing it.
            return (None, None)
        pieces = []
        for fragment in pattern.split('('):
            if ')' in fragment:
                # Replace each "(...)" group with a %s placeholder.
                pieces.append('%s' + fragment[fragment.index(')') + 1:])
            else:
                pieces.append(fragment)
        return (''.join(pieces), self.regex.groups)

    def reverse(self, *args):
        assert self._path is not None, \
            "Cannot reverse url regex " + self.regex.pattern
        assert len(args) == self._group_count, "required number of arguments "\
            "not found"
        if not args:
            return self._path
        converted = []
        for value in args:
            if not isinstance(value, (unicode_type, bytes_type)):
                value = str(value)
            converted.append(escape.url_escape(utf8(value), plus=False))
        return self._path % tuple(converted)
# Convenience alias: route tables conventionally use ``url(...)`` as
# shorthand for ``URLSpec(...)``.
url = URLSpec
# Constant-time comparison used to check signatures: prefer the C
# implementation from the stdlib (Python 3.3+); otherwise fall back to a
# pure-Python version that accumulates XOR differences instead of
# returning at the first mismatch, so comparison time does not reveal
# where the inputs diverge (timing side channel, CWE-208).
if hasattr(hmac, 'compare_digest'):  # python 3.3
    _time_independent_equals = hmac.compare_digest
else:
    def _time_independent_equals(a, b):
        """Return True iff byte strings ``a`` and ``b`` are equal.

        The early exit on differing lengths is acceptable because the
        length of a signature is not secret.
        """
        if len(a) != len(b):
            return False
        result = 0
        if isinstance(a[0], int):  # python3 byte strings
            for x, y in zip(a, b):
                result |= x ^ y
        else:  # python2
            for x, y in zip(a, b):
                result |= ord(x) ^ ord(y)
        return result == 0
def create_signed_value(secret, name, value, version=None, clock=None):
    """Sign and timestamp ``value`` so it cannot be forged.

    ``secret`` is the HMAC key; ``name`` is the field (cookie) name and is
    bound into the signature; ``value`` may be arbitrary bytes and is
    base64-encoded before signing.  ``version`` selects the wire format
    (1: HMAC-SHA1, 2: HMAC-SHA256 with length-prefixed fields); ``clock``
    is injectable for testing and defaults to ``time.time``.  Returns the
    signed value as bytes.
    """
    if version is None:
        version = DEFAULT_SIGNED_VALUE_VERSION
    if clock is None:
        clock = time.time
    timestamp = utf8(str(int(clock())))
    value = base64.b64encode(utf8(value))
    if version == 1:
        signature = _create_signature_v1(secret, name, value, timestamp)
        value = b"|".join([value, timestamp, signature])
        return value
    elif version == 2:
        # The v2 format consists of a version number and a series of
        # length-prefixed fields "%d:%s", the last of which is a
        # signature, all separated by pipes. All numbers are in
        # decimal format with no leading zeros. The signature is an
        # HMAC-SHA256 of the whole string up to that point, including
        # the final pipe.
        #
        # The fields are:
        # - format version (i.e. 2; no length prefix)
        # - key version (currently 0; reserved for future key rotation features)
        # - timestamp (integer seconds since epoch)
        # - name (not encoded; assumed to be ~alphanumeric)
        # - value (base64-encoded)
        # - signature (hex-encoded; no length prefix)
        def format_field(s):
            # Length-prefix one field: b"<len>:<bytes>".
            return utf8("%d:" % len(s)) + utf8(s)
        to_sign = b"|".join([
            b"2|1:0",
            format_field(timestamp),
            format_field(name),
            format_field(value),
            b''])
        signature = _create_signature_v2(secret, to_sign)
        return to_sign + signature
    else:
        raise ValueError("Unsupported version %d" % version)
# A leading version number in decimal with no leading zeros, followed by a pipe.
# Matches the prefix of v2-and-later signed values; v1 values (raw base64
# data) will not match this pattern.
_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")
def decode_signed_value(secret, name, value, max_age_days=31, clock=None,min_version=None):
    """Inverse of `create_signed_value`: verify and decode ``value``.

    Returns the original byte string on success, or None when the value is
    missing, its signature is invalid, it has expired (``max_age_days``),
    or its format version is below ``min_version``.
    """
    if clock is None:
        clock = time.time
    if min_version is None:
        min_version = DEFAULT_SIGNED_VALUE_MIN_VERSION
    if min_version > 2:
        raise ValueError("Unsupported min_version %d" % min_version)
    if not value:
        return None
    # Figure out what version this is. Version 1 did not include an
    # explicit version field and started with arbitrary base64 data,
    # which makes this tricky.
    value = utf8(value)
    m = _signed_value_version_re.match(value)
    if m is None:
        version = 1
    else:
        try:
            version = int(m.group(1))
            if version > 999:
                # Certain payloads from the version-less v1 format may
                # be parsed as valid integers. Due to base64 padding
                # restrictions, this can only happen for numbers whose
                # length is a multiple of 4, so we can treat all
                # numbers up to 999 as versions, and for the rest we
                # fall back to v1 format.
                version = 1
        except ValueError:
            version = 1
    if version < min_version:
        return None
    if version == 1:
        return _decode_signed_value_v1(secret, name, value, max_age_days, clock)
    elif version == 2:
        return _decode_signed_value_v2(secret, name, value, max_age_days, clock)
    else:
        # Version is recognized syntactically but not supported.
        return None
def _decode_signed_value_v1(secret, name, value, max_age_days, clock):
    """Verify and decode a v1 (``base64(value)|timestamp|signature``) value.

    v1 signatures (HMAC-SHA1) do not delimit the signed parts, so extra
    timestamp sanity checks are required to keep an attacker from moving
    digits between the payload and timestamp fields.
    """
    parts = utf8(value).split(b"|")
    if len(parts) != 3:
        return None
    signature = _create_signature_v1(secret, name, parts[0], parts[1])
    # Constant-time comparison: do not leak the signature via timing.
    if not _time_independent_equals(parts[2], signature):
        gen_log.warning("Invalid cookie signature %r", value)
        return None
    timestamp = int(parts[1])
    if timestamp < clock() - max_age_days * 86400:
        gen_log.warning("Expired cookie %r", value)
        return None
    if timestamp > clock() + 31 * 86400:
        # _cookie_signature does not hash a delimiter between the
        # parts of the cookie, so an attacker could transfer trailing
        # digits from the payload to the timestamp without altering the
        # signature. For backwards compatibility, sanity-check timestamp
        # here instead of modifying _cookie_signature.
        gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
        return None
    if parts[1].startswith(b"0"):
        # A leading zero would allow the same digit-shifting trick in the
        # other direction.
        gen_log.warning("Tampered cookie %r", value)
        return None
    try:
        return base64.b64decode(parts[0])
    except Exception:
        return None
def _decode_signed_value_v2(secret, name, value, max_age_days, clock):
    """Verify and decode a v2 signed value (see `create_signed_value`).

    The HMAC-SHA256 signature covers everything up to and including the
    final pipe and is checked with a constant-time comparison.
    """
    def _consume_field(s):
        # Parse one length-prefixed b"<len>:<bytes>|" field; returns
        # (field_value, remainder) or raises ValueError if malformed.
        length, _, rest = s.partition(b':')
        n = int(length)
        field_value = rest[:n]
        # In python 3, indexing bytes returns small integers; we must
        # use a slice to get a byte string as in python 2.
        if rest[n:n+1] != b'|':
            raise ValueError("malformed v2 signed value field")
        rest = rest[n+1:]
        return field_value, rest
    rest = value[2:]  # remove version number
    try:
        key_version, rest = _consume_field(rest)
        timestamp, rest = _consume_field(rest)
        name_field, rest = _consume_field(rest)
        value_field, rest = _consume_field(rest)
    except ValueError:
        return None
    # Whatever remains after the four fields is the hex signature; the
    # signed portion is everything before it (including the final pipe).
    passed_sig = rest
    signed_string = value[:-len(passed_sig)]
    expected_sig = _create_signature_v2(secret, signed_string)
    if not _time_independent_equals(passed_sig, expected_sig):
        return None
    if name_field != utf8(name):
        return None
    timestamp = int(timestamp)
    if timestamp < clock() - max_age_days * 86400:
        # The signature has expired.
        return None
    try:
        return base64.b64decode(value_field)
    except Exception:
        return None
def _create_signature_v1(secret, *parts):
    """Return the hex HMAC-SHA1 digest of ``parts`` keyed with ``secret``.

    Used only by the legacy v1 signed-value format; SHA-1 is retained for
    backwards compatibility with values issued by older versions.  Note
    that the parts are concatenated without a delimiter, which is why
    `_decode_signed_value_v1` must sanity-check the timestamp field.
    """
    # Renamed local from `hash`, which shadowed the builtin.
    signer = hmac.new(utf8(secret), digestmod=hashlib.sha1)
    for part in parts:
        signer.update(utf8(part))
    return utf8(signer.hexdigest())
def _create_signature_v2(secret, s):
    """Return the hex HMAC-SHA256 digest of ``s`` keyed with ``secret``.

    Used by the v2 signed-value format.
    """
    # Renamed local from `hash`, which shadowed the builtin.
    signer = hmac.new(utf8(secret), digestmod=hashlib.sha256)
    signer.update(utf8(s))
    return utf8(signer.hexdigest())
| ./CrossVul/dataset_final_sorted/CWE-203/py/good_2410_3 |
crossvul-python_data_bad_2410_2 | """Miscellaneous utility functions and classes.
This module is used internally by Tornado. It is not necessarily expected
that the functions and classes defined here will be useful to other
applications, but they are documented here in case they are.
The one public-facing part of this module is the `Configurable` class
and its `~Configurable.configure` method, which becomes a part of the
interface of its subclasses, including `.AsyncHTTPClient`, `.IOLoop`,
and `.Resolver`.
"""
from __future__ import absolute_import, division, print_function, with_statement
import inspect
import sys
import zlib
class ObjectDict(dict):
    """Makes a dictionary behave like an object, with attribute-style access.
    """

    def __getattr__(self, name):
        # Only invoked when normal attribute lookup fails; fall through
        # to item lookup and translate a miss into AttributeError so
        # getattr()/hasattr() behave as callers expect.
        if name in self:
            return self[name]
        raise AttributeError(name)

    def __setattr__(self, name, value):
        # All attribute writes become item writes.
        self[name] = value
class GzipDecompressor(object):
    """Streaming gzip decompressor.

    The interface is like that of `zlib.decompressobj` (without the
    optional arguments), but it understands gzip headers and checksums.
    """

    def __init__(self):
        # A wbits value of 16 + MAX_WBITS tells zlib to expect a gzip
        # wrapper (header and trailing CRC) instead of a raw zlib stream.
        # http://stackoverflow.com/questions/1838699/how-can-i-decompress-a-gzip-stream-with-zlib
        # This works on cpython and pypy, but not jython.
        self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def decompress(self, value):
        """Decompress a chunk, returning newly-available data.

        Some data may be buffered for later processing; `flush` must
        be called when there is no more input data to ensure that
        all data was processed.
        """
        return self.decompressobj.decompress(value)

    def flush(self):
        """Return any remaining buffered data not yet returned by decompress.

        Also checks for errors such as truncated input.
        No other methods may be called on this object after `flush`.
        """
        return self.decompressobj.flush()
def import_object(name):
    """Imports an object by dotted name.

    ``import_object('x')`` is equivalent to ``import x``;
    ``import_object('x.y.z')`` is equivalent to ``from x.y import z``.

    Raises ImportError when the final attribute cannot be resolved.
    """
    if '.' not in name:
        return __import__(name, None, None)
    module_path, _, attr = name.rpartition('.')
    module = __import__(module_path, None, None, [attr], 0)
    try:
        return getattr(module, attr)
    except AttributeError:
        raise ImportError("No module named %s" % attr)
# Fake unicode literal support: Python 3.2 doesn't have the u'' marker for
# literal strings, and alternative solutions like "from __future__ import
# unicode_literals" have other problems (see PEP 414). u() can be applied
# to ascii strings that include \u escapes (but they must not contain
# literal non-ascii characters).
if type('') is not type(b''):
    # Python 3: native string literals are already unicode.
    def u(s):
        return s
    bytes_type = bytes
    unicode_type = str
    basestring_type = str
else:
    # Python 2: decode \u escapes to produce a unicode object.  The
    # ``unicode``/``basestring`` builtins exist only on Python 2; this
    # branch is never executed (and the names never looked up) on
    # Python 3.
    def u(s):
        return s.decode('unicode_escape')
    bytes_type = str
    unicode_type = unicode
    basestring_type = basestring
# The three-argument ``raise`` statement (py2-only) and the ``exec``
# statement vs. function differences are *syntax* errors on the other
# major version, so each variant must be compiled from a string and only
# on the matching interpreter.
if sys.version_info > (3,):
    exec("""
def raise_exc_info(exc_info):
    raise exc_info[1].with_traceback(exc_info[2])
def exec_in(code, glob, loc=None):
    if isinstance(code, str):
        code = compile(code, '<string>', 'exec', dont_inherit=True)
    exec(code, glob, loc)
""")
else:
    exec("""
def raise_exc_info(exc_info):
    raise exc_info[0], exc_info[1], exc_info[2]
def exec_in(code, glob, loc=None):
    if isinstance(code, basestring):
        # exec(string) inherits the caller's future imports; compile
        # the string first to prevent that.
        code = compile(code, '<string>', 'exec', dont_inherit=True)
    exec code in glob, loc
""")
class Configurable(object):
    """Base class for configurable interfaces.

    A configurable interface is an (abstract) class whose constructor
    acts as a factory function for one of its implementation subclasses.
    The implementation subclass as well as optional keyword arguments to
    its initializer can be set globally at runtime with `configure`.

    By using the constructor as the factory method, the interface
    looks like a normal class, `isinstance` works as usual, etc. This
    pattern is most useful when the choice of implementation is likely
    to be a global decision (e.g. when `~select.epoll` is available,
    always use it instead of `~select.select`), or when a
    previously-monolithic class has been split into specialized
    subclasses.

    Configurable subclasses must define the class methods
    `configurable_base` and `configurable_default`, and use the instance
    method `initialize` instead of ``__init__``.
    """
    # Stored on the hierarchy's *base* class.  Note the double-underscore
    # name mangling: these become _Configurable__impl_class etc. and are
    # always written through ``base`` (the configurable_base() result).
    __impl_class = None
    __impl_kwargs = None

    def __new__(cls, **kwargs):
        base = cls.configurable_base()
        args = {}
        if cls is base:
            # Instantiating the abstract base: dispatch to the configured
            # (or default) implementation, merging any configured kwargs.
            impl = cls.configured_class()
            if base.__impl_kwargs:
                args.update(base.__impl_kwargs)
        else:
            # Instantiating a concrete subclass directly: no dispatch.
            impl = cls
        args.update(kwargs)
        instance = super(Configurable, cls).__new__(impl)
        # initialize vs __init__ chosen for compatibility with AsyncHTTPClient
        # singleton magic. If we get rid of that we can switch to __init__
        # here too.
        instance.initialize(**args)
        return instance

    @classmethod
    def configurable_base(cls):
        """Returns the base class of a configurable hierarchy.

        This will normally return the class in which it is defined.
        (which is *not* necessarily the same as the cls classmethod parameter).
        """
        raise NotImplementedError()

    @classmethod
    def configurable_default(cls):
        """Returns the implementation class to be used if none is configured."""
        raise NotImplementedError()

    def initialize(self):
        """Initialize a `Configurable` subclass instance.

        Configurable classes should use `initialize` instead of ``__init__``.
        """

    @classmethod
    def configure(cls, impl, **kwargs):
        """Sets the class to use when the base class is instantiated.

        Keyword arguments will be saved and added to the arguments passed
        to the constructor. This can be used to set global defaults for
        some parameters.  ``impl`` may also be a dotted-name string, which
        is imported lazily.
        """
        base = cls.configurable_base()
        if isinstance(impl, (unicode_type, bytes_type)):
            impl = import_object(impl)
        if impl is not None and not issubclass(impl, cls):
            raise ValueError("Invalid subclass of %s" % cls)
        base.__impl_class = impl
        base.__impl_kwargs = kwargs

    @classmethod
    def configured_class(cls):
        """Returns the currently configured class."""
        base = cls.configurable_base()
        # Lazily fall back to the default implementation on first use.
        if cls.__impl_class is None:
            base.__impl_class = cls.configurable_default()
        return base.__impl_class

    @classmethod
    def _save_configuration(cls):
        # Snapshot (impl, kwargs) so the configuration can be restored
        # later with _restore_configuration.
        base = cls.configurable_base()
        return (base.__impl_class, base.__impl_kwargs)

    @classmethod
    def _restore_configuration(cls, saved):
        # Inverse of _save_configuration.
        base = cls.configurable_base()
        base.__impl_class = saved[0]
        base.__impl_kwargs = saved[1]
class ArgReplacer(object):
    """Replaces one value in an ``args, kwargs`` pair.

    Inspects the function signature to find an argument by name
    whether it is passed by position or keyword.  For use in decorators
    and similar wrappers.
    """
    def __init__(self, func, name):
        self.name = name
        # Bug fix: inspect.getargspec was removed in Python 3.11; use
        # getfullargspec when available (its .args attribute is
        # compatible) and fall back to getargspec on Python 2.
        if hasattr(inspect, 'getfullargspec'):
            spec = inspect.getfullargspec(func)
        else:
            spec = inspect.getargspec(func)
        try:
            self.arg_pos = spec.args.index(self.name)
        except ValueError:
            # Not a positional parameter
            self.arg_pos = None

    def replace(self, new_value, args, kwargs):
        """Replace the named argument in ``args, kwargs`` with ``new_value``.

        Returns ``(old_value, args, kwargs)``. The returned ``args`` and
        ``kwargs`` objects may not be the same as the input objects, or
        the input objects may be mutated.

        If the named argument was not found, ``new_value`` will be added
        to ``kwargs`` and None will be returned as ``old_value``.
        """
        if self.arg_pos is not None and len(args) > self.arg_pos:
            # The arg to replace is passed positionally
            old_value = args[self.arg_pos]
            args = list(args)  # *args is normally a tuple
            args[self.arg_pos] = new_value
        else:
            # The arg to replace is either omitted or passed by keyword.
            old_value = kwargs.get(self.name)
            kwargs[self.name] = new_value
        return old_value, args, kwargs
def doctests():
    """Collect this module's doctests into a `unittest`-compatible suite."""
    import doctest
    return doctest.DocTestSuite()
| ./CrossVul/dataset_final_sorted/CWE-203/py/bad_2410_2 |
crossvul-python_data_bad_2410_3 | #!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""``tornado.web`` provides a simple web framework with asynchronous
features that allow it to scale to large numbers of open connections,
making it ideal for `long polling
<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.
Here is a simple "Hello, world" example app::
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
application.listen(8888)
tornado.ioloop.IOLoop.instance().start()
See the :doc:`Tornado overview <overview>` for more details and a good getting
started guide.
Thread-safety notes
-------------------
In general, methods on `RequestHandler` and elsewhere in Tornado are
not thread-safe. In particular, methods such as
`~RequestHandler.write()`, `~RequestHandler.finish()`, and
`~RequestHandler.flush()` must only be called from the main thread. If
you use multiple threads it is important to use `.IOLoop.add_callback`
to transfer control back to the main thread before finishing the
request.
"""
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
import datetime
import email.utils
import functools
import gzip
import hashlib
import hmac
import mimetypes
import numbers
import os.path
import re
import stat
import sys
import threading
import time
import tornado
import traceback
import types
import uuid
from tornado.concurrent import Future
from tornado import escape
from tornado import httputil
from tornado import locale
from tornado.log import access_log, app_log, gen_log
from tornado import stack_context
from tornado import template
from tornado.escape import utf8, _unicode
from tornado.util import bytes_type, import_object, ObjectDict, raise_exc_info, unicode_type
try:
from io import BytesIO # python 3
except ImportError:
from cStringIO import StringIO as BytesIO # python 2
try:
import Cookie # py2
except ImportError:
import http.cookies as Cookie # py3
try:
import urlparse # py2
except ImportError:
import urllib.parse as urlparse # py3
try:
from urllib import urlencode # py2
except ImportError:
from urllib.parse import urlencode # py3
MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1
"""The oldest signed value version supported by this version of Tornado.
Signed values older than this version cannot be decoded.
.. versionadded:: 3.2.1
"""
MAX_SUPPORTED_SIGNED_VALUE_VERSION = 2
"""The newest signed value version supported by this version of Tornado.
Signed values newer than this version cannot be decoded.
.. versionadded:: 3.2.1
"""
DEFAULT_SIGNED_VALUE_VERSION = 2
"""The signed value version produced by `.RequestHandler.create_signed_value`.
May be overridden by passing a ``version`` keyword argument.
.. versionadded:: 3.2.1
"""
DEFAULT_SIGNED_VALUE_MIN_VERSION = 1
"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`.
May be overrided by passing a ``min_version`` keyword argument.
.. versionadded:: 3.2.1
"""
class RequestHandler(object):
    """Subclass this class and define `get()` or `post()` to make a handler.

    If you want to support more methods than the standard GET/HEAD/POST, you
    should override the class variable ``SUPPORTED_METHODS`` in your
    `RequestHandler` subclass.
    """
    SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
                         "OPTIONS")

    # Class-level cache of template loaders, shared by all handlers and
    # guarded by _template_loader_lock.
    _template_loaders = {}  # {path: template.BaseLoader}
    _template_loader_lock = threading.Lock()

    # Control characters removed from decoded request arguments.  Note
    # that tab/LF/CR (\x09-\x0d) are deliberately outside these ranges;
    # see _get_arguments.
    _remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
    def __init__(self, application, request, **kwargs):
        """Bind this handler to its `Application` and the current request.

        Extra keyword arguments (from the URLSpec) are forwarded to
        `initialize`.
        """
        super(RequestHandler, self).__init__()
        self.application = application
        self.request = request
        self._headers_written = False
        self._finished = False
        self._auto_finish = True
        self._transforms = None  # will be set in _execute
        self.path_args = None
        self.path_kwargs = None
        # Bind each application UI method to this handler so templates
        # can call them without an explicit handler argument.
        self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
                             application.ui_methods.items())
        # UIModules are available as both `modules` and `_tt_modules` in the
        # template namespace. Historically only `modules` was available
        # but could be clobbered by user additions to the namespace.
        # The template {% module %} directive looks in `_tt_modules` to avoid
        # possible conflicts.
        self.ui["_tt_modules"] = _UIModuleNamespace(self,
                                                    application.ui_modules)
        self.ui["modules"] = self.ui["_tt_modules"]
        self.clear()
        # Check since connection is not available in WSGI
        if getattr(self.request, "connection", None):
            self.request.connection.set_close_callback(
                self.on_connection_close)
        self.initialize(**kwargs)
    def initialize(self):
        """Hook for subclass initialization; the default does nothing.

        A dictionary passed as the third argument of a url spec will be
        supplied as keyword arguments to initialize().

        Example::

            class ProfileHandler(RequestHandler):
                def initialize(self, database):
                    self.database = database

                def get(self, username):
                    ...

            app = Application([
                (r'/user/(.*)', ProfileHandler, dict(database=database)),
                ])
        """
        pass
    @property
    def settings(self):
        """An alias for `self.application.settings <Application.settings>`.

        Read-only property; there is no setter.
        """
        return self.application.settings
    # Default verb implementations: every standard HTTP method responds
    # with 405 (Method Not Allowed) until a subclass overrides it.  See
    # also SUPPORTED_METHODS above.
    def head(self, *args, **kwargs):
        raise HTTPError(405)

    def get(self, *args, **kwargs):
        raise HTTPError(405)

    def post(self, *args, **kwargs):
        raise HTTPError(405)

    def delete(self, *args, **kwargs):
        raise HTTPError(405)

    def patch(self, *args, **kwargs):
        raise HTTPError(405)

    def put(self, *args, **kwargs):
        raise HTTPError(405)

    def options(self, *args, **kwargs):
        raise HTTPError(405)
    def prepare(self):
        """Called at the beginning of a request before `get`/`post`/etc.

        Override this method to perform common initialization regardless
        of the request method.

        Asynchronous support: Decorate this method with `.gen.coroutine`
        or `.return_future` to make it asynchronous (the
        `asynchronous` decorator cannot be used on `prepare`).
        If this method returns a `.Future` execution will not proceed
        until the `.Future` is done.

        .. versionadded:: 3.1
           Asynchronous support.
        """
        pass
    def on_finish(self):
        """Called after the end of a request.

        Override this method to perform cleanup, logging, etc.
        This method is a counterpart to `prepare`.  ``on_finish`` may
        not produce any output, as it is called after the response
        has been sent to the client.
        """
        pass
    def on_connection_close(self):
        """Called in async handlers if the client closed the connection.

        Override this to clean up resources associated with
        long-lived connections.  Note that this method is called only if
        the connection was closed during asynchronous processing; if you
        need to do cleanup after every request override `on_finish`
        instead.

        Proxies may keep a connection open for a time (perhaps
        indefinitely) after the client has gone away, so this method
        may not be called promptly after the end user closes their
        connection.
        """
        pass
    def clear(self):
        """Resets all headers and content for this response."""
        # Fresh default headers for every response; set_default_headers()
        # lets subclasses layer their own on top.
        self._headers = httputil.HTTPHeaders({
            "Server": "TornadoServer/%s" % tornado.version,
            "Content-Type": "text/html; charset=UTF-8",
            "Date": httputil.format_timestamp(time.time()),
        })
        self.set_default_headers()
        # HTTP/1.0 keep-alive: echo the Connection header when the client
        # requested keep-alive and the connection permits it.
        if (not self.request.supports_http_1_1() and
            getattr(self.request, 'connection', None) and
                not self.request.connection.no_keep_alive):
            conn_header = self.request.headers.get("Connection")
            if conn_header and (conn_header.lower() == "keep-alive"):
                self._headers["Connection"] = "Keep-Alive"
        self._write_buffer = []
        self._status_code = 200
        self._reason = httputil.responses[200]
    def set_default_headers(self):
        """Override this to set HTTP headers at the beginning of the request.

        For example, this is the place to set a custom ``Server`` header.
        Note that setting such headers in the normal flow of request
        processing may not do what you want, since headers may be reset
        during error handling.
        """
        pass
def set_status(self, status_code, reason=None):
"""Sets the status code for our response.
:arg int status_code: Response status code. If ``reason`` is ``None``,
it must be present in `httplib.responses <http.client.responses>`.
:arg string reason: Human-readable reason phrase describing the status
code. If ``None``, it will be filled in from
`httplib.responses <http.client.responses>`.
"""
self._status_code = status_code
if reason is not None:
self._reason = escape.native_str(reason)
else:
try:
self._reason = httputil.responses[status_code]
except KeyError:
raise ValueError("unknown status code %d", status_code)
    def get_status(self):
        """Returns the status code for our response.

        Defaults to 200 after `clear`; changed via `set_status`.
        """
        return self._status_code
    def set_header(self, name, value):
        """Sets the given response header name and value.

        If a datetime is given, we automatically format it according to the
        HTTP specification. If the value is not a string, we convert it to
        a string. All header values are then encoded as UTF-8.
        """
        # _convert_header_value enforces type, length, and control-char
        # safety (header-injection protection).
        self._headers[name] = self._convert_header_value(value)
    def add_header(self, name, value):
        """Adds the given response header and value.

        Unlike `set_header`, `add_header` may be called multiple times
        to return multiple values for the same header.
        """
        self._headers.add(name, self._convert_header_value(value))
    def clear_header(self, name):
        """Clears an outgoing header, undoing a previous `set_header` call.

        Note that this method does not apply to multi-valued headers
        set by `add_header`.  Clearing a header that was never set is a
        no-op.
        """
        if name in self._headers:
            del self._headers[name]
_INVALID_HEADER_CHAR_RE = re.compile(br"[\x00-\x1f]")
def _convert_header_value(self, value):
if isinstance(value, bytes_type):
pass
elif isinstance(value, unicode_type):
value = value.encode('utf-8')
elif isinstance(value, numbers.Integral):
# return immediately since we know the converted value will be safe
return str(value)
elif isinstance(value, datetime.datetime):
return httputil.format_timestamp(value)
else:
raise TypeError("Unsupported header value %r" % value)
# If \n is allowed into the header, it is possible to inject
# additional headers or split the request. Also cap length to
# prevent obviously erroneous values.
if (len(value) > 4000 or
RequestHandler._INVALID_HEADER_CHAR_RE.search(value)):
raise ValueError("Unsafe header value %r", value)
return value
    # Unique sentinel object distinguishing "no default supplied" from an
    # explicit default of None.
    _ARG_DEFAULT = []

    def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        The returned value is always unicode.
        """
        return self._get_argument(name, default, self.request.arguments, strip)
    def get_arguments(self, name, strip=True):
        """Returns a list of the arguments with the given name.

        If the argument is not present, returns an empty list.

        The returned values are always unicode.
        """
        return self._get_arguments(name, self.request.arguments, strip)
    def get_body_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name
        from the request body.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        The returned value is always unicode.

        .. versionadded:: 3.2
        """
        # Same semantics as get_argument, restricted to body arguments.
        return self._get_argument(name, default, self.request.body_arguments, strip)
    def get_body_arguments(self, name, strip=True):
        """Returns a list of the body arguments with the given name.

        If the argument is not present, returns an empty list.

        The returned values are always unicode.

        .. versionadded:: 3.2
        """
        return self._get_arguments(name, self.request.body_arguments, strip)
    def get_query_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name
        from the request query string.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        The returned value is always unicode.

        .. versionadded:: 3.2
        """
        # Same semantics as get_argument, restricted to query arguments.
        return self._get_argument(name, default, self.request.query_arguments, strip)
    def get_query_arguments(self, name, strip=True):
        """Returns a list of the query arguments with the given name.

        If the argument is not present, returns an empty list.

        The returned values are always unicode.

        .. versionadded:: 3.2
        """
        return self._get_arguments(name, self.request.query_arguments, strip)
    def _get_argument(self, name, default, source, strip=True):
        """Shared implementation for the public ``get_*argument`` methods.

        Returns the last value for ``name`` in ``source``, the ``default``
        if absent, or raises `MissingArgumentError` when no default was
        given (i.e. ``default`` is the ``_ARG_DEFAULT`` sentinel).
        """
        args = self._get_arguments(name, source, strip=strip)
        if not args:
            if default is self._ARG_DEFAULT:
                raise MissingArgumentError(name)
            return default
        return args[-1]
    def _get_arguments(self, name, source, strip=True):
        """Decode, sanitize, and optionally strip all values for ``name``."""
        values = []
        for v in source.get(name, []):
            v = self.decode_argument(v, name=name)
            if isinstance(v, unicode_type):
                # Get rid of any weird control chars (unless decoding gave
                # us bytes, in which case leave it alone)
                v = RequestHandler._remove_control_chars_regex.sub(" ", v)
            if strip:
                v = v.strip()
            values.append(v)
        return values
    def decode_argument(self, value, name=None):
        """Decodes an argument from the request.

        The argument has been percent-decoded and is now a byte string.
        By default, this method decodes the argument as utf-8 and returns
        a unicode string, but this may be overridden in subclasses.

        This method is used as a filter for both `get_argument()` and for
        values extracted from the url and passed to `get()`/`post()`/etc.

        The name of the argument is provided if known, but may be None
        (e.g. for unnamed groups in the url regex).

        :raises HTTPError: 400 when the bytes are not valid utf-8.
        """
        try:
            return _unicode(value)
        except UnicodeDecodeError:
            # Truncate the offending value in the error message.
            raise HTTPError(400, "Invalid unicode in %s: %r" %
                            (name or "url", value[:40]))
    @property
    def cookies(self):
        """An alias for `self.request.cookies <.httpserver.HTTPRequest.cookies>`."""
        return self.request.cookies
    def get_cookie(self, name, default=None):
        """Gets the value of the cookie with the given name, else default."""
        # Guard: the request's cookie collection may be absent entirely.
        if self.request.cookies is not None and name in self.request.cookies:
            # Morsel.value is the decoded cookie value.
            return self.request.cookies[name].value
        return default
    def set_cookie(self, name, value, domain=None, expires=None, path="/",
                   expires_days=None, **kwargs):
        """Sets the given cookie name/value with the given options.

        Additional keyword arguments are set on the Cookie.Morsel
        directly.
        See http://docs.python.org/library/cookie.html#morsel-objects
        for available attributes.
        """
        # The cookie library only accepts type str, in both python 2 and 3
        name = escape.native_str(name)
        value = escape.native_str(value)
        # Reject control characters and whitespace to prevent header
        # injection via the Set-Cookie line.
        if re.search(r"[\x00-\x20]", name + value):
            # Don't let us accidentally inject bad stuff
            raise ValueError("Invalid cookie %r: %r" % (name, value))
        if not hasattr(self, "_new_cookie"):
            self._new_cookie = Cookie.SimpleCookie()
        if name in self._new_cookie:
            del self._new_cookie[name]
        self._new_cookie[name] = value
        morsel = self._new_cookie[name]
        if domain:
            morsel["domain"] = domain
        # expires_days is a convenience alias, ignored when an explicit
        # expires timestamp was given.
        if expires_days is not None and not expires:
            expires = datetime.datetime.utcnow() + datetime.timedelta(
                days=expires_days)
        if expires:
            morsel["expires"] = httputil.format_timestamp(expires)
        if path:
            morsel["path"] = path
        for k, v in kwargs.items():
            if k == 'max_age':
                # Morsel expects the dashed form of this attribute name.
                k = 'max-age'
            morsel[k] = v
    def clear_cookie(self, name, path="/", domain=None):
        """Deletes the cookie with the given name.

        Due to limitations of the cookie protocol, you must pass the same
        path and domain to clear a cookie as were used when that cookie
        was set (but there is no way to find out on the server side
        which values were used for a given cookie).
        """
        # Deletion is expressed as an empty value expiring in the past.
        expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
        self.set_cookie(name, value="", path=path, expires=expires,
                        domain=domain)
    def clear_all_cookies(self, path="/", domain=None):
        """Deletes all the cookies the user sent with this request.

        See `clear_cookie` for more information on the path and domain
        parameters.

        .. versionchanged:: 3.2

           Added the ``path`` and ``domain`` parameters.
        """
        for name in self.request.cookies:
            self.clear_cookie(name, path=path, domain=domain)
    def set_secure_cookie(self, name, value, expires_days=30, version=None,
                          **kwargs):
        """Signs and timestamps a cookie so it cannot be forged.

        You must specify the ``cookie_secret`` setting in your Application
        to use this method. It should be a long, random sequence of bytes
        to be used as the HMAC secret for the signature.

        To read a cookie set with this method, use `get_secure_cookie()`.

        Note that the ``expires_days`` parameter sets the lifetime of the
        cookie in the browser, but is independent of the ``max_age_days``
        parameter to `get_secure_cookie`.

        Secure cookies may contain arbitrary byte values, not just unicode
        strings (unlike regular cookies)

        .. versionchanged:: 3.2.1

           Added the ``version`` argument.  Introduced cookie version 2
           and made it the default.
        """
        self.set_cookie(name, self.create_signed_value(name, value,
                                                       version=version),
                        expires_days=expires_days, **kwargs)
def create_signed_value(self, name, value, version=None):
    """Signs and timestamps a string so it cannot be forged.

    Normally used via set_secure_cookie, but provided as a separate
    method for non-cookie uses.  To decode a value not stored
    as a cookie use the optional value argument to get_secure_cookie.

    .. versionchanged:: 3.2.1
       Added the ``version`` argument.  Introduced cookie version 2
       and made it the default.
    """
    self.require_setting("cookie_secret", "secure cookies")
    # Delegates to the module-level create_signed_value() helper, passing
    # the application-wide secret.
    return create_signed_value(self.application.settings["cookie_secret"],
                               name, value, version=version)
def get_secure_cookie(self, name, value=None, max_age_days=31,
                      min_version=None):
    """Return the given signed cookie if it validates, or None.

    The decoded cookie value is returned as a byte string (unlike
    `get_cookie`).

    .. versionchanged:: 3.2.1
       Added the ``min_version`` argument.  Introduced cookie version 2;
       both versions 1 and 2 are accepted by default.
    """
    self.require_setting("cookie_secret", "secure cookies")
    raw = self.get_cookie(name) if value is None else value
    secret = self.application.settings["cookie_secret"]
    return decode_signed_value(secret, name, raw,
                               max_age_days=max_age_days,
                               min_version=min_version)
def redirect(self, url, permanent=False, status=None):
    """Send a redirect to the given (optionally relative) URL.

    If ``status`` is given, that value is used as the HTTP status code;
    otherwise either 301 (permanent) or 302 (temporary) is chosen based
    on the ``permanent`` argument.  The default is 302 (temporary).
    """
    if self._headers_written:
        raise Exception("Cannot redirect after headers have been written")
    if status is not None:
        assert isinstance(status, int) and 300 <= status <= 399
    else:
        status = 301 if permanent else 302
    self.set_status(status)
    # Resolve relative targets against the current request URI.
    target = urlparse.urljoin(utf8(self.request.uri), utf8(url))
    self.set_header("Location", target)
    self.finish()
def write(self, chunk):
    """Write the given chunk to the output buffer.

    To send the output to the network, use the flush() method below.

    If the given chunk is a dictionary, it is written as JSON and the
    response ``Content-Type`` is set to ``application/json`` (to send
    JSON with a different ``Content-Type``, call set_header *after*
    calling write()).

    Note that lists are not converted to JSON because of a potential
    cross-site security vulnerability; all JSON output should be
    wrapped in a dictionary.  More details at
    http://haacked.com/archive/2008/11/20/anatomy-of-a-subtle-json-vulnerability.aspx
    """
    if self._finished:
        raise RuntimeError("Cannot write() after finish().  May be caused "
                           "by using async operations without the "
                           "@asynchronous decorator.")
    if isinstance(chunk, dict):
        chunk = escape.json_encode(chunk)
        self.set_header("Content-Type", "application/json; charset=UTF-8")
    self._write_buffer.append(utf8(chunk))
def render(self, template_name, **kwargs):
    """Renders the template with the given arguments as the response.

    The generated page is post-processed to splice in any JavaScript,
    CSS, and extra head/body markup contributed by the UI modules that
    were activated while rendering, then passed to `finish`.
    """
    html = self.render_string(template_name, **kwargs)

    # Insert the additional JS and CSS added by the modules on the page
    js_embed = []      # inline <script> bodies
    js_files = []      # external script paths/URLs
    css_embed = []     # inline <style> bodies
    css_files = []     # external stylesheet paths/URLs
    html_heads = []    # raw markup inserted before </head>
    html_bodies = []   # raw markup inserted before </body>
    for module in getattr(self, "_active_modules", {}).values():
        embed_part = module.embedded_javascript()
        if embed_part:
            js_embed.append(utf8(embed_part))
        file_part = module.javascript_files()
        if file_part:
            # A module may return a single path or an iterable of paths.
            if isinstance(file_part, (unicode_type, bytes_type)):
                js_files.append(file_part)
            else:
                js_files.extend(file_part)
        embed_part = module.embedded_css()
        if embed_part:
            css_embed.append(utf8(embed_part))
        file_part = module.css_files()
        if file_part:
            if isinstance(file_part, (unicode_type, bytes_type)):
                css_files.append(file_part)
            else:
                css_files.extend(file_part)
        head_part = module.html_head()
        if head_part:
            html_heads.append(utf8(head_part))
        body_part = module.html_body()
        if body_part:
            html_bodies.append(utf8(body_part))

    def is_absolute(path):
        # Absolute paths and full URLs are used as-is; everything else is
        # routed through static_url() below.
        return any(path.startswith(x) for x in ["/", "http:", "https:"])
    if js_files:
        # Maintain order of JavaScript files given by modules
        paths = []
        unique_paths = set()
        for path in js_files:
            if not is_absolute(path):
                path = self.static_url(path)
            if path not in unique_paths:
                paths.append(path)
                unique_paths.add(path)
        js = ''.join('<script src="' + escape.xhtml_escape(p) +
                     '" type="text/javascript"></script>'
                     for p in paths)
        sloc = html.rindex(b'</body>')
        html = html[:sloc] + utf8(js) + b'\n' + html[sloc:]
    if js_embed:
        js = b'<script type="text/javascript">\n//<![CDATA[\n' + \
            b'\n'.join(js_embed) + b'\n//]]>\n</script>'
        sloc = html.rindex(b'</body>')
        html = html[:sloc] + js + b'\n' + html[sloc:]
    if css_files:
        paths = []
        unique_paths = set()
        for path in css_files:
            if not is_absolute(path):
                path = self.static_url(path)
            if path not in unique_paths:
                paths.append(path)
                unique_paths.add(path)
        css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
                      'type="text/css" rel="stylesheet"/>'
                      for p in paths)
        hloc = html.index(b'</head>')
        html = html[:hloc] + utf8(css) + b'\n' + html[hloc:]
    if css_embed:
        css = b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
            b'\n</style>'
        hloc = html.index(b'</head>')
        html = html[:hloc] + css + b'\n' + html[hloc:]
    if html_heads:
        hloc = html.index(b'</head>')
        html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:]
    if html_bodies:
        hloc = html.index(b'</body>')
        html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:]
    self.finish(html)
def render_string(self, template_name, **kwargs):
    """Generate the given template with the given arguments.

    We return the generated byte string (in utf8).  To generate and
    write a template as a response, use render() above.
    """
    # If no template_path is specified, use the path of the calling file
    template_path = self.get_template_path()
    if not template_path:
        # Walk up the stack until we leave this module's frames, and use
        # the directory of the first outside caller as the template path.
        frame = sys._getframe(0)
        web_file = frame.f_code.co_filename
        while frame.f_code.co_filename == web_file:
            frame = frame.f_back
        template_path = os.path.dirname(frame.f_code.co_filename)
    with RequestHandler._template_loader_lock:
        # Loaders are cached per template_path and shared by all handlers.
        if template_path not in RequestHandler._template_loaders:
            loader = self.create_template_loader(template_path)
            RequestHandler._template_loaders[template_path] = loader
        else:
            loader = RequestHandler._template_loaders[template_path]
    t = loader.load(template_name)
    namespace = self.get_template_namespace()
    namespace.update(kwargs)
    return t.generate(**namespace)
def get_template_namespace(self):
    """Return a dictionary to be used as the default template namespace.

    May be overridden by subclasses to add or modify values.  The
    results are combined with additional defaults in `tornado.template`
    and with the keyword arguments to `render` or `render_string`.
    """
    namespace = {
        "handler": self,
        "request": self.request,
        "current_user": self.current_user,
        "locale": self.locale,
        "_": self.locale.translate,
        "static_url": self.static_url,
        "xsrf_form_html": self.xsrf_form_html,
        "reverse_url": self.reverse_url,
    }
    # ui_methods/ui_modules registered on the application are exposed via
    # self.ui and merged in last.
    namespace.update(self.ui)
    return namespace
def create_template_loader(self, template_path):
    """Return a new template loader for the given path.

    May be overridden by subclasses.  By default returns a
    directory-based loader on the given path, using the ``autoescape``
    application setting.  If a ``template_loader`` application setting
    is supplied, that loader is returned instead.
    """
    settings = self.application.settings
    if "template_loader" in settings:
        return settings["template_loader"]
    loader_kwargs = {}
    if "autoescape" in settings:
        # autoescape=None means "no escaping", so we have to be sure
        # to only pass this kwarg if the user asked for it.
        loader_kwargs["autoescape"] = settings["autoescape"]
    return template.Loader(template_path, **loader_kwargs)
def flush(self, include_footers=False, callback=None):
    """Flushes the current output buffer to the network.

    The ``callback`` argument, if given, can be used for flow control:
    it will be run when all flushed data has been written to the socket.
    Note that only one flush callback can be outstanding at a time;
    if another flush occurs before the previous flush's callback
    has been run, the previous callback will be discarded.
    """
    if self.application._wsgi:
        # WSGI applications cannot usefully support flush, so just make
        # it a no-op (and run the callback immediately).
        if callback is not None:
            callback()
        return
    chunk = b"".join(self._write_buffer)
    self._write_buffer = []
    if not self._headers_written:
        self._headers_written = True
        # The first chunk goes through transform_first_chunk, which may
        # also rewrite the status code and headers (e.g. gzip/chunked).
        for transform in self._transforms:
            self._status_code, self._headers, chunk = \
                transform.transform_first_chunk(
                    self._status_code, self._headers, chunk, include_footers)
        headers = self._generate_headers()
    else:
        for transform in self._transforms:
            chunk = transform.transform_chunk(chunk, include_footers)
        headers = b""
    # Ignore the chunk and only write the headers for HEAD requests
    if self.request.method == "HEAD":
        if headers:
            self.request.write(headers, callback=callback)
        return
    self.request.write(headers + chunk, callback=callback)
def finish(self, chunk=None):
    """Finishes this response, ending the HTTP request."""
    if self._finished:
        raise RuntimeError("finish() called twice.  May be caused "
                           "by using async operations without the "
                           "@asynchronous decorator.")
    if chunk is not None:
        self.write(chunk)
    # Automatically support ETags and add the Content-Length header if
    # we have not flushed any content yet.
    if not self._headers_written:
        if (self._status_code == 200 and
            self.request.method in ("GET", "HEAD") and
                "Etag" not in self._headers):
            self.set_etag_header()
            # If the client already has this entity, downgrade to 304.
            if self.check_etag_header():
                self._write_buffer = []
                self.set_status(304)
        if self._status_code == 304:
            assert not self._write_buffer, "Cannot send body with 304"
            self._clear_headers_for_304()
        elif "Content-Length" not in self._headers:
            content_length = sum(len(part) for part in self._write_buffer)
            self.set_header("Content-Length", content_length)
    if hasattr(self.request, "connection"):
        # Now that the request is finished, clear the callback we
        # set on the HTTPConnection (which would otherwise prevent the
        # garbage collection of the RequestHandler when there
        # are keepalive connections)
        self.request.connection.set_close_callback(None)
    if not self.application._wsgi:
        self.flush(include_footers=True)
        self.request.finish()
        self._log()
    self._finished = True
    self.on_finish()
    # Break up a reference cycle between this handler and the
    # _ui_module closures to allow for faster GC on CPython.
    self.ui = None
def send_error(self, status_code=500, **kwargs):
    """Sends the given HTTP error code to the browser.

    If `flush()` has already been called, it is not possible to send
    an error, so this method will simply terminate the response.
    If output has been written but not yet flushed, it will be discarded
    and replaced with the error page.

    Override `write_error()` to customize the error page that is
    returned.  Additional keyword arguments are passed through to
    `write_error`.
    """
    if self._headers_written:
        gen_log.error("Cannot send error response after headers written")
        if not self._finished:
            self.finish()
        return
    self.clear()

    reason = None
    if 'exc_info' in kwargs:
        # Use the custom reason phrase of an HTTPError, if one was given.
        exception = kwargs['exc_info'][1]
        if isinstance(exception, HTTPError) and exception.reason:
            reason = exception.reason
    self.set_status(status_code, reason=reason)
    try:
        self.write_error(status_code, **kwargs)
    except Exception:
        # A failing error page must not take down the connection; log
        # and fall through to finish with whatever was already written.
        app_log.error("Uncaught exception in write_error", exc_info=True)
    if not self._finished:
        self.finish()
def write_error(self, status_code, **kwargs):
    """Override to implement custom error pages.

    ``write_error`` may call `write`, `render`, `set_header`, etc
    to produce output as usual.

    If this error was caused by an uncaught exception (including
    HTTPError), an ``exc_info`` triple will be available as
    ``kwargs["exc_info"]``.  Note that this exception may not be
    the "current" exception for purposes of methods like
    ``sys.exc_info()`` or ``traceback.format_exc``.

    For historical reasons, if a method ``get_error_html`` exists,
    it will be used instead of the default ``write_error`` implementation.
    ``get_error_html`` returned a string instead of producing output
    normally, and had different semantics for exception handling.
    Users of ``get_error_html`` are encouraged to convert their code
    to override ``write_error`` instead.
    """
    if hasattr(self, 'get_error_html'):
        # Legacy code path: adapt the deprecated get_error_html API.
        if 'exc_info' in kwargs:
            exc_info = kwargs.pop('exc_info')
            kwargs['exception'] = exc_info[1]
            try:
                # Put the traceback into sys.exc_info()
                raise_exc_info(exc_info)
            except Exception:
                self.finish(self.get_error_html(status_code, **kwargs))
        else:
            self.finish(self.get_error_html(status_code, **kwargs))
        return
    if self.settings.get("serve_traceback") and "exc_info" in kwargs:
        # in debug mode, try to send a traceback
        self.set_header('Content-Type', 'text/plain')
        for line in traceback.format_exception(*kwargs["exc_info"]):
            self.write(line)
        self.finish()
    else:
        self.finish("<html><title>%(code)d: %(message)s</title>"
                    "<body>%(code)d: %(message)s</body></html>" % {
                        "code": status_code,
                        "message": self._reason,
                    })
@property
def locale(self):
    """The locale for the current session.

    Determined by either `get_user_locale`, which you can override to
    set the locale based on, e.g., a user preference stored in a
    database, or `get_browser_locale`, which uses the
    ``Accept-Language`` header.
    """
    if not hasattr(self, "_locale"):
        # Prefer a user-specific locale; fall back to the browser's.
        self._locale = self.get_user_locale() or self.get_browser_locale()
    assert self._locale
    return self._locale
def get_user_locale(self):
    """Override to determine the locale from the authenticated user.

    If None is returned, we fall back to `get_browser_locale()`.
    The return value should be a `tornado.locale.Locale` object,
    most likely obtained via a call like ``tornado.locale.get("en")``.
    """
    # Default implementation: no user-specific locale.
    return None
def get_browser_locale(self, default="en_US"):
    """Determine the user's locale from the ``Accept-Language`` header.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
    """
    if "Accept-Language" in self.request.headers:
        scored = []
        for entry in self.request.headers["Accept-Language"].split(","):
            pieces = entry.strip().split(";")
            quality = 1.0
            # Honor an explicit "q=" quality value; malformed values
            # score 0.0 so they sort last.
            if len(pieces) > 1 and pieces[1].startswith("q="):
                try:
                    quality = float(pieces[1][2:])
                except (ValueError, TypeError):
                    quality = 0.0
            scored.append((pieces[0], quality))
        if scored:
            scored.sort(key=lambda pair: pair[1], reverse=True)
            return locale.get(*[code for code, _ in scored])
    return locale.get(default)
@property
def current_user(self):
    """The authenticated user for this request.

    This is a cached version of `get_current_user`, which you can
    override to set the user based on, e.g., a cookie.  If that method
    is not overridden, this property is always None.

    The user is lazily loaded on first access and cached afterwards.
    """
    if not hasattr(self, "_current_user"):
        self._current_user = self.get_current_user()
    return self._current_user

@current_user.setter
def current_user(self, value):
    # Allow handlers (e.g. login flows) to set the user explicitly,
    # bypassing get_current_user.
    self._current_user = value
def get_current_user(self):
    """Override to determine the current user from, e.g., a cookie."""
    # Default implementation: anonymous.
    return None
def get_login_url(self):
    """Override to customize the login URL based on the request.

    By default, we use the ``login_url`` application setting.
    """
    self.require_setting("login_url", "@tornado.web.authenticated")
    settings = self.application.settings
    return settings["login_url"]
def get_template_path(self):
    """Override to customize template path for each handler.

    By default, we use the ``template_path`` application setting.
    Return None to load templates relative to the calling file.
    """
    settings = self.application.settings
    return settings.get("template_path")
@property
def xsrf_token(self):
    """The XSRF-prevention token for the current user/session.

    To prevent cross-site request forgery, we set an '_xsrf' cookie
    and include the same '_xsrf' value as an argument with all POST
    requests.  If the two do not match, we reject the form submission
    as a potential forgery.

    See http://en.wikipedia.org/wiki/Cross-site_request_forgery

    NOTE(review): the token is emitted unmasked and reused for the whole
    session; newer Tornado versions mask it per-request (version-2
    tokens) to mitigate compression side channels — confirm whether an
    upgrade is warranted here.
    """
    if not hasattr(self, "_xsrf_token"):
        existing = self.get_cookie("_xsrf")
        if existing:
            self._xsrf_token = existing
        else:
            fresh = binascii.b2a_hex(os.urandom(16))
            # Keep the token for 30 days for logged-in users; make it a
            # session cookie otherwise.
            lifetime = 30 if self.current_user else None
            self.set_cookie("_xsrf", fresh, expires_days=lifetime)
            self._xsrf_token = fresh
    return self._xsrf_token
def check_xsrf_cookie(self):
    """Verify that the ``_xsrf`` cookie matches the ``_xsrf`` argument.

    To prevent cross-site request forgery, we set an ``_xsrf`` cookie
    and include the same value as a non-cookie field with all ``POST``
    requests.  If the two do not match, we reject the form submission
    as a potential forgery.

    The ``_xsrf`` value may be set as either a form field named
    ``_xsrf`` or in a custom HTTP header named ``X-XSRFToken`` or
    ``X-CSRFToken`` (the latter is accepted for compatibility with
    Django).

    See http://en.wikipedia.org/wiki/Cross-site_request_forgery

    Prior to release 1.1.1, this check was ignored if the HTTP header
    ``X-Requested-With: XMLHTTPRequest`` was present.  This exception
    has been shown to be insecure and has been removed.  For more
    information please see
    http://www.djangoproject.com/weblog/2011/feb/08/security/
    http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
    """
    # Accept the token from the form body or either compatibility header.
    supplied = self.get_argument("_xsrf", None)
    if not supplied:
        supplied = self.request.headers.get("X-Xsrftoken")
    if not supplied:
        supplied = self.request.headers.get("X-Csrftoken")
    if not supplied:
        raise HTTPError(403, "'_xsrf' argument missing from POST")
    # Constant-time comparison to avoid timing side channels.
    if not _time_independent_equals(utf8(self.xsrf_token), utf8(supplied)):
        raise HTTPError(403, "XSRF cookie does not match POST argument")
def xsrf_form_html(self):
    """An HTML ``<input/>`` element to be included with all POST forms.

    It defines the ``_xsrf`` input value, which we check on all POST
    requests to prevent cross-site request forgery.  If you have set
    the ``xsrf_cookies`` application setting, you must include this
    HTML within all of your HTML forms.

    In a template, this method should be called with ``{% module
    xsrf_form_html() %}``

    See `check_xsrf_cookie()` above for more information.
    """
    escaped = escape.xhtml_escape(self.xsrf_token)
    return '<input type="hidden" name="_xsrf" value="' + escaped + '"/>'
def static_url(self, path, include_host=None, **kwargs):
    """Return a static URL for the given relative static file path.

    This method requires you set the ``static_path`` setting in your
    application (which specifies the root directory of your static
    files).

    This method returns a versioned url (by default appending
    ``?v=<signature>``), which allows the static files to be cached
    indefinitely.  This can be disabled by passing
    ``include_version=False`` (in the default implementation; other
    static file implementations are not required to support this, but
    they may support other options).

    By default this method returns URLs relative to the current host,
    but if ``include_host`` is true the URL returned will be absolute.
    If this handler has an ``include_host`` attribute, that value will
    be used as the default for all `static_url` calls that do not pass
    ``include_host`` as a keyword argument.
    """
    self.require_setting("static_path", "static_url")
    # The configured handler class (StaticFileHandler by default)
    # decides how versioned URLs are built.
    get_url = self.settings.get("static_handler_class",
                                StaticFileHandler).make_static_url
    if include_host is None:
        include_host = getattr(self, "include_host", False)
    base = ""
    if include_host:
        base = self.request.protocol + "://" + self.request.host
    return base + get_url(self.settings, path, **kwargs)
def async_callback(self, callback, *args, **kwargs):
    """Obsolete - catches exceptions from the wrapped function.

    This function is unnecessary since Tornado 1.1.
    """
    if callback is None:
        return None
    if args or kwargs:
        # Pre-bind positional/keyword arguments, leaving the rest for
        # the eventual call site.
        callback = functools.partial(callback, *args, **kwargs)

    def wrapper(*call_args, **call_kwargs):
        try:
            return callback(*call_args, **call_kwargs)
        except Exception as e:
            if self._headers_written:
                app_log.error("Exception after headers written",
                              exc_info=True)
            else:
                self._handle_request_exception(e)
    return wrapper
def require_setting(self, name, feature="this feature"):
    """Raise an exception if the given app setting is not defined."""
    if self.application.settings.get(name):
        return
    raise Exception("You must define the '%s' setting in your "
                    "application to use %s" % (name, feature))
def reverse_url(self, name, *args):
    """Alias for `Application.reverse_url`."""
    app = self.application
    return app.reverse_url(name, *args)
def compute_etag(self):
    """Compute the Etag header value to be used for this request.

    By default this is a quoted SHA-1 hash of all content written so
    far.  May be overridden to provide custom etag implementations, or
    may return None to disable tornado's default etag support.
    """
    digest = hashlib.sha1()
    for piece in self._write_buffer:
        digest.update(piece)
    return '"%s"' % digest.hexdigest()
def set_etag_header(self):
    """Set the response's Etag header using ``self.compute_etag()``.

    Note: no header will be set if ``compute_etag()`` returns ``None``.
    This method is called automatically when the request is finished.
    """
    value = self.compute_etag()
    if value is not None:
        self.set_header("Etag", value)
def check_etag_header(self):
    """Checks the ``Etag`` header against requests's ``If-None-Match``.

    Returns ``True`` if the request's Etag matches and a 304 should be
    returned.  For example::

        self.set_etag_header()
        if self.check_etag_header():
            self.set_status(304)
            return

    This method is called automatically when the request is finished,
    but may be called earlier for applications that override
    `compute_etag` and want to do an early check for ``If-None-Match``
    before completing the request.  The ``Etag`` header should be set
    (perhaps with `set_etag_header`) before calling this method.
    """
    etag = self._headers.get("Etag")
    inm = utf8(self.request.headers.get("If-None-Match", ""))
    # NOTE(review): this is a substring search, not a proper parse of the
    # If-None-Match list; weak validators ("W/...") and quoting edge
    # cases are not handled.  Matches upstream behavior of this version.
    return bool(etag and inm and inm.find(etag) >= 0)
def _stack_context_handle_exception(self, type, value, traceback):
    # ExceptionStackContext handler: routes uncaught exceptions raised
    # from asynchronous callbacks into the normal error machinery.
    # Returning True tells the stack context the exception was handled.
    try:
        # For historical reasons _handle_request_exception only takes
        # the exception value instead of the full triple,
        # so re-raise the exception to ensure that it's in
        # sys.exc_info()
        raise_exc_info((type, value, traceback))
    except Exception:
        self._handle_request_exception(value)
    return True
def _execute(self, transforms, *args, **kwargs):
    """Executes this request with the given output transforms."""
    self._transforms = transforms
    try:
        if self.request.method not in self.SUPPORTED_METHODS:
            raise HTTPError(405)
        # Path arguments arrive percent-encoded from the router; decode
        # them before handing them to the verb method.
        self.path_args = [self.decode_argument(arg) for arg in args]
        self.path_kwargs = dict((k, self.decode_argument(v, name=k))
                                for (k, v) in kwargs.items())
        # If XSRF cookies are turned on, reject form submissions without
        # the proper cookie
        if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
                self.application.settings.get("xsrf_cookies"):
            self.check_xsrf_cookie()
        # prepare() may be asynchronous (return a Future); dispatch the
        # verb method once it completes.
        self._when_complete(self.prepare(), self._execute_method)
    except Exception as e:
        self._handle_request_exception(e)
def _when_complete(self, result, callback):
    # Run ``callback`` immediately when ``result`` is None or an
    # already-resolved Future; otherwise schedule it for when the
    # Future resolves.  Any exception (including a failed Future's) is
    # routed through _handle_request_exception.
    try:
        if result is None:
            callback()
        elif isinstance(result, Future):
            if result.done():
                # A prepare()/verb-method Future must resolve to None;
                # .result() also re-raises the Future's exception here.
                if result.result() is not None:
                    raise ValueError('Expected None, got %r' % result.result())
                callback()
            else:
                # Delayed import of IOLoop because it's not available
                # on app engine
                from tornado.ioloop import IOLoop
                IOLoop.current().add_future(
                    result, functools.partial(self._when_complete,
                                              callback=callback))
        else:
            raise ValueError("Expected Future or None, got %r" % result)
    except Exception as e:
        self._handle_request_exception(e)
def _execute_method(self):
if not self._finished:
method = getattr(self, self.request.method.lower())
self._when_complete(method(*self.path_args, **self.path_kwargs),
self._execute_finish)
def _execute_finish(self):
if self._auto_finish and not self._finished:
self.finish()
def _generate_headers(self):
    # Serialize the status line, all headers, and any newly-set cookies
    # into a raw HTTP/1.x header block (bytes), terminated by the blank
    # line that separates headers from the body.
    reason = self._reason
    lines = [utf8(self.request.version + " " +
                  str(self._status_code) +
                  " " + reason)]
    lines.extend([utf8(n) + b": " + utf8(v) for n, v in self._headers.get_all()])
    if hasattr(self, "_new_cookie"):
        # _new_cookie is a SimpleCookie populated by set_cookie(); each
        # morsel becomes its own Set-Cookie header.
        for cookie in self._new_cookie.values():
            lines.append(utf8("Set-Cookie: " + cookie.OutputString(None)))
    return b"\r\n".join(lines) + b"\r\n\r\n"
def _log(self):
"""Logs the current request.
Sort of deprecated since this functionality was moved to the
Application, but left in place for the benefit of existing apps
that have overridden this method.
"""
self.application.log_request(self)
def _request_summary(self):
return self.request.method + " " + self.request.uri + \
" (" + self.request.remote_ip + ")"
def _handle_request_exception(self, e):
    # Central funnel for all uncaught handler exceptions: log, then turn
    # the exception into an error response if one can still be sent.
    self.log_exception(*sys.exc_info())
    if self._finished:
        # Extra errors after the request has been finished should
        # be logged, but there is no reason to continue to try and
        # send a response.
        return
    if isinstance(e, HTTPError):
        if e.status_code not in httputil.responses and not e.reason:
            # An unknown status code with no reason phrase cannot be
            # serialized; degrade to a plain 500.
            gen_log.error("Bad HTTP status code: %d", e.status_code)
            self.send_error(500, exc_info=sys.exc_info())
        else:
            self.send_error(e.status_code, exc_info=sys.exc_info())
    else:
        self.send_error(500, exc_info=sys.exc_info())
def log_exception(self, typ, value, tb):
    """Override to customize logging of uncaught exceptions.

    By default logs instances of `HTTPError` as warnings without
    stack traces (on the ``tornado.general`` logger), and all
    other exceptions as errors with stack traces (on the
    ``tornado.application`` logger).

    .. versionadded:: 3.1
    """
    if isinstance(value, HTTPError):
        # HTTPErrors are expected control flow; log briefly, and only
        # when the error carries a log message.
        if value.log_message:
            format = "%d %s: " + value.log_message
            args = ([value.status_code, self._request_summary()] +
                    list(value.args))
            gen_log.warning(format, *args)
    else:
        app_log.error("Uncaught exception %s\n%r", self._request_summary(),
                      self.request, exc_info=(typ, value, tb))
def _ui_module(self, name, module):
def render(*args, **kwargs):
if not hasattr(self, "_active_modules"):
self._active_modules = {}
if name not in self._active_modules:
self._active_modules[name] = module(self)
rendered = self._active_modules[name].render(*args, **kwargs)
return rendered
return render
def _ui_method(self, method):
return lambda *args, **kwargs: method(self, *args, **kwargs)
def _clear_headers_for_304(self):
# 304 responses should not contain entity headers (defined in
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
# not explicitly allowed by
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
headers = ["Allow", "Content-Encoding", "Content-Language",
"Content-Length", "Content-MD5", "Content-Range",
"Content-Type", "Last-Modified"]
for h in headers:
self.clear_header(h)
def asynchronous(method):
    """Wrap request handler methods with this if they are asynchronous.

    This decorator is unnecessary if the method is also decorated with
    ``@gen.coroutine`` (it is legal but unnecessary to use the two
    decorators together, in which case ``@asynchronous`` must be
    first).

    This decorator should only be applied to the :ref:`HTTP verb
    methods <verbs>`; its behavior is undefined for any other method.
    This decorator does not *make* a method asynchronous; it tells
    the framework that the method *is* asynchronous.  For this decorator
    to be useful the method must (at least sometimes) do something
    asynchronous.

    If this decorator is given, the response is not finished when the
    method returns.  It is up to the request handler to call
    `self.finish() <RequestHandler.finish>` to finish the HTTP
    request.  Without this decorator, the request is automatically
    finished when the ``get()`` or ``post()`` method returns.  Example::

        class MyRequestHandler(web.RequestHandler):
            @web.asynchronous
            def get(self):
                http = httpclient.AsyncHTTPClient()
                http.fetch("http://friendfeed.com/", self._on_download)

            def _on_download(self, response):
                self.write("Downloaded!")
                self.finish()

    .. versionadded:: 3.1
       The ability to use ``@gen.coroutine`` without ``@asynchronous``.
    """
    # Delay the IOLoop import because it's not available on app engine.
    from tornado.ioloop import IOLoop

    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if self.application._wsgi:
            raise Exception("@asynchronous is not supported for WSGI apps")
        # Disable auto-finish: the handler is responsible for finish().
        self._auto_finish = False
        with stack_context.ExceptionStackContext(
                self._stack_context_handle_exception):
            result = method(self, *args, **kwargs)
            if isinstance(result, Future):
                # If @asynchronous is used with @gen.coroutine, (but
                # not @gen.engine), we can automatically finish the
                # request when the future resolves.  Additionally,
                # the Future will swallow any exceptions so we need
                # to throw them back out to the stack context to finish
                # the request.
                def future_complete(f):
                    f.result()
                    if not self._finished:
                        self.finish()
                IOLoop.current().add_future(result, future_complete)
                # Once we have done this, hide the Future from our
                # caller (i.e. RequestHandler._when_complete), which
                # would otherwise set up its own callback and
                # exception handler (resulting in exceptions being
                # logged twice).
                return None
            return result
    return wrapper
def removeslash(method):
    """Use this decorator to remove trailing slashes from the request path.

    For example, a request to ``/foo/`` would redirect to ``/foo`` with
    this decorator.  Your request handler mapping should use a regular
    expression like ``r'/foo/*'`` in conjunction with using the
    decorator.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        path = self.request.path
        if path.endswith("/"):
            if self.request.method not in ("GET", "HEAD"):
                raise HTTPError(404)
            stripped = path.rstrip("/")
            if stripped:  # don't try to redirect '/' to ''
                if self.request.query:
                    stripped += "?" + self.request.query
                self.redirect(stripped, permanent=True)
                return
        return method(self, *args, **kwargs)
    return wrapper
def addslash(method):
    """Use this decorator to add a missing trailing slash to the request path.

    For example, a request to ``/foo`` would redirect to ``/foo/`` with
    this decorator.  Your request handler mapping should use a regular
    expression like ``r'/foo/?'`` in conjunction with using the
    decorator.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if self.request.path.endswith("/"):
            return method(self, *args, **kwargs)
        if self.request.method not in ("GET", "HEAD"):
            raise HTTPError(404)
        target = self.request.path + "/"
        if self.request.query:
            target += "?" + self.request.query
        self.redirect(target, permanent=True)
    return wrapper
class Application(object):
    """A collection of request handlers that make up a web application.
    Instances of this class are callable and can be passed directly to
    HTTPServer to serve the application::
        application = web.Application([
            (r"/", MainPageHandler),
        ])
        http_server = httpserver.HTTPServer(application)
        http_server.listen(8080)
        ioloop.IOLoop.instance().start()
    The constructor for this class takes in a list of `URLSpec` objects
    or (regexp, request_class) tuples. When we receive requests, we
    iterate over the list in order and instantiate an instance of the
    first request class whose regexp matches the request path.
    The request class can be specified as either a class object or a
    (fully-qualified) name.
    Each tuple can contain additional elements, which correspond to the
    arguments to the `URLSpec` constructor. (Prior to Tornado 3.2,
    only tuples of two or three elements were allowed).
    A dictionary may be passed as the third element of the tuple,
    which will be used as keyword arguments to the handler's
    constructor and `~RequestHandler.initialize` method. This pattern
    is used for the `StaticFileHandler` in this example (note that a
    `StaticFileHandler` can be installed automatically with the
    static_path setting described below)::
        application = web.Application([
            (r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
        ])
    We support virtual hosts with the `add_handlers` method, which takes in
    a host regular expression as the first argument::
        application.add_handlers(r"www\.myhost\.com", [
            (r"/article/([0-9]+)", ArticleHandler),
        ])
    You can serve static files by sending the ``static_path`` setting
    as a keyword argument. We will serve those files from the
    ``/static/`` URI (this is configurable with the
    ``static_url_prefix`` setting), and we will serve ``/favicon.ico``
    and ``/robots.txt`` from the same directory. A custom subclass of
    `StaticFileHandler` can be specified with the
    ``static_handler_class`` setting.
    """
    def __init__(self, handlers=None, default_host="", transforms=None,
                 wsgi=False, **settings):
        # Default output transforms: gzip (opt-in via the "gzip" setting)
        # followed by chunked transfer encoding.
        if transforms is None:
            self.transforms = []
            if settings.get("gzip"):
                self.transforms.append(GZipContentEncoding)
            self.transforms.append(ChunkedTransferEncoding)
        else:
            self.transforms = transforms
        self.handlers = []
        self.named_handlers = {}
        self.default_host = default_host
        self.settings = settings
        # UI modules available to every template by default.
        self.ui_modules = {'linkify': _linkify,
                           'xsrf_form_html': _xsrf_form_html,
                           'Template': TemplateModule,
                           }
        self.ui_methods = {}
        self._wsgi = wsgi
        self._load_ui_modules(settings.get("ui_modules", {}))
        self._load_ui_methods(settings.get("ui_methods", {}))
        if self.settings.get("static_path"):
            # Prepend static-file routes (the configured prefix plus
            # favicon.ico and robots.txt) so they win over user handlers.
            path = self.settings["static_path"]
            handlers = list(handlers or [])
            static_url_prefix = settings.get("static_url_prefix",
                                             "/static/")
            static_handler_class = settings.get("static_handler_class",
                                                StaticFileHandler)
            static_handler_args = settings.get("static_handler_args", {})
            static_handler_args['path'] = path
            for pattern in [re.escape(static_url_prefix) + r"(.*)",
                            r"/(favicon\.ico)", r"/(robots\.txt)"]:
                handlers.insert(0, (pattern, static_handler_class,
                                    static_handler_args))
        if handlers:
            self.add_handlers(".*$", handlers)
        # Debug mode turns on several development conveniences unless
        # they were explicitly configured otherwise.
        if self.settings.get('debug'):
            self.settings.setdefault('autoreload', True)
            self.settings.setdefault('compiled_template_cache', False)
            self.settings.setdefault('static_hash_cache', False)
            self.settings.setdefault('serve_traceback', True)
        # Automatically reload modified modules
        if self.settings.get('autoreload') and not wsgi:
            from tornado import autoreload
            autoreload.start()
    def listen(self, port, address="", **kwargs):
        """Starts an HTTP server for this application on the given port.
        This is a convenience alias for creating an `.HTTPServer`
        object and calling its listen method. Keyword arguments not
        supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
        `.HTTPServer` constructor. For advanced uses
        (e.g. multi-process mode), do not use this method; create an
        `.HTTPServer` and call its
        `.TCPServer.bind`/`.TCPServer.start` methods directly.
        Note that after calling this method you still need to call
        ``IOLoop.instance().start()`` to start the server.
        """
        # import is here rather than top level because HTTPServer
        # is not importable on appengine
        from tornado.httpserver import HTTPServer
        server = HTTPServer(self, **kwargs)
        server.listen(port, address)
    def add_handlers(self, host_pattern, host_handlers):
        """Appends the given handlers to our handler list.
        Host patterns are processed sequentially in the order they were
        added. All matching patterns will be considered.
        """
        if not host_pattern.endswith("$"):
            host_pattern += "$"
        handlers = []
        # The handlers with the wildcard host_pattern are a special
        # case - they're added in the constructor but should have lower
        # precedence than the more-precise handlers added later.
        # If a wildcard handler group exists, it should always be last
        # in the list, so insert new groups just before it.
        if self.handlers and self.handlers[-1][0].pattern == '.*$':
            self.handlers.insert(-1, (re.compile(host_pattern), handlers))
        else:
            self.handlers.append((re.compile(host_pattern), handlers))
        for spec in host_handlers:
            if isinstance(spec, (tuple, list)):
                assert len(spec) in (2, 3, 4)
                spec = URLSpec(*spec)
            handlers.append(spec)
            if spec.name:
                # Later registrations win for reverse_url lookups.
                if spec.name in self.named_handlers:
                    app_log.warning(
                        "Multiple handlers named %s; replacing previous value",
                        spec.name)
                self.named_handlers[spec.name] = spec
    def add_transform(self, transform_class):
        # Register an extra OutputTransform class; one instance is
        # created per request in __call__.
        self.transforms.append(transform_class)
    def _get_host_handlers(self, request):
        # Return the URLSpecs whose host pattern matches the request's
        # Host header (port stripped), or None when nothing matches.
        host = request.host.lower().split(':')[0]
        matches = []
        for pattern, handlers in self.handlers:
            if pattern.match(host):
                matches.extend(handlers)
        # Look for default host if not behind load balancer (for debugging)
        if not matches and "X-Real-Ip" not in request.headers:
            for pattern, handlers in self.handlers:
                if pattern.match(self.default_host):
                    matches.extend(handlers)
        return matches or None
    def _load_ui_methods(self, methods):
        # Accepts a module, a list (of modules/dicts), or a dict of
        # name -> callable; keeps only public, lowercase-named callables.
        if isinstance(methods, types.ModuleType):
            self._load_ui_methods(dict((n, getattr(methods, n))
                                       for n in dir(methods)))
        elif isinstance(methods, list):
            for m in methods:
                self._load_ui_methods(m)
        else:
            for name, fn in methods.items():
                if not name.startswith("_") and hasattr(fn, "__call__") \
                        and name[0].lower() == name[0]:
                    self.ui_methods[name] = fn
    def _load_ui_modules(self, modules):
        # Accepts a module, a list, or a dict of name -> UIModule subclass;
        # non-class entries are skipped via the TypeError from issubclass.
        if isinstance(modules, types.ModuleType):
            self._load_ui_modules(dict((n, getattr(modules, n))
                                       for n in dir(modules)))
        elif isinstance(modules, list):
            for m in modules:
                self._load_ui_modules(m)
        else:
            assert isinstance(modules, dict)
            for name, cls in modules.items():
                try:
                    if issubclass(cls, UIModule):
                        self.ui_modules[name] = cls
                except TypeError:
                    pass
    def __call__(self, request):
        """Called by HTTPServer to execute the request."""
        transforms = [t(request) for t in self.transforms]
        handler = None
        args = []
        kwargs = {}
        handlers = self._get_host_handlers(request)
        if not handlers:
            # Unknown Host header: redirect to the default host.
            handler = RedirectHandler(
                self, request, url="http://" + self.default_host + "/")
        else:
            for spec in handlers:
                match = spec.regex.match(request.path)
                if match:
                    handler = spec.handler_class(self, request, **spec.kwargs)
                    if spec.regex.groups:
                        # None-safe wrapper around url_unescape to handle
                        # unmatched optional groups correctly
                        def unquote(s):
                            if s is None:
                                return s
                            return escape.url_unescape(s, encoding=None,
                                                       plus=False)
                        # Pass matched groups to the handler. Since
                        # match.groups() includes both named and unnamed groups,
                        # we want to use either groups or groupdict but not both.
                        # Note that args are passed as bytes so the handler can
                        # decide what encoding to use.
                        if spec.regex.groupindex:
                            kwargs = dict(
                                (str(k), unquote(v))
                                for (k, v) in match.groupdict().items())
                        else:
                            args = [unquote(s) for s in match.groups()]
                    break
            if not handler:
                # No route matched: fall back to the configured default
                # handler class, or a plain 404.
                if self.settings.get('default_handler_class'):
                    handler_class = self.settings['default_handler_class']
                    handler_args = self.settings.get(
                        'default_handler_args', {})
                else:
                    handler_class = ErrorHandler
                    handler_args = dict(status_code=404)
                handler = handler_class(self, request, **handler_args)
        # If template cache is disabled (usually in the debug mode),
        # re-compile templates and reload static files on every
        # request so you don't need to restart to see changes
        if not self.settings.get("compiled_template_cache", True):
            with RequestHandler._template_loader_lock:
                for loader in RequestHandler._template_loaders.values():
                    loader.reset()
        if not self.settings.get('static_hash_cache', True):
            StaticFileHandler.reset()
        handler._execute(transforms, *args, **kwargs)
        return handler
    def reverse_url(self, name, *args):
        """Returns a URL path for handler named ``name``
        The handler must be added to the application as a named `URLSpec`.
        Args will be substituted for capturing groups in the `URLSpec` regex.
        They will be converted to strings if necessary, encoded as utf8,
        and url-escaped.
        """
        if name in self.named_handlers:
            return self.named_handlers[name].reverse(*args)
        raise KeyError("%s not found in named urls" % name)
    def log_request(self, handler):
        """Writes a completed HTTP request to the logs.
        By default writes to the python root logger. To change
        this behavior either subclass Application and override this method,
        or pass a function in the application settings dictionary as
        ``log_function``.
        """
        if "log_function" in self.settings:
            self.settings["log_function"](handler)
            return
        # Log severity follows the response status class.
        if handler.get_status() < 400:
            log_method = access_log.info
        elif handler.get_status() < 500:
            log_method = access_log.warning
        else:
            log_method = access_log.error
        request_time = 1000.0 * handler.request.request_time()
        log_method("%d %s %.2fms", handler.get_status(),
                   handler._request_summary(), request_time)
class HTTPError(Exception):
    """An exception that will turn into an HTTP error response.

    Raising an `HTTPError` is a convenient alternative to calling
    `RequestHandler.send_error` since it automatically ends the
    current function.

    :arg int status_code: HTTP status code. Must be listed in
        `httplib.responses <http.client.responses>` unless the ``reason``
        keyword argument is given.
    :arg string log_message: Message to be written to the log for this error
        (will not be shown to the user unless the `Application` is in debug
        mode). May contain ``%s``-style placeholders, which will be filled
        in with remaining positional parameters.
    :arg string reason: Keyword-only argument. The HTTP "reason" phrase
        to pass in the status line along with ``status_code``. Normally
        determined automatically from ``status_code``, but can be used
        to use a non-standard numeric code.
    """
    def __init__(self, status_code, log_message=None, *args, **kwargs):
        # Stash everything needed later to build both the response
        # status line and the server-side log entry.
        self.status_code = status_code
        self.log_message = log_message
        self.args = args
        self.reason = kwargs.get('reason', None)

    def __str__(self):
        # Explicit reason wins; otherwise fall back to the standard
        # phrase for the status code.
        phrase = self.reason or httputil.responses.get(self.status_code,
                                                       'Unknown')
        message = "HTTP %d: %s" % (self.status_code, phrase)
        if not self.log_message:
            return message
        return message + " (" + (self.log_message % self.args) + ")"
class MissingArgumentError(HTTPError):
    """Exception raised by `RequestHandler.get_argument`.

    This is a subclass of `HTTPError`, so if it is uncaught a 400 response
    code will be used instead of 500 (and a stack trace will not be logged).

    .. versionadded:: 3.1
    """
    def __init__(self, arg_name):
        # A missing argument is a client error, hence the fixed 400 status.
        message = 'Missing argument %s' % arg_name
        super(MissingArgumentError, self).__init__(400, message)
        self.arg_name = arg_name
class ErrorHandler(RequestHandler):
    """Generates an error response with ``status_code`` for all requests."""

    def initialize(self, status_code):
        # Remember the configured status; ``prepare`` raises it for every
        # request regardless of HTTP method.
        self.set_status(status_code)

    def prepare(self):
        raise HTTPError(self._status_code)

    def check_xsrf_cookie(self):
        # POSTs to an ErrorHandler don't actually have side effects,
        # so we don't need to check the xsrf token. This allows POSTs
        # to the wrong url to return a 404 instead of 403.
        pass
class RedirectHandler(RequestHandler):
    """Redirects the client to the given URL for all GET requests.

    You should provide the keyword argument ``url`` to the handler, e.g.::

        application = web.Application([
            (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
        ])
    """
    def initialize(self, url, permanent=True):
        # ``permanent`` selects 301 (default) vs. 302 in ``redirect``.
        self._url = url
        self._permanent = permanent

    def get(self):
        self.redirect(self._url, permanent=self._permanent)
class StaticFileHandler(RequestHandler):
    """A simple handler that can serve static content from a directory.
    A `StaticFileHandler` is configured automatically if you pass the
    ``static_path`` keyword argument to `Application`. This handler
    can be customized with the ``static_url_prefix``, ``static_handler_class``,
    and ``static_handler_args`` settings.
    To map an additional path to this handler for a static data directory
    you would add a line to your application like::
        application = web.Application([
            (r"/content/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
        ])
    The handler constructor requires a ``path`` argument, which specifies the
    local root directory of the content to be served.
    Note that a capture group in the regex is required to parse the value for
    the ``path`` argument to the get() method (different than the constructor
    argument above); see `URLSpec` for details.
    To maximize the effectiveness of browser caching, this class supports
    versioned urls (by default using the argument ``?v=``). If a version
    is given, we instruct the browser to cache this file indefinitely.
    `make_static_url` (also available as `RequestHandler.static_url`) can
    be used to construct a versioned url.
    This handler is intended primarily for use in development and light-duty
    file serving; for heavy traffic it will be more efficient to use
    a dedicated static file server (such as nginx or Apache). We support
    the HTTP ``Accept-Ranges`` mechanism to return partial content (because
    some browsers require this functionality to be present to seek in
    HTML5 audio or video), but this handler should not be used with
    files that are too large to fit comfortably in memory.
    **Subclassing notes**
    This class is designed to be extensible by subclassing, but because
    of the way static urls are generated with class methods rather than
    instance methods, the inheritance patterns are somewhat unusual.
    Be sure to use the ``@classmethod`` decorator when overriding a
    class method. Instance methods may use the attributes ``self.path``
    ``self.absolute_path``, and ``self.modified``.
    Subclasses should only override methods discussed in this section;
    overriding other methods is error-prone. Overriding
    ``StaticFileHandler.get`` is particularly problematic due to the
    tight coupling with ``compute_etag`` and other methods.
    To change the way static urls are generated (e.g. to match the behavior
    of another server or CDN), override `make_static_url`, `parse_url_path`,
    `get_cache_time`, and/or `get_version`.
    To replace all interaction with the filesystem (e.g. to serve
    static content from a database), override `get_content`,
    `get_content_size`, `get_modified_time`, `get_absolute_path`, and
    `validate_absolute_path`.
    .. versionchanged:: 3.1
       Many of the methods for subclasses were added in Tornado 3.1.
    """
    CACHE_MAX_AGE = 86400 * 365 * 10  # 10 years
    _static_hashes = {}
    _lock = threading.Lock()  # protects _static_hashes

    def initialize(self, path, default_filename=None):
        # Note: ``path`` is re-validated against each requested file in
        # validate_absolute_path(); storing it here is not a security check.
        self.root = path
        self.default_filename = default_filename

    @classmethod
    def reset(cls):
        """Empty the cache of per-file version hashes."""
        with cls._lock:
            cls._static_hashes = {}

    def head(self, path):
        # HEAD is handled exactly like GET, minus the body.
        self.get(path, include_body=False)

    def get(self, path, include_body=True):
        # Set up our path instance variables.
        self.path = self.parse_url_path(path)
        del path  # make sure we don't refer to path instead of self.path again
        absolute_path = self.get_absolute_path(self.root, self.path)
        self.absolute_path = self.validate_absolute_path(
            self.root, absolute_path)
        if self.absolute_path is None:
            # validate_absolute_path already redirected or raised.
            return
        self.modified = self.get_modified_time()
        self.set_headers()
        if self.should_return_304():
            self.set_status(304)
            return
        request_range = None
        range_header = self.request.headers.get("Range")
        if range_header:
            # As per RFC 2616 14.16, if an invalid Range header is specified,
            # the request will be treated as if the header didn't exist.
            request_range = httputil._parse_request_range(range_header)
        if request_range:
            start, end = request_range
            size = self.get_content_size()
            if (start is not None and start >= size) or end == 0:
                # As per RFC 2616 14.35.1, a range is not satisfiable only: if
                # the first requested byte is equal to or greater than the
                # content, or when a suffix with length 0 is specified
                self.set_status(416)  # Range Not Satisfiable
                self.set_header("Content-Type", "text/plain")
                self.set_header("Content-Range", "bytes */%s" % (size, ))
                return
            if start is not None and start < 0:
                # Negative start means a suffix range ("last N bytes").
                start += size
            if end is not None and end > size:
                # Clients sometimes blindly use a large range to limit their
                # download size; cap the endpoint at the actual file size.
                end = size
            # Note: only return HTTP 206 if less than the entire range has been
            # requested. Not only is this semantically correct, but Chrome
            # refuses to play audio if it gets an HTTP 206 in response to
            # ``Range: bytes=0-``.
            if size != (end or size) - (start or 0):
                self.set_status(206)  # Partial Content
                self.set_header("Content-Range",
                                httputil._get_content_range(start, end, size))
        else:
            start = end = None
        content = self.get_content(self.absolute_path, start, end)
        if isinstance(content, bytes_type):
            content = [content]
        content_length = 0
        for chunk in content:
            if include_body:
                self.write(chunk)
            else:
                # For HEAD, only total the length; don't emit a body.
                content_length += len(chunk)
        if not include_body:
            assert self.request.method == "HEAD"
            self.set_header("Content-Length", content_length)

    def compute_etag(self):
        """Sets the ``Etag`` header based on static url version.
        This allows efficient ``If-None-Match`` checks against cached
        versions, and sends the correct ``Etag`` for a partial response
        (i.e. the same ``Etag`` as the full file).
        .. versionadded:: 3.1
        """
        version_hash = self._get_cached_version(self.absolute_path)
        if not version_hash:
            return None
        return '"%s"' % (version_hash, )

    def set_headers(self):
        """Sets the content and caching headers on the response.
        .. versionadded:: 3.1
        """
        self.set_header("Accept-Ranges", "bytes")
        self.set_etag_header()
        if self.modified is not None:
            self.set_header("Last-Modified", self.modified)
        content_type = self.get_content_type()
        if content_type:
            self.set_header("Content-Type", content_type)
        cache_time = self.get_cache_time(self.path, self.modified, content_type)
        if cache_time > 0:
            self.set_header("Expires", datetime.datetime.utcnow() +
                            datetime.timedelta(seconds=cache_time))
            self.set_header("Cache-Control", "max-age=" + str(cache_time))
        self.set_extra_headers(self.path)

    def should_return_304(self):
        """Returns True if the headers indicate that we should return 304.
        .. versionadded:: 3.1
        """
        if self.check_etag_header():
            return True
        # Check the If-Modified-Since, and don't send the result if the
        # content has not been modified
        ims_value = self.request.headers.get("If-Modified-Since")
        if ims_value is not None:
            date_tuple = email.utils.parsedate(ims_value)
            if date_tuple is not None:
                if_since = datetime.datetime(*date_tuple[:6])
                if if_since >= self.modified:
                    return True
        return False

    @classmethod
    def get_absolute_path(cls, root, path):
        """Returns the absolute location of ``path`` relative to ``root``.
        ``root`` is the path configured for this `StaticFileHandler`
        (in most cases the ``static_path`` `Application` setting).
        This class method may be overridden in subclasses. By default
        it returns a filesystem path, but other strings may be used
        as long as they are unique and understood by the subclass's
        overridden `get_content`.
        .. versionadded:: 3.1
        """
        abspath = os.path.abspath(os.path.join(root, path))
        return abspath

    def validate_absolute_path(self, root, absolute_path):
        """Validate and return the absolute path.
        ``root`` is the configured path for the `StaticFileHandler`,
        and ``path`` is the result of `get_absolute_path`
        This is an instance method called during request processing,
        so it may raise `HTTPError` or use methods like
        `RequestHandler.redirect` (return None after redirecting to
        halt further processing). This is where 404 errors for missing files
        are generated.
        This method may modify the path before returning it, but note that
        any such modifications will not be understood by `make_static_url`.
        In instance methods, this method's result is available as
        ``self.absolute_path``.
        .. versionadded:: 3.1
        """
        root = os.path.abspath(root)
        if not root.endswith(os.path.sep):
            # os.path.abspath strips a trailing /.
            # SECURITY FIX: without the trailing separator on ``root``, the
            # prefix test below would also accept sibling directories (e.g.
            # root ``/static`` matching ``/static_evil/secret``), allowing
            # files outside the static root to be served. Add the separator
            # back before comparing.
            root += os.path.sep
        # The trailing slash also needs to be temporarily added back to the
        # requested path, so that a request for the root directory itself
        # still matches.
        if not (absolute_path + os.path.sep).startswith(root):
            raise HTTPError(403, "%s is not in root static directory",
                            self.path)
        if (os.path.isdir(absolute_path) and
                self.default_filename is not None):
            # need to look at the request.path here for when path is empty
            # but there is some prefix to the path that was already
            # trimmed by the routing
            if not self.request.path.endswith("/"):
                self.redirect(self.request.path + "/", permanent=True)
                return
            absolute_path = os.path.join(absolute_path, self.default_filename)
        if not os.path.exists(absolute_path):
            raise HTTPError(404)
        if not os.path.isfile(absolute_path):
            raise HTTPError(403, "%s is not a file", self.path)
        return absolute_path

    @classmethod
    def get_content(cls, abspath, start=None, end=None):
        """Retrieve the content of the requested resource which is located
        at the given absolute path.
        This class method may be overridden by subclasses. Note that its
        signature is different from other overridable class methods
        (no ``settings`` argument); this is deliberate to ensure that
        ``abspath`` is able to stand on its own as a cache key.
        This method should either return a byte string or an iterator
        of byte strings. The latter is preferred for large files
        as it helps reduce memory fragmentation.
        .. versionadded:: 3.1
        """
        with open(abspath, "rb") as file:
            if start is not None:
                file.seek(start)
            if end is not None:
                remaining = end - (start or 0)
            else:
                remaining = None
            # Stream in 64KB chunks, clamped to the remaining range.
            while True:
                chunk_size = 64 * 1024
                if remaining is not None and remaining < chunk_size:
                    chunk_size = remaining
                chunk = file.read(chunk_size)
                if chunk:
                    if remaining is not None:
                        remaining -= len(chunk)
                    yield chunk
                else:
                    if remaining is not None:
                        assert remaining == 0
                    return

    @classmethod
    def get_content_version(cls, abspath):
        """Returns a version string for the resource at the given path.
        This class method may be overridden by subclasses. The
        default implementation is a hash of the file's contents.
        .. versionadded:: 3.1
        """
        data = cls.get_content(abspath)
        hasher = hashlib.md5()
        if isinstance(data, bytes_type):
            hasher.update(data)
        else:
            for chunk in data:
                hasher.update(chunk)
        return hasher.hexdigest()

    def _stat(self):
        # Cache the os.stat result for the lifetime of this request.
        if not hasattr(self, '_stat_result'):
            self._stat_result = os.stat(self.absolute_path)
        return self._stat_result

    def get_content_size(self):
        """Retrieve the total size of the resource at the given path.
        This method may be overridden by subclasses. It will only
        be called if a partial result is requested from `get_content`
        .. versionadded:: 3.1
        """
        stat_result = self._stat()
        return stat_result[stat.ST_SIZE]

    def get_modified_time(self):
        """Returns the time that ``self.absolute_path`` was last modified.
        May be overridden in subclasses. Should return a `~datetime.datetime`
        object or None.
        .. versionadded:: 3.1
        """
        stat_result = self._stat()
        modified = datetime.datetime.utcfromtimestamp(stat_result[stat.ST_MTIME])
        return modified

    def get_content_type(self):
        """Returns the ``Content-Type`` header to be used for this request.
        .. versionadded:: 3.1
        """
        mime_type, encoding = mimetypes.guess_type(self.absolute_path)
        return mime_type

    def set_extra_headers(self, path):
        """For subclass to add extra headers to the response"""
        pass

    def get_cache_time(self, path, modified, mime_type):
        """Override to customize cache control behavior.
        Return a positive number of seconds to make the result
        cacheable for that amount of time or 0 to mark resource as
        cacheable for an unspecified amount of time (subject to
        browser heuristics).
        By default returns cache expiry of 10 years for resources requested
        with ``v`` argument.
        """
        return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0

    @classmethod
    def make_static_url(cls, settings, path, include_version=True):
        """Constructs a versioned url for the given path.
        This method may be overridden in subclasses (but note that it
        is a class method rather than an instance method). Subclasses
        are only required to implement the signature
        ``make_static_url(cls, settings, path)``; other keyword
        arguments may be passed through `~RequestHandler.static_url`
        but are not standard.
        ``settings`` is the `Application.settings` dictionary. ``path``
        is the static path being requested. The url returned should be
        relative to the current host.
        ``include_version`` determines whether the generated URL should
        include the query string containing the version hash of the
        file corresponding to the given ``path``.
        """
        url = settings.get('static_url_prefix', '/static/') + path
        if not include_version:
            return url
        version_hash = cls.get_version(settings, path)
        if not version_hash:
            return url
        return '%s?v=%s' % (url, version_hash)

    def parse_url_path(self, url_path):
        """Converts a static URL path into a filesystem path.
        ``url_path`` is the path component of the URL with
        ``static_url_prefix`` removed. The return value should be
        filesystem path relative to ``static_path``.
        This is the inverse of `make_static_url`.
        """
        if os.path.sep != "/":
            url_path = url_path.replace("/", os.path.sep)
        return url_path

    @classmethod
    def get_version(cls, settings, path):
        """Generate the version string to be used in static URLs.
        ``settings`` is the `Application.settings` dictionary and ``path``
        is the relative location of the requested asset on the filesystem.
        The returned value should be a string, or ``None`` if no version
        could be determined.
        .. versionchanged:: 3.1
           This method was previously recommended for subclasses to override;
           `get_content_version` is now preferred as it allows the base
           class to handle caching of the result.
        """
        abs_path = cls.get_absolute_path(settings['static_path'], path)
        return cls._get_cached_version(abs_path)

    @classmethod
    def _get_cached_version(cls, abs_path):
        # Compute-once cache of content hashes, shared across instances
        # and guarded by the class-level lock.
        with cls._lock:
            hashes = cls._static_hashes
            if abs_path not in hashes:
                try:
                    hashes[abs_path] = cls.get_content_version(abs_path)
                except Exception:
                    gen_log.error("Could not open static file %r", abs_path)
                    hashes[abs_path] = None
            hsh = hashes.get(abs_path)
            if hsh:
                return hsh
        return None
class FallbackHandler(RequestHandler):
    """A `RequestHandler` that wraps another HTTP server callback.

    The fallback is a callable object that accepts an
    `~.httpserver.HTTPRequest`, such as an `Application` or
    `tornado.wsgi.WSGIContainer`. This is most useful to use both
    Tornado ``RequestHandlers`` and WSGI in the same server. Typical
    usage::

        wsgi_app = tornado.wsgi.WSGIContainer(
            django.core.handlers.wsgi.WSGIHandler())
        application = tornado.web.Application([
            (r"/foo", FooHandler),
            (r".*", FallbackHandler, dict(fallback=wsgi_app),
        ])
    """
    def initialize(self, fallback):
        # Any callable taking an HTTPRequest works as the fallback.
        self.fallback = fallback

    def prepare(self):
        # Delegate the whole request to the wrapped application, then mark
        # this handler finished so Tornado's normal dispatch is skipped.
        self.fallback(self.request)
        self._finished = True
class OutputTransform(object):
    """A transform modifies the result of an HTTP request (e.g., GZip encoding)

    A new transform instance is created for every request. See the
    ChunkedTransferEncoding example below if you want to implement a
    new Transform.
    """
    def __init__(self, request):
        # The base transform keeps no per-request state.
        pass

    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # Identity transform: status, headers and body pass through.
        return status_code, headers, chunk

    def transform_chunk(self, chunk, finishing):
        return chunk
class GZipContentEncoding(OutputTransform):
    """Applies the gzip content encoding to the response.
    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
    """
    # Only compress types where gzip typically helps; binary formats such
    # as images are usually pre-compressed.
    CONTENT_TYPES = set([
        "text/plain", "text/html", "text/css", "text/xml", "application/javascript",
        "application/x-javascript", "application/xml", "application/atom+xml",
        "text/javascript", "application/json", "application/xhtml+xml"])
    # Bodies shorter than this are not worth compressing.
    MIN_LENGTH = 5
    def __init__(self, request):
        # Only gzip for HTTP/1.1 clients that advertise gzip support.
        self._gzipping = request.supports_http_1_1() and \
            "gzip" in request.headers.get("Accept-Encoding", "")
    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # Advertise that the representation varies with Accept-Encoding.
        # NOTE(review): a bytes literal is appended to the existing Vary
        # value; if the header container stores native str values this
        # would raise TypeError on Python 3 -- confirm the value type.
        if 'Vary' in headers:
            headers['Vary'] += b', Accept-Encoding'
        else:
            headers['Vary'] = b'Accept-Encoding'
        if self._gzipping:
            ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
            # Compress only when: compressible type, body not trivially
            # small, and length/encoding are not already committed.
            self._gzipping = (ctype in self.CONTENT_TYPES) and \
                (not finishing or len(chunk) >= self.MIN_LENGTH) and \
                (finishing or "Content-Length" not in headers) and \
                ("Content-Encoding" not in headers)
        if self._gzipping:
            headers["Content-Encoding"] = "gzip"
            self._gzip_value = BytesIO()
            self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
            chunk = self.transform_chunk(chunk, finishing)
            if "Content-Length" in headers:
                # The compressed length is only known once compression ran.
                headers["Content-Length"] = str(len(chunk))
        return status_code, headers, chunk
    def transform_chunk(self, chunk, finishing):
        if self._gzipping:
            self._gzip_file.write(chunk)
            if finishing:
                self._gzip_file.close()
            else:
                self._gzip_file.flush()
            # Drain whatever compressed output is available so far and
            # reset the buffer for the next chunk.
            chunk = self._gzip_value.getvalue()
            self._gzip_value.truncate(0)
            self._gzip_value.seek(0)
        return chunk
class ChunkedTransferEncoding(OutputTransform):
    """Applies the chunked transfer encoding to the response.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1
    """
    def __init__(self, request):
        # Chunked framing is only valid for HTTP/1.1 clients.
        self._chunking = request.supports_http_1_1()

    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # 304 responses have no body (not even a zero-length body), and so
        # should not have either Content-Length or Transfer-Encoding headers.
        if self._chunking and status_code != 304:
            if "Content-Length" in headers or "Transfer-Encoding" in headers:
                # No need to chunk the output if a Content-Length is specified
                self._chunking = False
            else:
                headers["Transfer-Encoding"] = "chunked"
                chunk = self.transform_chunk(chunk, finishing)
        return status_code, headers, chunk

    def transform_chunk(self, block, finishing):
        if not self._chunking:
            return block
        # Don't write out empty chunks because that means END-OF-STREAM
        # with chunked encoding
        out = block
        if out:
            out = utf8("%x" % len(out)) + b"\r\n" + out + b"\r\n"
        if finishing:
            out += b"0\r\n\r\n"
        return out
def authenticated(method):
    """Decorate methods with this to require that the user be logged in.

    If the user is not logged in, they will be redirected to the configured
    `login url <RequestHandler.get_login_url>`.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        # Logged-in users go straight through to the wrapped method.
        if self.current_user:
            return method(self, *args, **kwargs)
        # Anonymous: only GET/HEAD get a redirect to the login page;
        # everything else is rejected outright.
        if self.request.method not in ("GET", "HEAD"):
            raise HTTPError(403)
        url = self.get_login_url()
        if "?" not in url:
            if urlparse.urlsplit(url).scheme:
                # if login url is absolute, make next absolute too
                next_url = self.request.full_url()
            else:
                next_url = self.request.uri
            url += "?" + urlencode(dict(next=next_url))
        self.redirect(url)
        return
    return wrapper
class UIModule(object):
    """A re-usable, modular UI unit on a page.

    UI modules often execute additional queries, and they can include
    additional CSS and JavaScript that will be included in the output
    page, which is automatically inserted on page render.
    """
    def __init__(self, handler):
        # Cache commonly-used handler attributes for convenient access
        # from subclasses and templates.
        self.handler = handler
        self.request = handler.request
        self.ui = handler.ui
        self.locale = handler.locale

    @property
    def current_user(self):
        return self.handler.current_user

    def render(self, *args, **kwargs):
        """Overridden in subclasses to return this module's output."""
        raise NotImplementedError()

    def embedded_javascript(self):
        """Returns a JavaScript string that will be embedded in the page."""
        return None

    def javascript_files(self):
        """Returns a list of JavaScript files required by this module."""
        return None

    def embedded_css(self):
        """Returns a CSS string that will be embedded in the page."""
        return None

    def css_files(self):
        """Returns a list of CSS files required by this module."""
        return None

    def html_head(self):
        """Returns an HTML string that will be put in the <head/> element"""
        return None

    def html_body(self):
        """Returns an HTML string that will be put in the <body/> element"""
        return None

    def render_string(self, path, **kwargs):
        """Renders a template and returns it as a string."""
        return self.handler.render_string(path, **kwargs)
class _linkify(UIModule):
    """Built-in ``linkify`` module: auto-links URLs found in *text*."""
    def render(self, text, **kwargs):
        return escape.linkify(text, **kwargs)
class _xsrf_form_html(UIModule):
    """Built-in module that emits the handler's XSRF token form field."""
    def render(self):
        return self.handler.xsrf_form_html()
class TemplateModule(UIModule):
    """UIModule that simply renders the given template.

    {% module Template("foo.html") %} is similar to {% include "foo.html" %},
    but the module version gets its own namespace (with kwargs passed to
    Template()) instead of inheriting the outer template's namespace.

    Templates rendered through this module also get access to UIModule's
    automatic javascript/css features. Simply call set_resources
    inside the template and give it keyword arguments corresponding to
    the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
    Note that these resources are output once per template file, not once
    per instantiation of the template, so they must not depend on
    any arguments to the template.
    """
    def __init__(self, handler):
        super(TemplateModule, self).__init__(handler)
        # keep resources in both a list and a dict to preserve order
        self._resource_list = []
        self._resource_dict = {}

    def render(self, path, **kwargs):
        def set_resources(**kwargs):
            # First call for a template records its resources; repeated
            # calls must pass the identical set.
            if path in self._resource_dict:
                if self._resource_dict[path] != kwargs:
                    raise ValueError("set_resources called with different "
                                     "resources for the same template")
            else:
                self._resource_list.append(kwargs)
                self._resource_dict[path] = kwargs
            return ""
        return self.render_string(path, set_resources=set_resources,
                                  **kwargs)

    def _get_resources(self, key):
        # Generator over recorded resources, preserving first-seen order.
        return (entry[key] for entry in self._resource_list if key in entry)

    def embedded_javascript(self):
        return "\n".join(self._get_resources("embedded_javascript"))

    def javascript_files(self):
        # Each entry may be a single path or an iterable of paths.
        files = []
        for item in self._get_resources("javascript_files"):
            if isinstance(item, (unicode_type, bytes_type)):
                files.append(item)
            else:
                files.extend(item)
        return files

    def embedded_css(self):
        return "\n".join(self._get_resources("embedded_css"))

    def css_files(self):
        # Each entry may be a single path or an iterable of paths.
        files = []
        for item in self._get_resources("css_files"):
            if isinstance(item, (unicode_type, bytes_type)):
                files.append(item)
            else:
                files.extend(item)
        return files

    def html_head(self):
        return "".join(self._get_resources("html_head"))

    def html_body(self):
        return "".join(self._get_resources("html_body"))
class _UIModuleNamespace(object):
"""Lazy namespace which creates UIModule proxies bound to a handler."""
def __init__(self, handler, ui_modules):
self.handler = handler
self.ui_modules = ui_modules
def __getitem__(self, key):
return self.handler._ui_module(key, self.ui_modules[key])
def __getattr__(self, key):
try:
return self[key]
except KeyError as e:
raise AttributeError(str(e))
class URLSpec(object):
    """Specifies a mapping between a URL pattern and a request handler."""
    def __init__(self, pattern, handler, kwargs=None, name=None):
        """Parameters:

        * ``pattern``: Regular expression to be matched.  Any groups
          in the regex will be passed in to the handler's get/post/etc
          methods as arguments.

        * ``handler``: `RequestHandler` subclass to be invoked, or a
          fully qualified dotted name (``module.ClassName``) which will
          be imported on demand.

        * ``kwargs`` (optional): A dictionary of additional arguments
          to be passed to the handler's constructor.

        * ``name`` (optional): A name for this handler.  Used by
          `Application.reverse_url`.
        """
        # Anchor the pattern so it must match the entire path.
        if not pattern.endswith('$'):
            pattern += '$'
        self.regex = re.compile(pattern)
        assert len(self.regex.groupindex) in (0, self.regex.groups), \
            ("groups in url regexes must either be all named or all "
             "positional: %r" % self.regex.pattern)
        if isinstance(handler, str):
            # import the Module and instantiate the class
            # Must be a fully qualified name (module.ClassName)
            handler = import_object(handler)
        self.handler_class = handler
        self.kwargs = kwargs or {}
        self.name = name
        self._path, self._group_count = self._find_groups()

    def __repr__(self):
        return '%s(%r, %s, kwargs=%r, name=%r)' % \
            (self.__class__.__name__, self.regex.pattern,
             self.handler_class, self.kwargs, self.name)

    def _find_groups(self):
        """Returns a tuple (reverse string, group count) for a url.

        For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
        would return ('/%s/%s/', 2).
        """
        pattern = self.regex.pattern
        if pattern.startswith('^'):
            pattern = pattern[1:]
        if pattern.endswith('$'):
            pattern = pattern[:-1]

        if self.regex.groups != pattern.count('('):
            # The pattern is too complicated for our simplistic matching,
            # so we can't support reversing it.
            return (None, None)

        pieces = []
        for fragment in pattern.split('('):
            if ')' in fragment:
                # str.index can never return a negative value, so the
                # old ``if paren_loc >= 0`` guard here was dead code.
                paren_loc = fragment.index(')')
                pieces.append('%s' + fragment[paren_loc + 1:])
            else:
                pieces.append(fragment)
        return (''.join(pieces), self.regex.groups)

    def reverse(self, *args):
        """Rebuild a URL for this spec, substituting ``args`` for groups.

        Raises AssertionError if the pattern cannot be reversed or the
        wrong number of arguments is supplied.
        """
        assert self._path is not None, \
            "Cannot reverse url regex " + self.regex.pattern
        assert len(args) == self._group_count, "required number of arguments "\
            "not found"
        if not len(args):
            return self._path
        converted_args = []
        for a in args:
            if not isinstance(a, (unicode_type, bytes_type)):
                a = str(a)
            converted_args.append(escape.url_escape(utf8(a), plus=False))
        return self._path % tuple(converted_args)


url = URLSpec
# Constant-time byte-string comparison, used to verify cookie signatures
# without leaking where the first mismatching byte is via response timing.
if hasattr(hmac, 'compare_digest'):  # python 3.3
    _time_independent_equals = hmac.compare_digest
else:
    def _time_independent_equals(a, b):
        # Fallback for interpreters without hmac.compare_digest: XOR
        # each byte pair and OR the results so the loop always runs to
        # completion regardless of where a mismatch occurs.  Only the
        # length check short-circuits; lengths are not secret here.
        if len(a) != len(b):
            return False
        result = 0
        if isinstance(a[0], int):  # python3 byte strings
            for x, y in zip(a, b):
                result |= x ^ y
        else:  # python2
            for x, y in zip(a, b):
                result |= ord(x) ^ ord(y)
        return result == 0
def create_signed_value(secret, name, value, version=None, clock=None):
    """Sign ``value`` for use in a tamper-proof cookie named ``name``.

    ``version`` selects the signing format (1 or 2; defaults to
    ``DEFAULT_SIGNED_VALUE_VERSION``).  ``clock`` may be overridden (e.g.
    in tests) to control the embedded timestamp; it defaults to
    ``time.time``.  Returns the signed value as bytes.
    """
    if version is None:
        version = DEFAULT_SIGNED_VALUE_VERSION
    if clock is None:
        clock = time.time

    timestamp = utf8(str(int(clock())))
    value = base64.b64encode(utf8(value))
    if version == 1:
        # Legacy format: payload|timestamp|HMAC-SHA1 signature.
        signature = _create_signature_v1(secret, name, value, timestamp)
        value = b"|".join([value, timestamp, signature])
        return value
    elif version == 2:
        # The v2 format consists of a version number and a series of
        # length-prefixed fields "%d:%s", the last of which is a
        # signature, all separated by pipes.  All numbers are in
        # decimal format with no leading zeros.  The signature is an
        # HMAC-SHA256 of the whole string up to that point, including
        # the final pipe.
        #
        # The fields are:
        # - format version (i.e. 2; no length prefix)
        # - key version (currently 0; reserved for future key rotation features)
        # - timestamp (integer seconds since epoch)
        # - name (not encoded; assumed to be ~alphanumeric)
        # - value (base64-encoded)
        # - signature (hex-encoded; no length prefix)
        def format_field(s):
            return utf8("%d:" % len(s)) + utf8(s)
        to_sign = b"|".join([
            b"2|1:0",  # format version 2, then key version "0" as a field
            format_field(timestamp),
            format_field(name),
            format_field(value),
            b''])
        signature = _create_signature_v2(secret, to_sign)
        return to_sign + signature
    else:
        raise ValueError("Unsupported version %d" % version)
# A leading version number in decimal with no leading zeros, followed by a pipe.
_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")


def decode_signed_value(secret, name, value, max_age_days=31, clock=None,
                        min_version=None):
    """Inverse of `create_signed_value`.

    Verifies the signature and returns the original (base64-decoded)
    payload, or None if the value is missing, tampered with, expired,
    or uses a format older than ``min_version``.
    """
    if clock is None:
        clock = time.time
    if min_version is None:
        min_version = DEFAULT_SIGNED_VALUE_MIN_VERSION
    if min_version > 2:
        raise ValueError("Unsupported min_version %d" % min_version)
    if not value:
        return None

    # Figure out what version this is.  Version 1 did not include an
    # explicit version field and started with arbitrary base64 data,
    # which makes this tricky.
    value = utf8(value)
    m = _signed_value_version_re.match(value)
    if m is None:
        version = 1
    else:
        try:
            version = int(m.group(1))
            if version > 999:
                # Certain payloads from the version-less v1 format may
                # be parsed as valid integers.  Due to base64 padding
                # restrictions, this can only happen for numbers whose
                # length is a multiple of 4, so we can treat all
                # numbers up to 999 as versions, and for the rest we
                # fall back to v1 format.
                version = 1
        except ValueError:
            version = 1

    if version < min_version:
        return None
    if version == 1:
        return _decode_signed_value_v1(secret, name, value, max_age_days, clock)
    elif version == 2:
        return _decode_signed_value_v2(secret, name, value, max_age_days, clock)
    else:
        return None
def _decode_signed_value_v1(secret, name, value, max_age_days, clock):
    """Verify and decode the legacy ``payload|timestamp|signature`` format."""
    parts = utf8(value).split(b"|")
    if len(parts) != 3:
        return None
    signature = _create_signature_v1(secret, name, parts[0], parts[1])
    # Constant-time comparison so attackers cannot recover the signature
    # byte-by-byte from response timing.
    if not _time_independent_equals(parts[2], signature):
        gen_log.warning("Invalid cookie signature %r", value)
        return None
    timestamp = int(parts[1])
    if timestamp < clock() - max_age_days * 86400:
        gen_log.warning("Expired cookie %r", value)
        return None
    if timestamp > clock() + 31 * 86400:
        # _cookie_signature does not hash a delimiter between the
        # parts of the cookie, so an attacker could transfer trailing
        # digits from the payload to the timestamp without altering the
        # signature.  For backwards compatibility, sanity-check timestamp
        # here instead of modifying _cookie_signature.
        gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
        return None
    if parts[1].startswith(b"0"):
        # A timestamp with a leading zero enables the same digit-shuffling
        # trick described above, so reject it outright.
        gen_log.warning("Tampered cookie %r", value)
        return None
    try:
        return base64.b64decode(parts[0])
    except Exception:
        return None
def _decode_signed_value_v2(secret, name, value, max_age_days, clock):
    """Verify and decode the length-prefixed v2 format.

    See the field layout documented in `create_signed_value`.
    """
    def _consume_field(s):
        # Each field looks like b"<length>:<bytes>|"; return the field
        # bytes and the remainder of the buffer.
        length, _, rest = s.partition(b':')
        n = int(length)
        field_value = rest[:n]
        # In python 3, indexing bytes returns small integers; we must
        # use a slice to get a byte string as in python 2.
        if rest[n:n + 1] != b'|':
            raise ValueError("malformed v2 signed value field")
        rest = rest[n + 1:]
        return field_value, rest
    rest = value[2:]  # remove version number
    try:
        key_version, rest = _consume_field(rest)
        timestamp, rest = _consume_field(rest)
        name_field, rest = _consume_field(rest)
        value_field, rest = _consume_field(rest)
    except ValueError:
        return None
    passed_sig = rest
    # The signature covers everything up to and including the final pipe.
    signed_string = value[:-len(passed_sig)]
    expected_sig = _create_signature_v2(secret, signed_string)
    # Constant-time comparison; see _time_independent_equals.
    if not _time_independent_equals(passed_sig, expected_sig):
        return None
    if name_field != utf8(name):
        return None
    timestamp = int(timestamp)
    if timestamp < clock() - max_age_days * 86400:
        # The signature has expired.
        return None
    try:
        return base64.b64decode(value_field)
    except Exception:
        return None
def _create_signature_v1(secret, *parts):
    """HMAC-SHA1 over the concatenated ``parts`` (hex digest, as bytes).

    Used only by the legacy v1 signed-value format.  Note that the parts
    are hashed without delimiters; `_decode_signed_value_v1` compensates
    with its timestamp sanity checks.
    """
    # Renamed local from ``hash`` to avoid shadowing the builtin.
    mac = hmac.new(utf8(secret), digestmod=hashlib.sha1)
    for part in parts:
        mac.update(utf8(part))
    return utf8(mac.hexdigest())
def _create_signature_v2(secret, s):
    """HMAC-SHA256 of ``s`` (hex digest, as bytes) for the v2 format."""
    # Renamed local from ``hash`` to avoid shadowing the builtin.
    mac = hmac.new(utf8(secret), digestmod=hashlib.sha256)
    mac.update(utf8(s))
    return utf8(mac.hexdigest())
"""Miscellaneous utility functions and classes.
This module is used internally by Tornado. It is not necessarily expected
that the functions and classes defined here will be useful to other
applications, but they are documented here in case they are.
The one public-facing part of this module is the `Configurable` class
and its `~Configurable.configure` method, which becomes a part of the
interface of its subclasses, including `.AsyncHTTPClient`, `.IOLoop`,
and `.Resolver`.
"""
from __future__ import absolute_import, division, print_function, with_statement
import array
import inspect
import os
import sys
import zlib
# Python 2/3 compatibility: ensure the name ``xrange`` always exists.
# On Python 3 it is simply an alias for ``range``.
try:
    xrange  # py2
except NameError:
    xrange = range  # py3
class ObjectDict(dict):
    """A dictionary that also exposes its keys as attributes.

    Reading a missing attribute raises `AttributeError` (rather than
    `KeyError`) so that `hasattr` works as expected.
    """

    def __getattr__(self, name):
        if name in self:
            return self[name]
        raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value
class GzipDecompressor(object):
    """Incrementally inflate a gzip-compressed byte stream.

    The interface mirrors `zlib.decompressobj` (without the optional
    arguments), but it understands gzip headers and checksums.
    """
    def __init__(self):
        # Magic parameter makes zlib module understand gzip header
        # http://stackoverflow.com/questions/1838699/how-can-i-decompress-a-gzip-stream-with-zlib
        # This works on cpython and pypy, but not jython.
        self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def decompress(self, value):
        """Decompress a chunk, returning newly-available data.

        Some data may be buffered for later processing; `flush` must
        be called when there is no more input data to ensure that
        all data was processed.
        """
        return self.decompressobj.decompress(value)

    def flush(self):
        """Return any remaining buffered data not yet returned by decompress.

        Also checks for errors such as truncated input.
        No other methods may be called on this object after `flush`.
        """
        return self.decompressobj.flush()
def import_object(name):
    """Import and return the object named by ``name``.

    ``import_object('x')`` is equivalent to ``import x``;
    ``import_object('x.y.z')`` is equivalent to ``from x.y import z``.
    Raises `ImportError` if the final attribute cannot be found.
    """
    if '.' not in name:
        return __import__(name, None, None)

    module_name, _, attr = name.rpartition('.')
    module = __import__(module_name, None, None, [attr], 0)
    try:
        return getattr(module, attr)
    except AttributeError:
        raise ImportError("No module named %s" % attr)
# Fake unicode literal support:  Python 3.2 doesn't have the u'' marker for
# literal strings, and alternative solutions like "from __future__ import
# unicode_literals" have other problems (see PEP 414).  u() can be applied
# to ascii strings that include \u escapes (but they must not contain
# literal non-ascii characters).
if type('') is not type(b''):
    # Python 3: str literals are already unicode.
    def u(s):
        return s
    bytes_type = bytes
    unicode_type = str
    basestring_type = str
else:
    # Python 2: decode the escape sequences to produce a unicode object.
    # (``unicode`` and ``basestring`` only exist on Python 2, so this
    # branch is never evaluated on Python 3.)
    def u(s):
        return s.decode('unicode_escape')
    bytes_type = str
    unicode_type = unicode
    basestring_type = basestring
# raise_exc_info and exec_in use syntax that is invalid on the "other"
# major Python version, so each implementation must be compiled from a
# string at import time instead of being defined normally.
if sys.version_info > (3,):
    exec("""
def raise_exc_info(exc_info):
    raise exc_info[1].with_traceback(exc_info[2])

def exec_in(code, glob, loc=None):
    if isinstance(code, str):
        code = compile(code, '<string>', 'exec', dont_inherit=True)
    exec(code, glob, loc)
""")
else:
    exec("""
def raise_exc_info(exc_info):
    raise exc_info[0], exc_info[1], exc_info[2]

def exec_in(code, glob, loc=None):
    if isinstance(code, basestring):
        # exec(string) inherits the caller's future imports; compile
        # the string first to prevent that.
        code = compile(code, '<string>', 'exec', dont_inherit=True)
    exec code in glob, loc
""")
class Configurable(object):
    """Base class for configurable interfaces.

    A configurable interface is an (abstract) class whose constructor
    acts as a factory function for one of its implementation subclasses.
    The implementation subclass as well as optional keyword arguments to
    its initializer can be set globally at runtime with `configure`.

    By using the constructor as the factory method, the interface
    looks like a normal class, `isinstance` works as usual, etc.  This
    pattern is most useful when the choice of implementation is likely
    to be a global decision (e.g. when `~select.epoll` is available,
    always use it instead of `~select.select`), or when a
    previously-monolithic class has been split into specialized
    subclasses.

    Configurable subclasses must define the class methods
    `configurable_base` and `configurable_default`, and use the instance
    method `initialize` instead of ``__init__``.
    """
    # Stored on each hierarchy's base class; name-mangled to
    # _Configurable__impl_class etc. to avoid clashes with subclasses.
    __impl_class = None
    __impl_kwargs = None

    def __new__(cls, **kwargs):
        base = cls.configurable_base()
        args = {}
        if cls is base:
            # Instantiating the base class: substitute the configured
            # (or default) implementation and its saved kwargs.
            impl = cls.configured_class()
            if base.__impl_kwargs:
                args.update(base.__impl_kwargs)
        else:
            # Instantiating a concrete subclass directly: use it as-is.
            impl = cls
        args.update(kwargs)
        instance = super(Configurable, cls).__new__(impl)
        # initialize vs __init__ chosen for compatibility with AsyncHTTPClient
        # singleton magic.  If we get rid of that we can switch to __init__
        # here too.
        instance.initialize(**args)
        return instance

    @classmethod
    def configurable_base(cls):
        """Returns the base class of a configurable hierarchy.

        This will normally return the class in which it is defined.
        (which is *not* necessarily the same as the cls classmethod parameter).
        """
        raise NotImplementedError()

    @classmethod
    def configurable_default(cls):
        """Returns the implementation class to be used if none is configured."""
        raise NotImplementedError()

    def initialize(self):
        """Initialize a `Configurable` subclass instance.

        Configurable classes should use `initialize` instead of ``__init__``.
        """

    @classmethod
    def configure(cls, impl, **kwargs):
        """Sets the class to use when the base class is instantiated.

        Keyword arguments will be saved and added to the arguments passed
        to the constructor.  This can be used to set global defaults for
        some parameters.
        """
        base = cls.configurable_base()
        if isinstance(impl, (unicode_type, bytes_type)):
            # Accept a dotted name and import it lazily.
            impl = import_object(impl)
        if impl is not None and not issubclass(impl, cls):
            raise ValueError("Invalid subclass of %s" % cls)
        base.__impl_class = impl
        base.__impl_kwargs = kwargs

    @classmethod
    def configured_class(cls):
        """Returns the currently configured class."""
        base = cls.configurable_base()
        if cls.__impl_class is None:
            base.__impl_class = cls.configurable_default()
        return base.__impl_class

    @classmethod
    def _save_configuration(cls):
        # Used (with _restore_configuration) to snapshot global state,
        # e.g. around test runs.
        base = cls.configurable_base()
        return (base.__impl_class, base.__impl_kwargs)

    @classmethod
    def _restore_configuration(cls, saved):
        base = cls.configurable_base()
        base.__impl_class = saved[0]
        base.__impl_kwargs = saved[1]
class ArgReplacer(object):
    """Replaces one value in an ``args, kwargs`` pair.

    Inspects the function signature to find an argument by name
    whether it is passed by position or keyword.  For use in decorators
    and similar wrappers.
    """
    def __init__(self, func, name):
        self.name = name
        # inspect.getargspec was removed in Python 3.11; getfullargspec
        # is a drop-in replacement for our use of the ``args`` attribute.
        # Fall back to getargspec on interpreters that lack it.
        spec_fn = getattr(inspect, 'getfullargspec', None) or inspect.getargspec
        try:
            self.arg_pos = spec_fn(func).args.index(self.name)
        except ValueError:
            # Not a positional parameter
            self.arg_pos = None

    def replace(self, new_value, args, kwargs):
        """Replace the named argument in ``args, kwargs`` with ``new_value``.

        Returns ``(old_value, args, kwargs)``.  The returned ``args`` and
        ``kwargs`` objects may not be the same as the input objects, or
        the input objects may be mutated.

        If the named argument was not found, ``new_value`` will be added
        to ``kwargs`` and None will be returned as ``old_value``.
        """
        if self.arg_pos is not None and len(args) > self.arg_pos:
            # The arg to replace is passed positionally
            old_value = args[self.arg_pos]
            args = list(args)  # *args is normally a tuple
            args[self.arg_pos] = new_value
        else:
            # The arg to replace is either omitted or passed by keyword.
            old_value = kwargs.get(self.name)
            kwargs[self.name] = new_value
        return old_value, args, kwargs
def _websocket_mask_python(mask, data):
"""Websocket masking function.
`mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length.
Returns a `bytes` object of the same length as `data` with the mask applied
as specified in section 5.3 of RFC 6455.
This pure-python implementation may be replaced by an optimized version when available.
"""
mask = array.array("B", mask)
unmasked = array.array("B", data)
for i in xrange(len(data)):
unmasked[i] = unmasked[i] ^ mask[i % 4]
if hasattr(unmasked, 'tobytes'):
# tostring was deprecated in py32. It hasn't been removed,
# but since we turn on deprecation warnings in our tests
# we need to use the right one.
return unmasked.tobytes()
else:
return unmasked.tostring()
# Select the websocket masking implementation: the C extension from
# tornado.speedups when available, otherwise the pure-python fallback.
if (os.environ.get('TORNADO_NO_EXTENSION') or
        os.environ.get('TORNADO_EXTENSION') == '0'):
    # These environment variables exist to make it easier to do performance
    # comparisons; they are not guaranteed to remain supported in the future.
    _websocket_mask = _websocket_mask_python
else:
    try:
        from tornado.speedups import websocket_mask as _websocket_mask
    except ImportError:
        # TORNADO_EXTENSION=1 turns a missing C extension into a hard error.
        if os.environ.get('TORNADO_EXTENSION') == '1':
            raise
        _websocket_mask = _websocket_mask_python
def doctests():
    """Return a `doctest.DocTestSuite` collecting this module's doctests."""
    import doctest
    return doctest.DocTestSuite()
"""Implementation of the WebSocket protocol.
`WebSockets <http://dev.w3.org/html5/websockets/>`_ allow for bidirectional
communication between the browser and server.
.. warning::
The WebSocket protocol was recently finalized as `RFC 6455
<http://tools.ietf.org/html/rfc6455>`_ and is not yet supported in
all browsers. Refer to http://caniuse.com/websockets for details
on compatibility. In addition, during development the protocol
went through several incompatible versions, and some browsers only
support older versions. By default this module only supports the
latest version of the protocol, but optional support for an older
version (known as "draft 76" or "hixie-76") can be enabled by
overriding `WebSocketHandler.allow_draft76` (see that method's
documentation for caveats).
"""
from __future__ import absolute_import, division, print_function, with_statement
# Author: Jacob Kristhammar, 2010
import base64
import collections
import functools
import hashlib
import os
import struct
import time
import tornado.escape
import tornado.web
from tornado.concurrent import TracebackFuture
from tornado.escape import utf8, native_str
from tornado import httpclient, httputil
from tornado.ioloop import IOLoop
from tornado.iostream import StreamClosedError
from tornado.log import gen_log, app_log
from tornado.netutil import Resolver
from tornado import simple_httpclient
from tornado.util import bytes_type, unicode_type, _websocket_mask
class WebSocketError(Exception):
    """Base class for exceptions raised by this websocket module."""
    pass
class WebSocketClosedError(WebSocketError):
    """Raised by operations on a closed connection.

    .. versionadded:: 3.2
    """
    pass
class WebSocketHandler(tornado.web.RequestHandler):
    """Subclass this class to create a basic WebSocket handler.

    Override `on_message` to handle incoming messages, and use
    `write_message` to send messages to the client. You can also
    override `open` and `on_close` to handle opened and closed
    connections.

    See http://dev.w3.org/html5/websockets/ for details on the
    JavaScript interface.  The protocol is specified at
    http://tools.ietf.org/html/rfc6455.

    Here is an example WebSocket handler that echos back all received messages
    back to the client::

      class EchoWebSocket(websocket.WebSocketHandler):
          def open(self):
              print "WebSocket opened"

          def on_message(self, message):
              self.write_message(u"You said: " + message)

          def on_close(self):
              print "WebSocket closed"

    WebSockets are not standard HTTP connections. The "handshake" is
    HTTP, but after the handshake, the protocol is
    message-based. Consequently, most of the Tornado HTTP facilities
    are not available in handlers of this type. The only communication
    methods available to you are `write_message()`, `ping()`, and
    `close()`. Likewise, your request handler class should implement
    `open()` method rather than ``get()`` or ``post()``.

    If you map the handler above to ``/websocket`` in your application, you can
    invoke it in JavaScript with::

      var ws = new WebSocket("ws://localhost:8888/websocket");
      ws.onopen = function() {
         ws.send("Hello, world");
      };
      ws.onmessage = function (evt) {
         alert(evt.data);
      };

    This script pops up an alert box that says "You said: Hello, world".
    """
    def __init__(self, application, request, **kwargs):
        tornado.web.RequestHandler.__init__(self, application, request,
                                            **kwargs)
        # All websocket traffic bypasses the normal HTTP output path and
        # is written directly to this IOStream.
        self.stream = request.connection.stream
        # Set by _execute once the handshake selects a protocol version.
        self.ws_connection = None

    def _execute(self, transforms, *args, **kwargs):
        # Saved so the selected protocol can pass them to open() after
        # the handshake completes.
        self.open_args = args
        self.open_kwargs = kwargs

        # Websocket only supports GET method
        if self.request.method != 'GET':
            self.stream.write(tornado.escape.utf8(
                "HTTP/1.1 405 Method Not Allowed\r\n\r\n"
            ))
            self.stream.close()
            return

        # Upgrade header should be present and should be equal to WebSocket
        if self.request.headers.get("Upgrade", "").lower() != 'websocket':
            self.stream.write(tornado.escape.utf8(
                "HTTP/1.1 400 Bad Request\r\n\r\n"
                "Can \"Upgrade\" only to \"WebSocket\"."
            ))
            self.stream.close()
            return

        # Connection header should be upgrade. Some proxy servers/load balancers
        # might mess with it.
        headers = self.request.headers
        connection = map(lambda s: s.strip().lower(), headers.get("Connection", "").split(","))
        if 'upgrade' not in connection:
            self.stream.write(tornado.escape.utf8(
                "HTTP/1.1 400 Bad Request\r\n\r\n"
                "\"Connection\" must be \"Upgrade\"."
            ))
            self.stream.close()
            return

        # The difference between version 8 and 13 is that in 8 the
        # client sends a "Sec-Websocket-Origin" header and in 13 it's
        # simply "Origin".
        if self.request.headers.get("Sec-WebSocket-Version") in ("7", "8", "13"):
            self.ws_connection = WebSocketProtocol13(self)
            self.ws_connection.accept_connection()
        elif (self.allow_draft76() and
              "Sec-WebSocket-Version" not in self.request.headers):
            self.ws_connection = WebSocketProtocol76(self)
            self.ws_connection.accept_connection()
        else:
            # Unsupported protocol version: advertise the one we speak.
            self.stream.write(tornado.escape.utf8(
                "HTTP/1.1 426 Upgrade Required\r\n"
                "Sec-WebSocket-Version: 8\r\n\r\n"))
            self.stream.close()

    def write_message(self, message, binary=False):
        """Sends the given message to the client of this Web Socket.

        The message may be either a string or a dict (which will be
        encoded as json).  If the ``binary`` argument is false, the
        message will be sent as utf8; in binary mode any byte string
        is allowed.

        If the connection is already closed, raises `WebSocketClosedError`.

        .. versionchanged:: 3.2
           `WebSocketClosedError` was added (previously a closed connection
           would raise an `AttributeError`)
        """
        if self.ws_connection is None:
            raise WebSocketClosedError()
        if isinstance(message, dict):
            message = tornado.escape.json_encode(message)
        self.ws_connection.write_message(message, binary=binary)

    def select_subprotocol(self, subprotocols):
        """Invoked when a new WebSocket requests specific subprotocols.

        ``subprotocols`` is a list of strings identifying the
        subprotocols proposed by the client.  This method may be
        overridden to return one of those strings to select it, or
        ``None`` to not select a subprotocol.  Failure to select a
        subprotocol does not automatically abort the connection,
        although clients may close the connection if none of their
        proposed subprotocols was selected.
        """
        return None

    def open(self):
        """Invoked when a new WebSocket is opened.

        The arguments to `open` are extracted from the `tornado.web.URLSpec`
        regular expression, just like the arguments to
        `tornado.web.RequestHandler.get`.
        """
        pass

    def on_message(self, message):
        """Handle incoming messages on the WebSocket

        This method must be overridden.
        """
        raise NotImplementedError

    def ping(self, data):
        """Send ping frame to the remote end."""
        if self.ws_connection is None:
            raise WebSocketClosedError()
        self.ws_connection.write_ping(data)

    def on_pong(self, data):
        """Invoked when the response to a ping frame is received."""
        pass

    def on_close(self):
        """Invoked when the WebSocket is closed."""
        pass

    def close(self):
        """Closes this Web Socket.

        Once the close handshake is successful the socket will be closed.
        """
        if self.ws_connection:
            self.ws_connection.close()
            self.ws_connection = None

    def allow_draft76(self):
        """Override to enable support for the older "draft76" protocol.

        The draft76 version of the websocket protocol is disabled by
        default due to security concerns, but it can be enabled by
        overriding this method to return True.

        Connections using the draft76 protocol do not support the
        ``binary=True`` flag to `write_message`.

        Support for the draft76 protocol is deprecated and will be
        removed in a future version of Tornado.
        """
        return False

    def set_nodelay(self, value):
        """Set the no-delay flag for this stream.

        By default, small messages may be delayed and/or combined to minimize
        the number of packets sent.  This can sometimes cause 200-500ms delays
        due to the interaction between Nagle's algorithm and TCP delayed
        ACKs.  To reduce this delay (at the expense of possibly increasing
        bandwidth usage), call ``self.set_nodelay(True)`` once the websocket
        connection is established.

        See `.BaseIOStream.set_nodelay` for additional details.

        .. versionadded:: 3.1
        """
        self.stream.set_nodelay(value)

    def get_websocket_scheme(self):
        """Return the url scheme used for this request, either "ws" or "wss".

        This is normally decided by HTTPServer, but applications
        may wish to override this if they are using an SSL proxy
        that does not provide the X-Scheme header as understood
        by HTTPServer.

        Note that this is only used by the draft76 protocol.
        """
        return "wss" if self.request.protocol == "https" else "ws"

    def async_callback(self, callback, *args, **kwargs):
        """Obsolete - catches exceptions from the wrapped function.

        This function is normally unnecessary thanks to
        `tornado.stack_context`.
        """
        return self.ws_connection.async_callback(callback, *args, **kwargs)

    def _not_supported(self, *args, **kwargs):
        # Installed below over the regular RequestHandler output methods,
        # which are meaningless once the connection has been upgraded.
        raise Exception("Method not supported for Web Sockets")

    def on_connection_close(self):
        if self.ws_connection:
            self.ws_connection.on_connection_close()
            self.ws_connection = None
            self.on_close()
# After the upgrade handshake the connection is no longer HTTP, so the
# normal RequestHandler output methods are replaced with a stub that
# raises (see WebSocketHandler._not_supported).
for method in ["write", "redirect", "set_header", "send_error", "set_cookie",
               "set_status", "flush", "finish"]:
    setattr(WebSocketHandler, method, WebSocketHandler._not_supported)
class WebSocketProtocol(object):
    """Base class for WebSocket protocol versions."""

    def __init__(self, handler):
        self.handler = handler
        self.request = handler.request
        self.stream = handler.stream
        self.client_terminated = False
        self.server_terminated = False

    def async_callback(self, callback, *args, **kwargs):
        """Wrap callbacks with this if they are used on asynchronous requests.

        Catches exceptions properly and closes this WebSocket if an exception
        is uncaught.
        """
        if args or kwargs:
            # Pre-bind the leading arguments; the wrapper forwards only
            # whatever the event loop supplies later.
            callback = functools.partial(callback, *args, **kwargs)

        def guarded(*call_args, **call_kwargs):
            try:
                return callback(*call_args, **call_kwargs)
            except Exception:
                app_log.error("Uncaught exception in %s",
                              self.request.path, exc_info=True)
                self._abort()
        return guarded

    def on_connection_close(self):
        self._abort()

    def _abort(self):
        """Instantly aborts the WebSocket connection by closing the socket"""
        self.client_terminated = True
        self.server_terminated = True
        self.stream.close()  # forcibly tear down the connection
        self.close()  # let the subclass cleanup
class WebSocketProtocol76(WebSocketProtocol):
"""Implementation of the WebSockets protocol, version hixie-76.
This class provides basic functionality to process WebSockets requests as
specified in
http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76
"""
def __init__(self, handler):
WebSocketProtocol.__init__(self, handler)
self.challenge = None
self._waiting = None
def accept_connection(self):
try:
self._handle_websocket_headers()
except ValueError:
gen_log.debug("Malformed WebSocket request received")
self._abort()
return
scheme = self.handler.get_websocket_scheme()
# draft76 only allows a single subprotocol
subprotocol_header = ''
subprotocol = self.request.headers.get("Sec-WebSocket-Protocol", None)
if subprotocol:
selected = self.handler.select_subprotocol([subprotocol])
if selected:
assert selected == subprotocol
subprotocol_header = "Sec-WebSocket-Protocol: %s\r\n" % selected
# Write the initial headers before attempting to read the challenge.
# This is necessary when using proxies (such as HAProxy), which
# need to see the Upgrade headers before passing through the
# non-HTTP traffic that follows.
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 101 WebSocket Protocol Handshake\r\n"
"Upgrade: WebSocket\r\n"
"Connection: Upgrade\r\n"
"Server: TornadoServer/%(version)s\r\n"
"Sec-WebSocket-Origin: %(origin)s\r\n"
"Sec-WebSocket-Location: %(scheme)s://%(host)s%(uri)s\r\n"
"%(subprotocol)s"
"\r\n" % (dict(
version=tornado.version,
origin=self.request.headers["Origin"],
scheme=scheme,
host=self.request.host,
uri=self.request.uri,
subprotocol=subprotocol_header))))
self.stream.read_bytes(8, self._handle_challenge)
def challenge_response(self, challenge):
"""Generates the challenge response that's needed in the handshake
The challenge parameter should be the raw bytes as sent from the
client.
"""
key_1 = self.request.headers.get("Sec-Websocket-Key1")
key_2 = self.request.headers.get("Sec-Websocket-Key2")
try:
part_1 = self._calculate_part(key_1)
part_2 = self._calculate_part(key_2)
except ValueError:
raise ValueError("Invalid Keys/Challenge")
return self._generate_challenge_response(part_1, part_2, challenge)
def _handle_challenge(self, challenge):
try:
challenge_response = self.challenge_response(challenge)
except ValueError:
gen_log.debug("Malformed key data in WebSocket request")
self._abort()
return
self._write_response(challenge_response)
def _write_response(self, challenge):
self.stream.write(challenge)
self.async_callback(self.handler.open)(*self.handler.open_args, **self.handler.open_kwargs)
self._receive_message()
def _handle_websocket_headers(self):
"""Verifies all invariant- and required headers
If a header is missing or have an incorrect value ValueError will be
raised
"""
fields = ("Origin", "Host", "Sec-Websocket-Key1",
"Sec-Websocket-Key2")
if not all(map(lambda f: self.request.headers.get(f), fields)):
raise ValueError("Missing/Invalid WebSocket headers")
def _calculate_part(self, key):
"""Processes the key headers and calculates their key value.
Raises ValueError when feed invalid key."""
# pyflakes complains about variable reuse if both of these lines use 'c'
number = int(''.join(c for c in key if c.isdigit()))
spaces = len([c2 for c2 in key if c2.isspace()])
try:
key_number = number // spaces
except (ValueError, ZeroDivisionError):
raise ValueError
return struct.pack(">I", key_number)
def _generate_challenge_response(self, part_1, part_2, part_3):
m = hashlib.md5()
m.update(part_1)
m.update(part_2)
m.update(part_3)
return m.digest()
def _receive_message(self):
self.stream.read_bytes(1, self._on_frame_type)
def _on_frame_type(self, byte):
frame_type = ord(byte)
if frame_type == 0x00:
self.stream.read_until(b"\xff", self._on_end_delimiter)
elif frame_type == 0xff:
self.stream.read_bytes(1, self._on_length_indicator)
else:
self._abort()
def _on_end_delimiter(self, frame):
    # Strip the trailing 0xff terminator and hand the UTF-8 payload to the
    # application callback; keep reading unless the callback closed us.
    if not self.client_terminated:
        self.async_callback(self.handler.on_message)(
            frame[:-1].decode("utf-8", "replace"))
    if not self.client_terminated:
        self._receive_message()
def _on_length_indicator(self, byte):
    # Only a zero-length closing frame (0xff 0x00) is valid here.
    if ord(byte) != 0x00:
        self._abort()
        return
    self.client_terminated = True
    self.close()
def write_message(self, message, binary=False):
    """Send *message* to the client, framed per draft76 (0x00 ... 0xff)."""
    if binary:
        raise ValueError(
            "Binary messages not supported by this version of websockets")
    payload = message
    if isinstance(payload, unicode_type):
        payload = payload.encode("utf-8")
    assert isinstance(payload, bytes_type)
    self.stream.write(b"\x00" + payload + b"\xff")
def write_ping(self, data):
    """Draft76 defines no ping frame; always refuse."""
    raise ValueError("Ping messages not supported by this version of websockets")
def close(self):
    """Closes the WebSocket connection.

    Sends the draft76 closing frame if we have not already done so, then
    tears the stream down once the client has also terminated, or after
    a 5 second grace period.
    """
    if not self.server_terminated:
        if not self.stream.closed():
            # Bug fix: the closing frame must be written as bytes; a text
            # string here raises TypeError on Python 3 iostreams.
            self.stream.write(b"\xff\x00")
        self.server_terminated = True
    if self.client_terminated:
        if self._waiting is not None:
            self.stream.io_loop.remove_timeout(self._waiting)
            self._waiting = None
        self.stream.close()
    elif self._waiting is None:
        # Use the IOLoop's clock for the deadline (consistent with
        # WebSocketProtocol13.close and correct when the IOLoop uses a
        # monotonic clock instead of time.time).
        self._waiting = self.stream.io_loop.add_timeout(
            self.stream.io_loop.time() + 5, self._abort)
class WebSocketProtocol13(WebSocketProtocol):
    """Implementation of the WebSocket protocol from RFC 6455.

    This class supports versions 7 and 8 of the protocol in addition to the
    final version 13.
    """
    def __init__(self, handler, mask_outgoing=False):
        WebSocketProtocol.__init__(self, handler)
        # Clients must mask outgoing frames; servers must not (RFC 6455 5.3).
        self.mask_outgoing = mask_outgoing
        # Per-frame parser state, filled in as the frame header is read.
        self._final_frame = False
        self._frame_opcode = None
        self._masked_frame = None
        self._frame_mask = None
        self._frame_length = None
        # Reassembly state for fragmented messages.
        self._fragmented_message_buffer = None
        self._fragmented_message_opcode = None
        # Pending close-timeout handle, if any.
        self._waiting = None

    def accept_connection(self):
        try:
            self._handle_websocket_headers()
            self._accept_connection()
        except ValueError:
            gen_log.debug("Malformed WebSocket request received", exc_info=True)
            self._abort()
            return

    def _handle_websocket_headers(self):
        """Verifies all invariant- and required headers.

        If a header is missing or has an incorrect value, ValueError will be
        raised.
        """
        fields = ("Host", "Sec-Websocket-Key", "Sec-Websocket-Version")
        if not all(map(lambda f: self.request.headers.get(f), fields)):
            raise ValueError("Missing/Invalid WebSocket headers")

    @staticmethod
    def compute_accept_value(key):
        """Computes the value for the Sec-WebSocket-Accept header,
        given the value for Sec-WebSocket-Key.
        """
        sha1 = hashlib.sha1()
        sha1.update(utf8(key))
        # Fixed GUID defined by RFC 6455, section 1.3.
        sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")  # Magic value
        return native_str(base64.b64encode(sha1.digest()))

    def _challenge_response(self):
        return WebSocketProtocol13.compute_accept_value(
            self.request.headers.get("Sec-Websocket-Key"))

    def _accept_connection(self):
        subprotocol_header = ''
        subprotocols = self.request.headers.get("Sec-WebSocket-Protocol", '')
        subprotocols = [s.strip() for s in subprotocols.split(',')]
        # NOTE(review): when the header is absent this is [''] (truthy), so
        # select_subprotocol is still consulted with an empty name.
        if subprotocols:
            selected = self.handler.select_subprotocol(subprotocols)
            if selected:
                assert selected in subprotocols
                subprotocol_header = "Sec-WebSocket-Protocol: %s\r\n" % selected
        self.stream.write(tornado.escape.utf8(
            "HTTP/1.1 101 Switching Protocols\r\n"
            "Upgrade: websocket\r\n"
            "Connection: Upgrade\r\n"
            "Sec-WebSocket-Accept: %s\r\n"
            "%s"
            "\r\n" % (self._challenge_response(), subprotocol_header)))
        self.async_callback(self.handler.open)(*self.handler.open_args, **self.handler.open_kwargs)
        self._receive_frame()

    def _write_frame(self, fin, opcode, data):
        # Serialize one frame: FIN bit + opcode, then the (possibly
        # extended) length, then the optionally masked payload.
        if fin:
            finbit = 0x80
        else:
            finbit = 0
        frame = struct.pack("B", finbit | opcode)
        l = len(data)
        if self.mask_outgoing:
            mask_bit = 0x80
        else:
            mask_bit = 0
        if l < 126:
            frame += struct.pack("B", l | mask_bit)
        elif l <= 0xFFFF:
            frame += struct.pack("!BH", 126 | mask_bit, l)
        else:
            frame += struct.pack("!BQ", 127 | mask_bit, l)
        if self.mask_outgoing:
            mask = os.urandom(4)
            data = mask + _websocket_mask(mask, data)
        frame += data
        self.stream.write(frame)

    def write_message(self, message, binary=False):
        """Sends the given message to the client of this Web Socket."""
        if binary:
            opcode = 0x2
        else:
            opcode = 0x1
        message = tornado.escape.utf8(message)
        assert isinstance(message, bytes_type)
        try:
            self._write_frame(True, opcode, message)
        except StreamClosedError:
            self._abort()

    def write_ping(self, data):
        """Send ping frame."""
        assert isinstance(data, bytes_type)
        self._write_frame(True, 0x9, data)

    def _receive_frame(self):
        # Every frame starts with a 2-byte header: FIN/opcode + mask/length.
        try:
            self.stream.read_bytes(2, self._on_frame_start)
        except StreamClosedError:
            self._abort()

    def _on_frame_start(self, data):
        header, payloadlen = struct.unpack("BB", data)
        self._final_frame = header & 0x80
        reserved_bits = header & 0x70
        self._frame_opcode = header & 0xf
        self._frame_opcode_is_control = self._frame_opcode & 0x8
        if reserved_bits:
            # client is using as-yet-undefined extensions; abort
            self._abort()
            return
        self._masked_frame = bool(payloadlen & 0x80)
        payloadlen = payloadlen & 0x7f
        if self._frame_opcode_is_control and payloadlen >= 126:
            # control frames must have payload < 126
            self._abort()
            return
        try:
            if payloadlen < 126:
                self._frame_length = payloadlen
                if self._masked_frame:
                    self.stream.read_bytes(4, self._on_masking_key)
                else:
                    self.stream.read_bytes(self._frame_length, self._on_frame_data)
            elif payloadlen == 126:
                # 16-bit extended payload length follows.
                self.stream.read_bytes(2, self._on_frame_length_16)
            elif payloadlen == 127:
                # 64-bit extended payload length follows.
                self.stream.read_bytes(8, self._on_frame_length_64)
        except StreamClosedError:
            self._abort()

    def _on_frame_length_16(self, data):
        self._frame_length = struct.unpack("!H", data)[0]
        try:
            if self._masked_frame:
                self.stream.read_bytes(4, self._on_masking_key)
            else:
                self.stream.read_bytes(self._frame_length, self._on_frame_data)
        except StreamClosedError:
            self._abort()

    def _on_frame_length_64(self, data):
        self._frame_length = struct.unpack("!Q", data)[0]
        try:
            if self._masked_frame:
                self.stream.read_bytes(4, self._on_masking_key)
            else:
                self.stream.read_bytes(self._frame_length, self._on_frame_data)
        except StreamClosedError:
            self._abort()

    def _on_masking_key(self, data):
        self._frame_mask = data
        try:
            self.stream.read_bytes(self._frame_length, self._on_masked_frame_data)
        except StreamClosedError:
            self._abort()

    def _on_masked_frame_data(self, data):
        # Unmask the payload before handing it to the regular frame handler.
        self._on_frame_data(_websocket_mask(self._frame_mask, data))

    def _on_frame_data(self, data):
        if self._frame_opcode_is_control:
            # control frames may be interleaved with a series of fragmented
            # data frames, so control frames must not interact with
            # self._fragmented_*
            if not self._final_frame:
                # control frames must not be fragmented
                self._abort()
                return
            opcode = self._frame_opcode
        elif self._frame_opcode == 0:  # continuation frame
            if self._fragmented_message_buffer is None:
                # nothing to continue
                self._abort()
                return
            self._fragmented_message_buffer += data
            if self._final_frame:
                opcode = self._fragmented_message_opcode
                data = self._fragmented_message_buffer
                self._fragmented_message_buffer = None
        else:  # start of new data message
            if self._fragmented_message_buffer is not None:
                # can't start new message until the old one is finished
                self._abort()
                return
            if self._final_frame:
                opcode = self._frame_opcode
            else:
                self._fragmented_message_opcode = self._frame_opcode
                self._fragmented_message_buffer = data
        if self._final_frame:
            self._handle_message(opcode, data)
        if not self.client_terminated:
            self._receive_frame()

    def _handle_message(self, opcode, data):
        # Dispatch one complete (reassembled) message by opcode.
        if self.client_terminated:
            return
        if opcode == 0x1:
            # UTF-8 data
            try:
                decoded = data.decode("utf-8")
            except UnicodeDecodeError:
                self._abort()
                return
            self.async_callback(self.handler.on_message)(decoded)
        elif opcode == 0x2:
            # Binary data
            self.async_callback(self.handler.on_message)(data)
        elif opcode == 0x8:
            # Close
            self.client_terminated = True
            self.close()
        elif opcode == 0x9:
            # Ping
            self._write_frame(True, 0xA, data)
        elif opcode == 0xA:
            # Pong
            self.async_callback(self.handler.on_pong)(data)
        else:
            self._abort()

    def close(self):
        """Closes the WebSocket connection."""
        if not self.server_terminated:
            if not self.stream.closed():
                self._write_frame(True, 0x8, b"")
            self.server_terminated = True
        if self.client_terminated:
            if self._waiting is not None:
                self.stream.io_loop.remove_timeout(self._waiting)
                self._waiting = None
            self.stream.close()
        elif self._waiting is None:
            # Give the client a few seconds to complete a clean shutdown,
            # otherwise just close the connection.
            self._waiting = self.stream.io_loop.add_timeout(
                self.stream.io_loop.time() + 5, self._abort)
class WebSocketClientConnection(simple_httpclient._HTTPConnection):
    """WebSocket client connection.

    This class should not be instantiated directly; use the
    `websocket_connect` function instead.
    """
    def __init__(self, io_loop, request):
        self.connect_future = TracebackFuture()
        # Future for a pending read_message() call, and a queue of messages
        # that arrived while no read was pending.
        self.read_future = None
        self.read_queue = collections.deque()
        # NOTE(review): base64.b64encode returns bytes on Python 3 and this
        # is used directly as a header value below — confirm the header
        # layer accepts bytes.
        self.key = base64.b64encode(os.urandom(16))
        scheme, sep, rest = request.url.partition(':')
        scheme = {'ws': 'http', 'wss': 'https'}[scheme]
        request.url = scheme + sep + rest
        request.headers.update({
            'Upgrade': 'websocket',
            'Connection': 'Upgrade',
            'Sec-WebSocket-Key': self.key,
            'Sec-WebSocket-Version': '13',
        })
        self.resolver = Resolver(io_loop=io_loop)
        super(WebSocketClientConnection, self).__init__(
            io_loop, None, request, lambda: None, self._on_http_response,
            104857600, self.resolver)

    def close(self):
        """Closes the websocket connection.

        .. versionadded:: 3.2
        """
        if self.protocol is not None:
            self.protocol.close()
            self.protocol = None

    def _on_close(self):
        # Signal EOF to any pending read, then release the resolver.
        self.on_message(None)
        self.resolver.close()
        super(WebSocketClientConnection, self)._on_close()

    def _on_http_response(self, response):
        # Only reached when the server did NOT switch protocols.
        if not self.connect_future.done():
            if response.error:
                self.connect_future.set_exception(response.error)
            else:
                self.connect_future.set_exception(WebSocketError(
                    "Non-websocket response"))

    def _handle_1xx(self, code):
        # NOTE(review): handshake validation uses assert, which is stripped
        # under python -O — confirm this is acceptable here.
        assert code == 101
        assert self.headers['Upgrade'].lower() == 'websocket'
        assert self.headers['Connection'].lower() == 'upgrade'
        accept = WebSocketProtocol13.compute_accept_value(self.key)
        assert self.headers['Sec-Websocket-Accept'] == accept
        self.protocol = WebSocketProtocol13(self, mask_outgoing=True)
        self.protocol._receive_frame()
        if self._timeout is not None:
            self.io_loop.remove_timeout(self._timeout)
            self._timeout = None
        self.connect_future.set_result(self)

    def write_message(self, message, binary=False):
        """Sends a message to the WebSocket server."""
        self.protocol.write_message(message, binary)

    def read_message(self, callback=None):
        """Reads a message from the WebSocket server.

        Returns a future whose result is the message, or None
        if the connection is closed. If a callback argument
        is given it will be called with the future when it is
        ready.
        """
        assert self.read_future is None
        future = TracebackFuture()
        if self.read_queue:
            future.set_result(self.read_queue.popleft())
        else:
            self.read_future = future
        if callback is not None:
            self.io_loop.add_future(future, callback)
        return future

    def on_message(self, message):
        # Deliver to a waiting read_message() future, or buffer otherwise.
        if self.read_future is not None:
            self.read_future.set_result(message)
            self.read_future = None
        else:
            self.read_queue.append(message)

    def on_pong(self, data):
        pass
def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None):
    """Client-side websocket support.

    Takes a url and returns a Future whose result is a
    `WebSocketClientConnection`.

    .. versionchanged:: 3.2
       Also accepts ``HTTPRequest`` objects in place of urls.
    """
    if io_loop is None:
        io_loop = IOLoop.current()
    if isinstance(url, httpclient.HTTPRequest):
        # A caller-supplied request carries its own timeout configuration.
        assert connect_timeout is None
        request = url
        # Copy and convert the headers dict/object (see comments in
        # AsyncHTTPClient.fetch)
        request.headers = httputil.HTTPHeaders(request.headers)
    else:
        request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
    proxied = httpclient._RequestProxy(
        request, httpclient.HTTPRequest._DEFAULTS)
    connection = WebSocketClientConnection(io_loop, proxied)
    if callback is not None:
        io_loop.add_future(connection.connect_future, callback)
    return connection.connect_future
# --- dataset extraction marker (end of first file copy: CrossVul CWE-203 good_2410_4) ---
"""Implementation of the WebSocket protocol.
`WebSockets <http://dev.w3.org/html5/websockets/>`_ allow for bidirectional
communication between the browser and server.
.. warning::
The WebSocket protocol was recently finalized as `RFC 6455
<http://tools.ietf.org/html/rfc6455>`_ and is not yet supported in
all browsers. Refer to http://caniuse.com/websockets for details
on compatibility. In addition, during development the protocol
went through several incompatible versions, and some browsers only
support older versions. By default this module only supports the
latest version of the protocol, but optional support for an older
version (known as "draft 76" or "hixie-76") can be enabled by
overriding `WebSocketHandler.allow_draft76` (see that method's
documentation for caveats).
"""
from __future__ import absolute_import, division, print_function, with_statement
# Author: Jacob Kristhammar, 2010
import array
import base64
import collections
import functools
import hashlib
import os
import struct
import time
import tornado.escape
import tornado.web
from tornado.concurrent import TracebackFuture
from tornado.escape import utf8, native_str
from tornado import httpclient, httputil
from tornado.ioloop import IOLoop
from tornado.iostream import StreamClosedError
from tornado.log import gen_log, app_log
from tornado.netutil import Resolver
from tornado import simple_httpclient
from tornado.util import bytes_type, unicode_type
try:
xrange # py2
except NameError:
xrange = range # py3
class WebSocketError(Exception):
    """Base class for errors raised by this websocket module."""
class WebSocketClosedError(WebSocketError):
    """Raised by operations on a closed connection.

    .. versionadded:: 3.2
    """
class WebSocketHandler(tornado.web.RequestHandler):
    """Subclass this class to create a basic WebSocket handler.

    Override `on_message` to handle incoming messages, and use
    `write_message` to send messages to the client. You can also
    override `open` and `on_close` to handle opened and closed
    connections.

    See http://dev.w3.org/html5/websockets/ for details on the
    JavaScript interface.  The protocol is specified at
    http://tools.ietf.org/html/rfc6455.

    Here is an example WebSocket handler that echos back all received
    messages back to the client::

      class EchoWebSocket(websocket.WebSocketHandler):
          def open(self):
              print "WebSocket opened"

          def on_message(self, message):
              self.write_message(u"You said: " + message)

          def on_close(self):
              print "WebSocket closed"

    WebSockets are not standard HTTP connections. The "handshake" is
    HTTP, but after the handshake, the protocol is
    message-based. Consequently, most of the Tornado HTTP facilities
    are not available in handlers of this type. The only communication
    methods available to you are `write_message()`, `ping()`, and
    `close()`. Likewise, your request handler class should implement
    `open()` method rather than ``get()`` or ``post()``.

    If you map the handler above to ``/websocket`` in your application,
    you can invoke it in JavaScript with::

      var ws = new WebSocket("ws://localhost:8888/websocket");
      ws.onopen = function() {
         ws.send("Hello, world");
      };
      ws.onmessage = function (evt) {
         alert(evt.data);
      };

    This script pops up an alert box that says "You said: Hello, world".
    """
    def __init__(self, application, request, **kwargs):
        tornado.web.RequestHandler.__init__(self, application, request,
                                            **kwargs)
        self.stream = request.connection.stream
        # The protocol object (76 or 13); None until the handshake succeeds.
        self.ws_connection = None

    def _execute(self, transforms, *args, **kwargs):
        # Validate the upgrade request, then hand off to the protocol
        # implementation matching the client's advertised version.
        self.open_args = args
        self.open_kwargs = kwargs
        # Websocket only supports GET method
        if self.request.method != 'GET':
            self.stream.write(tornado.escape.utf8(
                "HTTP/1.1 405 Method Not Allowed\r\n\r\n"
            ))
            self.stream.close()
            return
        # Upgrade header should be present and should be equal to WebSocket
        if self.request.headers.get("Upgrade", "").lower() != 'websocket':
            self.stream.write(tornado.escape.utf8(
                "HTTP/1.1 400 Bad Request\r\n\r\n"
                "Can \"Upgrade\" only to \"WebSocket\"."
            ))
            self.stream.close()
            return
        # Connection header should be upgrade. Some proxy servers/load balancers
        # might mess with it.
        headers = self.request.headers
        connection = map(lambda s: s.strip().lower(), headers.get("Connection", "").split(","))
        if 'upgrade' not in connection:
            self.stream.write(tornado.escape.utf8(
                "HTTP/1.1 400 Bad Request\r\n\r\n"
                "\"Connection\" must be \"Upgrade\"."
            ))
            self.stream.close()
            return
        # The difference between version 8 and 13 is that in 8 the
        # client sends a "Sec-Websocket-Origin" header and in 13 it's
        # simply "Origin".
        if self.request.headers.get("Sec-WebSocket-Version") in ("7", "8", "13"):
            self.ws_connection = WebSocketProtocol13(self)
            self.ws_connection.accept_connection()
        elif (self.allow_draft76() and
              "Sec-WebSocket-Version" not in self.request.headers):
            self.ws_connection = WebSocketProtocol76(self)
            self.ws_connection.accept_connection()
        else:
            self.stream.write(tornado.escape.utf8(
                "HTTP/1.1 426 Upgrade Required\r\n"
                "Sec-WebSocket-Version: 8\r\n\r\n"))
            self.stream.close()

    def write_message(self, message, binary=False):
        """Sends the given message to the client of this Web Socket.

        The message may be either a string or a dict (which will be
        encoded as json). If the ``binary`` argument is false, the
        message will be sent as utf8; in binary mode any byte string
        is allowed.

        If the connection is already closed, raises `WebSocketClosedError`.

        .. versionchanged:: 3.2
           `WebSocketClosedError` was added (previously a closed connection
           would raise an `AttributeError`)
        """
        if self.ws_connection is None:
            raise WebSocketClosedError()
        if isinstance(message, dict):
            message = tornado.escape.json_encode(message)
        self.ws_connection.write_message(message, binary=binary)

    def select_subprotocol(self, subprotocols):
        """Invoked when a new WebSocket requests specific subprotocols.

        ``subprotocols`` is a list of strings identifying the
        subprotocols proposed by the client.  This method may be
        overridden to return one of those strings to select it, or
        ``None`` to not select a subprotocol.  Failure to select a
        subprotocol does not automatically abort the connection,
        although clients may close the connection if none of their
        proposed subprotocols was selected.
        """
        return None

    def open(self):
        """Invoked when a new WebSocket is opened.

        The arguments to `open` are extracted from the `tornado.web.URLSpec`
        regular expression, just like the arguments to
        `tornado.web.RequestHandler.get`.
        """
        pass

    def on_message(self, message):
        """Handle incoming messages on the WebSocket.

        This method must be overridden.
        """
        raise NotImplementedError

    def ping(self, data):
        """Send ping frame to the remote end."""
        if self.ws_connection is None:
            raise WebSocketClosedError()
        self.ws_connection.write_ping(data)

    def on_pong(self, data):
        """Invoked when the response to a ping frame is received."""
        pass

    def on_close(self):
        """Invoked when the WebSocket is closed."""
        pass

    def close(self):
        """Closes this Web Socket.

        Once the close handshake is successful the socket will be closed.
        """
        if self.ws_connection:
            self.ws_connection.close()
            self.ws_connection = None

    def allow_draft76(self):
        """Override to enable support for the older "draft76" protocol.

        The draft76 version of the websocket protocol is disabled by
        default due to security concerns, but it can be enabled by
        overriding this method to return True.

        Connections using the draft76 protocol do not support the
        ``binary=True`` flag to `write_message`.

        Support for the draft76 protocol is deprecated and will be
        removed in a future version of Tornado.
        """
        return False

    def set_nodelay(self, value):
        """Set the no-delay flag for this stream.

        By default, small messages may be delayed and/or combined to minimize
        the number of packets sent.  This can sometimes cause 200-500ms delays
        due to the interaction between Nagle's algorithm and TCP delayed
        ACKs.  To reduce this delay (at the expense of possibly increasing
        bandwidth usage), call ``self.set_nodelay(True)`` once the websocket
        connection is established.

        See `.BaseIOStream.set_nodelay` for additional details.

        .. versionadded:: 3.1
        """
        self.stream.set_nodelay(value)

    def get_websocket_scheme(self):
        """Return the url scheme used for this request, either "ws" or "wss".

        This is normally decided by HTTPServer, but applications
        may wish to override this if they are using an SSL proxy
        that does not provide the X-Scheme header as understood
        by HTTPServer.

        Note that this is only used by the draft76 protocol.
        """
        return "wss" if self.request.protocol == "https" else "ws"

    def async_callback(self, callback, *args, **kwargs):
        """Obsolete - catches exceptions from the wrapped function.

        This function is normally unncecessary thanks to
        `tornado.stack_context`.
        """
        return self.ws_connection.async_callback(callback, *args, **kwargs)

    def _not_supported(self, *args, **kwargs):
        # Installed over the plain-HTTP response methods below.
        raise Exception("Method not supported for Web Sockets")

    def on_connection_close(self):
        if self.ws_connection:
            self.ws_connection.on_connection_close()
            self.ws_connection = None
            self.on_close()
# Disable the regular HTTP response methods on WebSocketHandler; they make
# no sense once the connection has been upgraded to the websocket protocol.
for method in ["write", "redirect", "set_header", "send_error", "set_cookie",
               "set_status", "flush", "finish"]:
    setattr(WebSocketHandler, method, WebSocketHandler._not_supported)
class WebSocketProtocol(object):
    """Base class for WebSocket protocol versions.
    """
    def __init__(self, handler):
        self.handler = handler
        self.request = handler.request
        self.stream = handler.stream
        # Set once each side has completed its half of the closing handshake.
        self.client_terminated = False
        self.server_terminated = False

    def async_callback(self, callback, *args, **kwargs):
        """Wrap callbacks with this if they are used on asynchronous requests.

        Catches exceptions properly and closes this WebSocket if an exception
        is uncaught.
        """
        if args or kwargs:
            callback = functools.partial(callback, *args, **kwargs)

        def wrapper(*args, **kwargs):
            try:
                return callback(*args, **kwargs)
            except Exception:
                app_log.error("Uncaught exception in %s",
                              self.request.path, exc_info=True)
                self._abort()
        return wrapper

    def on_connection_close(self):
        self._abort()

    def _abort(self):
        """Instantly aborts the WebSocket connection by closing the socket"""
        self.client_terminated = True
        self.server_terminated = True
        self.stream.close()  # forcibly tear down the connection
        self.close()  # let the subclass cleanup
class WebSocketProtocol76(WebSocketProtocol):
    """Implementation of the WebSockets protocol, version hixie-76.

    This class provides basic functionality to process WebSockets requests as
    specified in
    http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76
    """
    def __init__(self, handler):
        WebSocketProtocol.__init__(self, handler)
        self.challenge = None
        # Pending close-timeout handle, if any.
        self._waiting = None

    def accept_connection(self):
        """Validate the handshake headers and start the draft76 handshake."""
        try:
            self._handle_websocket_headers()
        except ValueError:
            gen_log.debug("Malformed WebSocket request received")
            self._abort()
            return
        scheme = self.handler.get_websocket_scheme()
        # draft76 only allows a single subprotocol
        subprotocol_header = ''
        subprotocol = self.request.headers.get("Sec-WebSocket-Protocol", None)
        if subprotocol:
            selected = self.handler.select_subprotocol([subprotocol])
            if selected:
                assert selected == subprotocol
                subprotocol_header = "Sec-WebSocket-Protocol: %s\r\n" % selected
        # Write the initial headers before attempting to read the challenge.
        # This is necessary when using proxies (such as HAProxy), which
        # need to see the Upgrade headers before passing through the
        # non-HTTP traffic that follows.
        self.stream.write(tornado.escape.utf8(
            "HTTP/1.1 101 WebSocket Protocol Handshake\r\n"
            "Upgrade: WebSocket\r\n"
            "Connection: Upgrade\r\n"
            "Server: TornadoServer/%(version)s\r\n"
            "Sec-WebSocket-Origin: %(origin)s\r\n"
            "Sec-WebSocket-Location: %(scheme)s://%(host)s%(uri)s\r\n"
            "%(subprotocol)s"
            "\r\n" % (dict(
                version=tornado.version,
                origin=self.request.headers["Origin"],
                scheme=scheme,
                host=self.request.host,
                uri=self.request.uri,
                subprotocol=subprotocol_header))))
        self.stream.read_bytes(8, self._handle_challenge)

    def challenge_response(self, challenge):
        """Generates the challenge response that's needed in the handshake.

        The challenge parameter should be the raw bytes as sent from the
        client.
        """
        key_1 = self.request.headers.get("Sec-Websocket-Key1")
        key_2 = self.request.headers.get("Sec-Websocket-Key2")
        try:
            part_1 = self._calculate_part(key_1)
            part_2 = self._calculate_part(key_2)
        except ValueError:
            raise ValueError("Invalid Keys/Challenge")
        return self._generate_challenge_response(part_1, part_2, challenge)

    def _handle_challenge(self, challenge):
        # Compute and send the handshake response, aborting on bad keys.
        try:
            challenge_response = self.challenge_response(challenge)
        except ValueError:
            gen_log.debug("Malformed key data in WebSocket request")
            self._abort()
            return
        self._write_response(challenge_response)

    def _write_response(self, challenge):
        # Send the 16-byte response, fire open(), then start reading frames.
        self.stream.write(challenge)
        self.async_callback(self.handler.open)(*self.handler.open_args, **self.handler.open_kwargs)
        self._receive_message()

    def _handle_websocket_headers(self):
        """Verifies all invariant- and required headers.

        If a header is missing or has an incorrect value, ValueError will be
        raised.
        """
        fields = ("Origin", "Host", "Sec-Websocket-Key1",
                  "Sec-Websocket-Key2")
        if not all(map(lambda f: self.request.headers.get(f), fields)):
            raise ValueError("Missing/Invalid WebSocket headers")

    def _calculate_part(self, key):
        """Processes the key headers and calculates their key value.

        Raises ValueError when fed an invalid key."""
        # pyflakes complains about variable reuse if both of these lines use 'c'
        number = int(''.join(c for c in key if c.isdigit()))
        spaces = len([c2 for c2 in key if c2.isspace()])
        try:
            key_number = number // spaces
        except (ValueError, ZeroDivisionError):
            raise ValueError
        return struct.pack(">I", key_number)

    def _generate_challenge_response(self, part_1, part_2, part_3):
        # MD5 of the two derived key parts plus the 8-byte challenge.
        m = hashlib.md5()
        m.update(part_1)
        m.update(part_2)
        m.update(part_3)
        return m.digest()

    def _receive_message(self):
        # Every draft76 frame starts with a single type byte.
        self.stream.read_bytes(1, self._on_frame_type)

    def _on_frame_type(self, byte):
        frame_type = ord(byte)
        if frame_type == 0x00:
            # Text frame: payload runs until the 0xff terminator.
            self.stream.read_until(b"\xff", self._on_end_delimiter)
        elif frame_type == 0xff:
            # Closing handshake: one length byte follows.
            self.stream.read_bytes(1, self._on_length_indicator)
        else:
            self._abort()

    def _on_end_delimiter(self, frame):
        if not self.client_terminated:
            self.async_callback(self.handler.on_message)(
                frame[:-1].decode("utf-8", "replace"))
        if not self.client_terminated:
            self._receive_message()

    def _on_length_indicator(self, byte):
        # Only a zero-length closing frame is valid.
        if ord(byte) != 0x00:
            self._abort()
            return
        self.client_terminated = True
        self.close()

    def write_message(self, message, binary=False):
        """Sends the given message to the client of this Web Socket."""
        if binary:
            raise ValueError(
                "Binary messages not supported by this version of websockets")
        if isinstance(message, unicode_type):
            message = message.encode("utf-8")
        assert isinstance(message, bytes_type)
        self.stream.write(b"\x00" + message + b"\xff")

    def write_ping(self, data):
        """Send ping frame."""
        raise ValueError("Ping messages not supported by this version of websockets")

    def close(self):
        """Closes the WebSocket connection."""
        if not self.server_terminated:
            if not self.stream.closed():
                # Bug fix: the closing frame must be written as bytes; a
                # text string here raises TypeError on Python 3 iostreams.
                self.stream.write(b"\xff\x00")
            self.server_terminated = True
        if self.client_terminated:
            if self._waiting is not None:
                self.stream.io_loop.remove_timeout(self._waiting)
                self._waiting = None
            self.stream.close()
        elif self._waiting is None:
            # Use the IOLoop's clock for the deadline (consistent with
            # WebSocketProtocol13.close and correct when the IOLoop is
            # configured with a monotonic clock).
            self._waiting = self.stream.io_loop.add_timeout(
                self.stream.io_loop.time() + 5, self._abort)
class WebSocketProtocol13(WebSocketProtocol):
"""Implementation of the WebSocket protocol from RFC 6455.
This class supports versions 7 and 8 of the protocol in addition to the
final version 13.
"""
def __init__(self, handler, mask_outgoing=False):
    WebSocketProtocol.__init__(self, handler)
    # Clients must mask outgoing frames; servers must not (RFC 6455 5.3).
    self.mask_outgoing = mask_outgoing
    # Per-frame parser state, filled in as the frame header is read.
    self._final_frame = False
    self._frame_opcode = None
    self._masked_frame = None
    self._frame_mask = None
    self._frame_length = None
    # Reassembly state for fragmented messages.
    self._fragmented_message_buffer = None
    self._fragmented_message_opcode = None
    # Pending close-timeout handle, if any.
    self._waiting = None
def accept_connection(self):
    """Validate the handshake headers and complete the RFC 6455 upgrade."""
    try:
        self._handle_websocket_headers()
        self._accept_connection()
    except ValueError:
        # Anything wrong with the handshake tears the connection down.
        gen_log.debug("Malformed WebSocket request received", exc_info=True)
        self._abort()
        return
def _handle_websocket_headers(self):
"""Verifies all invariant- and required headers
If a header is missing or have an incorrect value ValueError will be
raised
"""
fields = ("Host", "Sec-Websocket-Key", "Sec-Websocket-Version")
if not all(map(lambda f: self.request.headers.get(f), fields)):
raise ValueError("Missing/Invalid WebSocket headers")
@staticmethod
def compute_accept_value(key):
    """Computes the value for the Sec-WebSocket-Accept header,
    given the value for Sec-WebSocket-Key.
    """
    sha1 = hashlib.sha1()
    sha1.update(utf8(key))
    # Fixed GUID defined by RFC 6455, section 1.3.
    sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")  # Magic value
    return native_str(base64.b64encode(sha1.digest()))
def _challenge_response(self):
    # Server's accept token derived from the client's handshake key.
    return WebSocketProtocol13.compute_accept_value(
        self.request.headers.get("Sec-Websocket-Key"))
def _accept_connection(self):
    # Negotiate an optional subprotocol, send the 101 response, fire the
    # application's open() callback, then start the frame-reading loop.
    subprotocol_header = ''
    subprotocols = self.request.headers.get("Sec-WebSocket-Protocol", '')
    subprotocols = [s.strip() for s in subprotocols.split(',')]
    # NOTE(review): when the header is absent this yields [''] (truthy),
    # so select_subprotocol is still consulted with an empty name —
    # confirm handlers tolerate that.
    if subprotocols:
        selected = self.handler.select_subprotocol(subprotocols)
        if selected:
            assert selected in subprotocols
            subprotocol_header = "Sec-WebSocket-Protocol: %s\r\n" % selected
    self.stream.write(tornado.escape.utf8(
        "HTTP/1.1 101 Switching Protocols\r\n"
        "Upgrade: websocket\r\n"
        "Connection: Upgrade\r\n"
        "Sec-WebSocket-Accept: %s\r\n"
        "%s"
        "\r\n" % (self._challenge_response(), subprotocol_header)))
    self.async_callback(self.handler.open)(*self.handler.open_args, **self.handler.open_kwargs)
    self._receive_frame()
def _write_frame(self, fin, opcode, data):
if fin:
finbit = 0x80
else:
finbit = 0
frame = struct.pack("B", finbit | opcode)
l = len(data)
if self.mask_outgoing:
mask_bit = 0x80
else:
mask_bit = 0
if l < 126:
frame += struct.pack("B", l | mask_bit)
elif l <= 0xFFFF:
frame += struct.pack("!BH", 126 | mask_bit, l)
else:
frame += struct.pack("!BQ", 127 | mask_bit, l)
if self.mask_outgoing:
mask = os.urandom(4)
data = mask + _websocket_mask(mask, data)
frame += data
self.stream.write(frame)
def write_message(self, message, binary=False):
    """Sends the given message to the client of this Web Socket."""
    # 0x1 = text frame, 0x2 = binary frame.
    opcode = 0x2 if binary else 0x1
    payload = tornado.escape.utf8(message)
    assert isinstance(payload, bytes_type)
    try:
        self._write_frame(True, opcode, payload)
    except StreamClosedError:
        self._abort()
def write_ping(self, data):
    """Send ping frame."""
    assert isinstance(data, bytes_type)
    # 0x9 is the RFC 6455 ping opcode.
    self._write_frame(True, 0x9, data)
def _receive_frame(self):
    # Every frame starts with a 2-byte header: FIN/opcode + mask/length.
    try:
        self.stream.read_bytes(2, self._on_frame_start)
    except StreamClosedError:
        self._abort()
def _on_frame_start(self, data):
    # Parse the 2-byte frame header and schedule the next read (extended
    # length, masking key, or payload) accordingly.
    header, payloadlen = struct.unpack("BB", data)
    self._final_frame = header & 0x80
    reserved_bits = header & 0x70
    self._frame_opcode = header & 0xf
    self._frame_opcode_is_control = self._frame_opcode & 0x8
    if reserved_bits:
        # client is using as-yet-undefined extensions; abort
        self._abort()
        return
    self._masked_frame = bool(payloadlen & 0x80)
    payloadlen = payloadlen & 0x7f
    if self._frame_opcode_is_control and payloadlen >= 126:
        # control frames must have payload < 126
        self._abort()
        return
    try:
        if payloadlen < 126:
            self._frame_length = payloadlen
            if self._masked_frame:
                self.stream.read_bytes(4, self._on_masking_key)
            else:
                self.stream.read_bytes(self._frame_length, self._on_frame_data)
        elif payloadlen == 126:
            # 16-bit extended payload length follows.
            self.stream.read_bytes(2, self._on_frame_length_16)
        elif payloadlen == 127:
            # 64-bit extended payload length follows.
            self.stream.read_bytes(8, self._on_frame_length_64)
    except StreamClosedError:
        self._abort()
def _on_frame_length_16(self, data):
self._frame_length = struct.unpack("!H", data)[0]
try:
if self._masked_frame:
self.stream.read_bytes(4, self._on_masking_key)
else:
self.stream.read_bytes(self._frame_length, self._on_frame_data)
except StreamClosedError:
self._abort()
def _on_frame_length_64(self, data):
self._frame_length = struct.unpack("!Q", data)[0]
try:
if self._masked_frame:
self.stream.read_bytes(4, self._on_masking_key)
else:
self.stream.read_bytes(self._frame_length, self._on_frame_data)
except StreamClosedError:
self._abort()
def _on_masking_key(self, data):
    """Store the 4-byte masking key, then read the masked payload."""
    # NOTE(review): re-indented — the pasted original mixed 4- and
    # 8-space indentation and did not parse.
    self._frame_mask = data
    try:
        self.stream.read_bytes(self._frame_length, self._on_masked_frame_data)
    except StreamClosedError:
        self._abort()
def _on_masked_frame_data(self, data):
    # Unmask the payload with the previously read key, then hand the
    # clear bytes to the regular frame-data handler.
    self._on_frame_data(_websocket_mask(self._frame_mask, data))
def _on_frame_data(self, data):
    """Route a completed frame's payload.

    Control frames are dispatched immediately; data frames are
    reassembled across continuation frames before dispatch, after which
    the read loop is re-armed for the next frame.
    """
    if self._frame_opcode_is_control:
        # control frames may be interleaved with a series of fragmented
        # data frames, so control frames must not interact with
        # self._fragmented_*
        if not self._final_frame:
            # control frames must not be fragmented
            self._abort()
            return
        opcode = self._frame_opcode
    elif self._frame_opcode == 0:  # continuation frame
        if self._fragmented_message_buffer is None:
            # nothing to continue
            self._abort()
            return
        self._fragmented_message_buffer += data
        if self._final_frame:
            opcode = self._fragmented_message_opcode
            data = self._fragmented_message_buffer
            self._fragmented_message_buffer = None
    else:  # start of new data message
        if self._fragmented_message_buffer is not None:
            # can't start new message until the old one is finished
            self._abort()
            return
        if self._final_frame:
            opcode = self._frame_opcode
        else:
            self._fragmented_message_opcode = self._frame_opcode
            self._fragmented_message_buffer = data
    # BUG FIX: these two steps belong at function level (as in upstream
    # Tornado).  The pasted original indented them inside the final
    # "else", so control frames and completed continuation frames were
    # never dispatched and the read loop was never re-armed for them.
    if self._final_frame:
        self._handle_message(opcode, data)
    if not self.client_terminated:
        self._receive_frame()
def _handle_message(self, opcode, data):
    """Dispatch a completed message by opcode (RFC 6455, sections 5.5-5.6).

    0x1 text, 0x2 binary, 0x8 close, 0x9 ping (answered with a pong),
    0xA pong; anything else aborts the connection.
    """
    if self.client_terminated:
        return
    # BUG FIX: upstream Tornado dispatches at function level.  The
    # pasted original indented everything below into the guard above
    # (after its ``return``), making the whole dispatch unreachable.
    if opcode == 0x1:
        # UTF-8 data; invalid UTF-8 must kill the connection.
        try:
            decoded = data.decode("utf-8")
        except UnicodeDecodeError:
            self._abort()
            return
        self.async_callback(self.handler.on_message)(decoded)
    elif opcode == 0x2:
        # Binary data is delivered to the handler as raw bytes.
        self.async_callback(self.handler.on_message)(data)
    elif opcode == 0x8:
        # Close: remember the client is gone and echo the handshake.
        self.client_terminated = True
        self.close()
    elif opcode == 0x9:
        # Ping: reply with a pong carrying the same payload.
        self._write_frame(True, 0xA, data)
    elif opcode == 0xA:
        # Pong: notify the application.
        self.async_callback(self.handler.on_pong)(data)
    else:
        self._abort()
def close(self):
    """Close the WebSocket connection.

    Sends a close frame once, then either closes the stream right away
    (if the client has already closed its side) or gives the client a
    few seconds to finish a clean shutdown before aborting.
    """
    # NOTE(review): re-indented — the pasted original mixed 4- and
    # 8-space indentation and did not parse.
    if not self.server_terminated:
        if not self.stream.closed():
            self._write_frame(True, 0x8, b"")
        self.server_terminated = True
    if self.client_terminated:
        if self._waiting is not None:
            self.stream.io_loop.remove_timeout(self._waiting)
            self._waiting = None
        self.stream.close()
    elif self._waiting is None:
        # Give the client a few seconds to complete a clean shutdown,
        # otherwise just close the connection.
        self._waiting = self.stream.io_loop.add_timeout(
            self.stream.io_loop.time() + 5, self._abort)
class WebSocketClientConnection(simple_httpclient._HTTPConnection):
    """WebSocket client connection.

    This class should not be instantiated directly; use the
    `websocket_connect` function instead.
    """
    # NOTE(review): re-indented — in the pasted original every method
    # below sat at module level, detached from the class body (which
    # also made the module-level protocol ``close`` get shadowed).

    def __init__(self, io_loop, request):
        self.connect_future = TracebackFuture()
        self.read_future = None
        self.read_queue = collections.deque()
        self.key = base64.b64encode(os.urandom(16))
        # Rewrite ws/wss to http/https so the underlying HTTP client can
        # drive the upgrade handshake.
        scheme, sep, rest = request.url.partition(':')
        scheme = {'ws': 'http', 'wss': 'https'}[scheme]
        request.url = scheme + sep + rest
        request.headers.update({
            'Upgrade': 'websocket',
            'Connection': 'Upgrade',
            'Sec-WebSocket-Key': self.key,
            'Sec-WebSocket-Version': '13',
        })
        self.resolver = Resolver(io_loop=io_loop)
        super(WebSocketClientConnection, self).__init__(
            io_loop, None, request, lambda: None, self._on_http_response,
            104857600, self.resolver)

    def close(self):
        """Closes the websocket connection.

        .. versionadded:: 3.2
        """
        if self.protocol is not None:
            self.protocol.close()
            self.protocol = None

    def _on_close(self):
        # Signal end-of-stream to any pending reader, then clean up.
        self.on_message(None)
        self.resolver.close()
        super(WebSocketClientConnection, self)._on_close()

    def _on_http_response(self, response):
        # Only reached when the handshake did NOT produce a 101 upgrade.
        if not self.connect_future.done():
            if response.error:
                self.connect_future.set_exception(response.error)
            else:
                self.connect_future.set_exception(WebSocketError(
                    "Non-websocket response"))

    def _handle_1xx(self, code):
        # NOTE(review): validating the handshake with bare asserts means
        # these checks vanish under ``python -O``; later upstream versions
        # raise explicit errors instead.  Left as-is to preserve behavior.
        assert code == 101
        assert self.headers['Upgrade'].lower() == 'websocket'
        assert self.headers['Connection'].lower() == 'upgrade'
        accept = WebSocketProtocol13.compute_accept_value(self.key)
        assert self.headers['Sec-Websocket-Accept'] == accept
        # Handshake complete: switch to the framing protocol and start
        # reading frames.
        self.protocol = WebSocketProtocol13(self, mask_outgoing=True)
        self.protocol._receive_frame()
        if self._timeout is not None:
            self.io_loop.remove_timeout(self._timeout)
            self._timeout = None
        self.connect_future.set_result(self)

    def write_message(self, message, binary=False):
        """Sends a message to the WebSocket server."""
        self.protocol.write_message(message, binary)

    def read_message(self, callback=None):
        """Reads a message from the WebSocket server.

        Returns a future whose result is the message, or None
        if the connection is closed. If a callback argument
        is given it will be called with the future when it is
        ready.
        """
        assert self.read_future is None
        future = TracebackFuture()
        if self.read_queue:
            future.set_result(self.read_queue.popleft())
        else:
            self.read_future = future
        if callback is not None:
            self.io_loop.add_future(future, callback)
        return future

    def on_message(self, message):
        # Deliver to a pending read_message() future if one is waiting,
        # otherwise queue the message for a later read.
        if self.read_future is not None:
            self.read_future.set_result(message)
            self.read_future = None
        else:
            self.read_queue.append(message)

    def on_pong(self, data):
        pass
def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None):
    """Client-side websocket support.

    Takes a url and returns a Future whose result is a
    `WebSocketClientConnection`.

    .. versionchanged:: 3.2
       Also accepts ``HTTPRequest`` objects in place of urls.
    """
    # NOTE(review): re-indented — the pasted original mixed 4- and
    # 8-space indentation and did not parse.
    if io_loop is None:
        io_loop = IOLoop.current()
    if isinstance(url, httpclient.HTTPRequest):
        assert connect_timeout is None
        request = url
        # Copy and convert the headers dict/object (see comments in
        # AsyncHTTPClient.fetch)
        request.headers = httputil.HTTPHeaders(request.headers)
    else:
        request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
    request = httpclient._RequestProxy(
        request, httpclient.HTTPRequest._DEFAULTS)
    conn = WebSocketClientConnection(io_loop, request)
    if callback is not None:
        io_loop.add_future(conn.connect_future, callback)
    return conn.connect_future
def _websocket_mask_python(mask, data):
    """Websocket masking function.

    `mask` is a `bytes` object of length 4; `data` is a `bytes` object
    of any length.  Returns a `bytes` object of the same length as
    `data` with the mask applied as specified in section 5.3 of RFC 6455.

    This pure-python implementation may be replaced by an optimized
    version when available.
    """
    # NOTE(review): re-indented (original did not parse) and ``xrange``
    # replaced with ``range``, which behaves identically here and also
    # works on Python 3 without a compat shim.
    mask = array.array("B", mask)
    unmasked = array.array("B", data)
    for i in range(len(data)):
        unmasked[i] = unmasked[i] ^ mask[i % 4]
    if hasattr(unmasked, 'tobytes'):
        # tostring was deprecated in py32.  It hasn't been removed,
        # but since we turn on deprecation warnings in our tests
        # we need to use the right one.
        return unmasked.tobytes()
    else:
        return unmasked.tostring()
# Select the masking implementation: the pure-Python fallback above, or
# the optional C speedup module when it is importable.
if (os.environ.get('TORNADO_NO_EXTENSION') or
        os.environ.get('TORNADO_EXTENSION') == '0'):
    # These environment variables exist to make it easier to do performance
    # comparisons; they are not guaranteed to remain supported in the future.
    _websocket_mask = _websocket_mask_python
else:
    try:
        from tornado.speedups import websocket_mask as _websocket_mask
    except ImportError:
        # TORNADO_EXTENSION=1 turns a missing C extension into a hard error.
        if os.environ.get('TORNADO_EXTENSION') == '1':
            raise
        _websocket_mask = _websocket_mask_python
| ./CrossVul/dataset_final_sorted/CWE-203/py/bad_2410_4 |
crossvul-python_data_bad_4903_1 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-21/py/bad_4903_1 |
crossvul-python_data_bad_4903_2 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-21/py/bad_4903_2 |
crossvul-python_data_good_4903_1 | # -*- coding: utf-8 -*-
#
# This file is part of Radicale Server - Calendar Server
# Copyright © 2012-2015 Guillaume Ayoub
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
"""
Filesystem storage backend.
"""
import codecs
import os
import posixpath
import json
import time
import sys
from contextlib import contextmanager
from .. import config, ical, pathutils
# Base folder holding all collections, taken from the user configuration.
FOLDER = os.path.expanduser(config.get("storage", "filesystem_folder"))
FILESYSTEM_ENCODING = sys.getfilesystemencoding()
# Optional git versioning of the storage folder: requires dulwich and an
# existing repository in FOLDER; disabled when either is missing.
try:
    from dulwich.repo import Repo
    GIT_REPOSITORY = Repo(FOLDER)
except Exception:
    # BUG FIX: the original bare ``except:`` also swallowed SystemExit
    # and KeyboardInterrupt; any real failure here (missing dulwich, no
    # repository) is an Exception subclass.
    GIT_REPOSITORY = None
# This function overrides the builtin ``open`` function for this module
# pylint: disable=W0622
@contextmanager
def open(path, mode="r"):
    """Open a file at ``path`` with encoding set in the configuration."""
    # Translate the virtual "/"-separated path into a local one.
    abs_path = os.path.join(FOLDER, path.replace("/", os.sep))
    with codecs.open(abs_path, mode, config.get("encoding", "stock")) as fd:
        yield fd
    # After a write, record the change in the optional git repository.
    if mode == "w" and GIT_REPOSITORY:
        rel_path = os.path.relpath(abs_path, FOLDER)
        GIT_REPOSITORY.stage([rel_path])
        committer = config.get("git", "committer")
        GIT_REPOSITORY.do_commit(
            rel_path.encode("utf-8"), committer=committer.encode("utf-8"))
# pylint: enable=W0622
class Collection(ical.Collection):
    """Collection stored in a flat ical file."""
    @property
    def _filesystem_path(self):
        """Absolute path of the file at local ``path``."""
        return pathutils.path_to_filesystem(self.path, FOLDER)
    @property
    def _props_path(self):
        """Absolute path of the file storing the collection properties."""
        return self._filesystem_path + ".props"
    def _create_dirs(self):
        """Create folder storing the collection if absent."""
        if not os.path.exists(os.path.dirname(self._filesystem_path)):
            os.makedirs(os.path.dirname(self._filesystem_path))
    def save(self, text):
        """Write the whole serialized collection to its single file."""
        self._create_dirs()
        with open(self._filesystem_path, "w") as fd:
            fd.write(text)
    def delete(self):
        """Remove both the collection file and its properties file."""
        os.remove(self._filesystem_path)
        os.remove(self._props_path)
    @property
    def text(self):
        """Raw serialized content, or "" when the file is unreadable."""
        try:
            with open(self._filesystem_path) as fd:
                return fd.read()
        except IOError:
            return ""
    @classmethod
    def children(cls, path):
        """Yield the child collections found directly under ``path``."""
        filesystem_path = pathutils.path_to_filesystem(path, FOLDER)
        # Only the first os.walk() tuple is needed: direct children.
        _, directories, files = next(os.walk(filesystem_path))
        for filename in directories + files:
            rel_filename = posixpath.join(path, filename)
            if cls.is_node(rel_filename) or cls.is_leaf(rel_filename):
                yield cls(rel_filename)
    @classmethod
    def is_node(cls, path):
        """True when ``path`` maps to a directory (inner node)."""
        filesystem_path = pathutils.path_to_filesystem(path, FOLDER)
        return os.path.isdir(filesystem_path)
    @classmethod
    def is_leaf(cls, path):
        """True when ``path`` maps to a regular, non-".props" file."""
        filesystem_path = pathutils.path_to_filesystem(path, FOLDER)
        return (os.path.isfile(filesystem_path) and not
                filesystem_path.endswith(".props"))
    @property
    def last_modified(self):
        """File mtime, RFC-1123 formatted in UTC."""
        modification_time = \
            time.gmtime(os.path.getmtime(self._filesystem_path))
        return time.strftime("%a, %d %b %Y %H:%M:%S +0000", modification_time)
    @property
    @contextmanager
    def props(self):
        """Context manager over the JSON properties dict; saves on exit
        only when the dict actually changed."""
        # On enter
        properties = {}
        if os.path.exists(self._props_path):
            with open(self._props_path) as prop_file:
                properties.update(json.load(prop_file))
        old_properties = properties.copy()
        yield properties
        # On exit
        self._create_dirs()
        if old_properties != properties:
            with open(self._props_path, "w") as prop_file:
                json.dump(properties, prop_file)
| ./CrossVul/dataset_final_sorted/CWE-21/py/good_4903_1 |
crossvul-python_data_good_4903_0 | # -*- coding: utf-8 -*-
#
# This file is part of Radicale Server - Calendar Server
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
"""
Helper functions for working with paths
"""
import os
import posixpath
from . import log
def sanitize_path(path):
    """Make absolute (with leading slash) to prevent access to other data.

    Preserves a potential trailing slash.
    """
    trailing_slash = "/" if path.endswith("/") else ""
    path = posixpath.normpath(path)
    new_path = "/"
    for part in path.split("/"):
        if not part or part in (".", ".."):
            # Drop empty components and any "." / ".." left by normpath.
            continue
        new_path = posixpath.join(new_path, part)
    # BUG FIX: this reset belongs after the loop (as upstream Radicale
    # has it); indented inside the loop it never ran for paths that
    # reduce to "/", so sanitize_path("/") returned "//".
    trailing_slash = "" if new_path.endswith("/") else trailing_slash
    return new_path + trailing_slash
def is_safe_filesystem_path_component(path):
    """Return ``True`` when ``path`` is one single, join-safe component
    of a local filesystem path."""
    if not path:
        # The empty string is never a valid component.
        return False
    drive = os.path.splitdrive(path)[0]
    head = os.path.split(path)[0]
    # Reject drive prefixes, multi-component paths and the special
    # "current" / "parent" directory names.
    return not drive and not head and path not in (os.curdir, os.pardir)
def path_to_filesystem(path, base_folder):
    """Map a sanitized virtual ``path`` onto ``base_folder`` on the
    local filesystem, raising ``ValueError`` for any unsafe component."""
    sane_path = sanitize_path(path).strip("/")
    if not sane_path:
        # The root maps straight onto the base folder.
        return base_folder
    result = base_folder
    for component in sane_path.split("/"):
        if not is_safe_filesystem_path_component(component):
            log.LOGGER.debug("Can't translate path safely to filesystem: %s",
                             path)
            raise ValueError("Unsafe path")
        result = os.path.join(result, component)
    return result
crossvul-python_data_bad_4903_0 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-21/py/bad_4903_0 |
crossvul-python_data_good_4903_2 | # -*- coding: utf-8 -*-
#
# This file is part of Radicale Server - Calendar Server
# Copyright © 2014 Jean-Marc Martins
# Copyright © 2014-2015 Guillaume Ayoub
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
"""
Multi files per calendar filesystem storage backend.
"""
import os
import shutil
import time
import sys
from . import filesystem
from .. import ical
from .. import log
from .. import pathutils
class Collection(filesystem.Collection):
    """Collection stored in several files per calendar."""

    def _create_dirs(self):
        """Create the folder backing this collection if absent."""
        if not os.path.exists(self._filesystem_path):
            os.makedirs(self._filesystem_path)

    @property
    def headers(self):
        """iCal headers prepended to every serialized component."""
        return (
            ical.Header("PRODID:-//Radicale//NONSGML Radicale Server//EN"),
            ical.Header("VERSION:%s" % self.version))

    def write(self):
        """Write each component of the collection to its own file."""
        self._create_dirs()
        for component in self.components:
            text = ical.serialize(
                self.tag, self.headers, [component] + self.timezones)
            # Python 2 needs filesystem-encoded byte filenames.
            name = (
                component.name if sys.version_info[0] >= 3 else
                component.name.encode(filesystem.FILESYSTEM_ENCODING))
            filesystem_path = os.path.join(self._filesystem_path, name)
            with filesystem.open(filesystem_path, "w") as fd:
                fd.write(text)

    def delete(self):
        """Remove the collection folder and its properties file."""
        shutil.rmtree(self._filesystem_path)
        os.remove(self._props_path)

    def remove(self, name):
        """Remove a single item file; missing files are ignored."""
        filesystem_path = os.path.join(self._filesystem_path, name)
        if os.path.exists(filesystem_path):
            os.remove(filesystem_path)

    @property
    def text(self):
        """Whole collection re-serialized from the per-item files."""
        components = (
            ical.Timezone, ical.Event, ical.Todo, ical.Journal, ical.Card)
        items = set()
        try:
            filenames = os.listdir(self._filesystem_path)
        except (OSError, IOError) as e:
            log.LOGGER.info('Error while reading collection %r: %r'
                            % (self._filesystem_path, e))
            return ""
        for filename in filenames:
            path = os.path.join(self._filesystem_path, filename)
            try:
                with filesystem.open(path) as fd:
                    items.update(self._parse(fd.read(), components))
            except (OSError, IOError) as e:
                # Unreadable items are skipped, not fatal.
                log.LOGGER.warning('Error while reading item %r: %r'
                                   % (path, e))
        return ical.serialize(
            self.tag, self.headers, sorted(items, key=lambda x: x.name))

    @classmethod
    def is_node(cls, path):
        """True for a directory without a ".props" marker (inner node)."""
        filesystem_path = pathutils.path_to_filesystem(path,
                                                       filesystem.FOLDER)
        return (os.path.isdir(filesystem_path) and
                not os.path.exists(filesystem_path + ".props"))

    @classmethod
    def is_leaf(cls, path):
        """True for a directory with a ".props" marker (a calendar)."""
        filesystem_path = pathutils.path_to_filesystem(path,
                                                       filesystem.FOLDER)
        # BUG FIX: the props check must use the translated filesystem
        # path, not the raw virtual request path, to stay consistent
        # with is_node and _props_path above.
        return (os.path.isdir(filesystem_path) and
                os.path.exists(filesystem_path + ".props"))

    @property
    def last_modified(self):
        """Mtime of the newest item file, RFC-1123 formatted in UTC."""
        last = max([
            os.path.getmtime(os.path.join(self._filesystem_path, filename))
            for filename in os.listdir(self._filesystem_path)] or [0])
        return time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(last))
crossvul-python_data_bad_3768_1 | import os
import re
import urllib
from django.conf import settings
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.sites.models import Site, RequestSite
from django.contrib.auth.models import User
from django.test import TestCase
from django.core import mail
from django.core.urlresolvers import reverse
from django.http import QueryDict
class AuthViewsTestCase(TestCase):
    """
    Helper base class for all the follow test cases.
    """
    # Fixture providing the 'testclient' / staff users used throughout.
    fixtures = ['authtestdata.json']
    # URLconf dedicated to exercising the contrib.auth views.
    urls = 'django.contrib.auth.tests.urls'
    def setUp(self):
        # Pin language settings and template dirs so rendered responses
        # are deterministic; originals are restored in tearDown().
        self.old_LANGUAGES = settings.LANGUAGES
        self.old_LANGUAGE_CODE = settings.LANGUAGE_CODE
        settings.LANGUAGES = (('en', 'English'),)
        settings.LANGUAGE_CODE = 'en'
        self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
        settings.TEMPLATE_DIRS = (
            os.path.join(os.path.dirname(__file__), 'templates'),
        )
    def tearDown(self):
        # Restore the settings patched in setUp().
        settings.LANGUAGES = self.old_LANGUAGES
        settings.LANGUAGE_CODE = self.old_LANGUAGE_CODE
        settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
    def login(self, password='password'):
        # Log the fixture user in, then assert the redirect to
        # LOGIN_REDIRECT_URL and that a session key now exists.
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': password
            }
        )
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith(settings.LOGIN_REDIRECT_URL))
        self.assertTrue(SESSION_KEY in self.client.session)
class PasswordResetTest(AuthViewsTestCase):
    """Tests for the password_reset / password_reset_confirm view flow."""
    def test_email_not_found(self):
        "Error is raised if the provided email address isn't currently registered"
        response = self.client.get('/password_reset/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
        self.assertContains(response, "That e-mail address doesn't have an associated user account")
        self.assertEqual(len(mail.outbox), 0)
    def test_email_found(self):
        "Email is sent if a valid email address is provided for password reset"
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertTrue("http://" in mail.outbox[0].body)
        self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
    def test_email_found_custom_from(self):
        "Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
        response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)
    def _test_confirm_start(self):
        # Trigger a reset email; return (full_url, path) parsed from it.
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])
    def _read_signup_email(self, email):
        # Extract the confirmation URL embedded in the email body.
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertTrue(urlmatch is not None, "No URL found in sent email")
        return urlmatch.group(), urlmatch.groups()[0]
    def test_confirm_valid(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertEqual(response.status_code, 200)
        self.assertTrue("Please enter your new password" in response.content)
    def test_confirm_invalid(self):
        url, path = self._test_confirm_start()
        # Let's munge the token in the path, but keep the same length,
        # in case the URLconf will reject a different length.
        path = path[:-5] + ("0"*4) + path[-1]
        response = self.client.get(path)
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The password reset link was invalid" in response.content)
    def test_confirm_invalid_user(self):
        # Ensure that we get a 200 response for a non-existant user, not a 404
        response = self.client.get('/reset/123456-1-1/')
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The password reset link was invalid" in response.content)
    def test_confirm_overflow_user(self):
        # Ensure that we get a 200 response for a base36 user id that overflows int
        response = self.client.get('/reset/zzzzzzzzzzzzz-1-1/')
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The password reset link was invalid" in response.content)
    def test_confirm_invalid_post(self):
        # Same as test_confirm_invalid, but trying
        # to do a POST instead.
        url, path = self._test_confirm_start()
        path = path[:-5] + ("0"*4) + path[-1]
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2':' anewpassword'})
        # Check the password has not been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(not u.check_password("anewpassword"))
    def test_confirm_complete(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'anewpassword'})
        # It redirects us to a 'complete' page:
        self.assertEqual(response.status_code, 302)
        # Check the password has been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(u.check_password("anewpassword"))
        # Check we can't use the link again
        response = self.client.get(path)
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The password reset link was invalid" in response.content)
    def test_confirm_different_passwords(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2':' x'})
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The two password fields didn't match" in response.content)
class ChangePasswordTest(AuthViewsTestCase):
    """Tests for the password_change view."""
    def fail_login(self, password='password'):
        # Assert that a login attempt with ``password`` is rejected.
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': password
            }
        )
        self.assertEqual(response.status_code, 200)
        self.assertTrue("Please enter a correct username and password. Note that both fields are case-sensitive." in response.content)
    def logout(self):
        response = self.client.get('/logout/')
    def test_password_change_fails_with_invalid_old_password(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'donuts',
            'new_password1': 'password1',
            'new_password2': 'password1',
            }
        )
        self.assertEqual(response.status_code, 200)
        self.assertTrue("Your old password was entered incorrectly. Please enter it again." in response.content)
    def test_password_change_fails_with_mismatched_passwords(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'donuts',
            }
        )
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The two password fields didn't match." in response.content)
    def test_password_change_succeeds(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
            }
        )
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/password_change/done/'))
        # The old password must no longer work, the new one must.
        self.fail_login()
        self.login(password='password1')
class LoginTest(AuthViewsTestCase):
    """Tests for the login view, including the 'next' redirect check."""
    def test_current_site_in_context_after_login(self):
        response = self.client.get(reverse('django.contrib.auth.views.login'))
        self.assertEqual(response.status_code, 200)
        if Site._meta.installed:
            site = Site.objects.get_current()
            self.assertEqual(response.context['site'], site)
            self.assertEqual(response.context['site_name'], site.name)
        else:
            self.assertIsInstance(response.context['site'], RequestSite)
        self.assertTrue(isinstance(response.context['form'], AuthenticationForm),
                        'Login form is not an AuthenticationForm')
    def test_security_check(self, password='password'):
        # NOTE(review): this block-list only covers a few absolute-URL
        # shapes; later Django versions validate redirect targets with a
        # scheme/host allow-list (is_safe_url) — confirm against current
        # upstream before relying on these cases as exhaustive.
        login_url = reverse('django.contrib.auth.views.login')
        # Those URLs should not pass the security check
        for bad_url in ('http://example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        '//example.com'):
            nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                'url': login_url,
                'next': REDIRECT_FIELD_NAME,
                'bad_url': urllib.quote(bad_url)
            }
            response = self.client.post(nasty_url, {
                'username': 'testclient',
                'password': password,
            }
            )
            self.assertEqual(response.status_code, 302)
            self.assertFalse(bad_url in response['Location'],
                             "%s should be blocked" % bad_url)
        # These URLs *should* still pass the security check
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https:///',
                         '//testserver/',
                         '/url%20with%20spaces/', # see ticket #12534
                         ):
            safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                'url': login_url,
                'next': REDIRECT_FIELD_NAME,
                'good_url': urllib.quote(good_url)
            }
            response = self.client.post(safe_url, {
                'username': 'testclient',
                'password': password,
            }
            )
            self.assertEqual(response.status_code, 302)
            self.assertTrue(good_url in response['Location'],
                            "%s should be allowed" % good_url)
class LoginURLSettings(AuthViewsTestCase):
    """Tests for login_required redirects under various LOGIN_URL values."""
    urls = 'django.contrib.auth.tests.urls'
    def setUp(self):
        super(LoginURLSettings, self).setUp()
        # Remember LOGIN_URL so each test can override it freely.
        self.old_LOGIN_URL = settings.LOGIN_URL
    def tearDown(self):
        super(LoginURLSettings, self).tearDown()
        settings.LOGIN_URL = self.old_LOGIN_URL
    def get_login_required_url(self, login_url):
        # Hit a protected view and return the redirect target produced
        # for the given LOGIN_URL.
        settings.LOGIN_URL = login_url
        response = self.client.get('/login_required/')
        self.assertEqual(response.status_code, 302)
        return response['Location']
    def test_standard_login_url(self):
        login_url = '/login/'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = '/login_required/'
        self.assertEqual(login_required_url,
                         'http://testserver%s?%s' % (login_url, querystring.urlencode('/')))
    def test_remote_login_url(self):
        login_url = 'http://remote.example.com/login'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = 'http://testserver/login_required/'
        self.assertEqual(login_required_url,
                         '%s?%s' % (login_url, querystring.urlencode('/')))
    def test_https_login_url(self):
        login_url = 'https:///login/'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = 'http://testserver/login_required/'
        self.assertEqual(login_required_url,
                         '%s?%s' % (login_url, querystring.urlencode('/')))
    def test_login_url_with_querystring(self):
        login_url = '/login/?pretty=1'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('pretty=1', mutable=True)
        querystring['next'] = '/login_required/'
        self.assertEqual(login_required_url, 'http://testserver/login/?%s' %
                         querystring.urlencode('/'))
    def test_remote_login_url_with_next_querystring(self):
        login_url = 'http://remote.example.com/login/'
        login_required_url = self.get_login_required_url('%s?next=/default/' %
                                                         login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = 'http://testserver/login_required/'
        self.assertEqual(login_required_url, '%s?%s' % (login_url,
                                                        querystring.urlencode('/')))
class LogoutTest(AuthViewsTestCase):
    """Tests for the logout view and its 'next' redirect handling."""
    urls = 'django.contrib.auth.tests.urls'
    def confirm_logged_out(self):
        # The session must no longer carry an authenticated user key.
        self.assertTrue(SESSION_KEY not in self.client.session)
    def test_logout_default(self):
        "Logout without next_page option renders the default template"
        self.login()
        response = self.client.get('/logout/')
        self.assertEqual(200, response.status_code)
        self.assertTrue('Logged out' in response.content)
        self.confirm_logged_out()
    def test_14377(self):
        # Bug 14377
        self.login()
        response = self.client.get('/logout/')
        self.assertTrue('site' in response.context)
    def test_logout_with_overridden_redirect_url(self):
        # Bug 11223
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/somewhere/'))
        response = self.client.get('/logout/next_page/?next=/login/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/login/'))
        self.confirm_logged_out()
    def test_logout_with_next_page_specified(self):
        "Logout with next_page option given redirects to specified resource"
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/somewhere/'))
        self.confirm_logged_out()
    def test_logout_with_redirect_argument(self):
        "Logout with query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/?next=/login/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/login/'))
        self.confirm_logged_out()
    def test_logout_with_custom_redirect_argument(self):
        "Logout with custom query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/custom_query/?follow=/somewhere/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/somewhere/'))
        self.confirm_logged_out()
    def test_security_check(self, password='password'):
        # NOTE(review): same caveat as LoginTest.test_security_check —
        # the blocked list here is not exhaustive compared with later
        # Django redirect validation; confirm against current upstream.
        logout_url = reverse('django.contrib.auth.views.logout')
        # Those URLs should not pass the security check
        for bad_url in ('http://example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        '//example.com'
                        ):
            nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                'url': logout_url,
                'next': REDIRECT_FIELD_NAME,
                'bad_url': urllib.quote(bad_url)
            }
            self.login()
            response = self.client.get(nasty_url)
            self.assertEqual(response.status_code, 302)
            self.assertFalse(bad_url in response['Location'],
                             "%s should be blocked" % bad_url)
            self.confirm_logged_out()
        # These URLs *should* still pass the security check
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https:///',
                         '//testserver/',
                         '/url%20with%20spaces/', # see ticket #12534
                         ):
            safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                'url': logout_url,
                'next': REDIRECT_FIELD_NAME,
                'good_url': urllib.quote(good_url)
            }
            self.login()
            response = self.client.get(safe_url)
            self.assertEqual(response.status_code, 302)
            self.assertTrue(good_url in response['Location'],
                            "%s should be allowed" % good_url)
            self.confirm_logged_out()
crossvul-python_data_bad_3768_3 | import datetime
import os
import re
import time
from pprint import pformat
from urllib import urlencode, quote
from urlparse import urljoin, urlparse
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
# The mod_python version is more efficient, so try importing it first.
from mod_python.util import parse_qsl
except ImportError:
try:
# Python 2.6 and greater
from urlparse import parse_qsl
except ImportError:
# Python 2.5, 2.4. Works on Python 2.6 but raises
# PendingDeprecationWarning
from cgi import parse_qsl
import Cookie

# --- Stdlib Cookie capability probes ---------------------------------------
# Each probe detects a known deficiency of the Python 2 ``Cookie`` module;
# the results decide below whether the stock SimpleCookie can be used or a
# patched subclass is needed.

# httponly support exists in Python 2.6's Cookie library,
# but not in Python 2.4 or 2.5.
_morsel_supports_httponly = Cookie.Morsel._reserved.has_key('httponly')
# Some versions of Python 2.7 and later won't need this encoding bug fix:
_cookie_encodes_correctly = Cookie.SimpleCookie().value_encode(';') == (';', '"\\073"')
# See ticket #13007, http://bugs.python.org/issue2193 and http://trac.edgewall.org/ticket/2256
_tc = Cookie.SimpleCookie()
_tc.load('f:oo')
_cookie_allows_colon_in_names = 'Set-Cookie: f:oo=' in _tc.output()

if _morsel_supports_httponly and _cookie_encodes_correctly and _cookie_allows_colon_in_names:
    # The stdlib implementation is fully usable as-is.
    SimpleCookie = Cookie.SimpleCookie
else:
    if not _morsel_supports_httponly:
        class Morsel(Cookie.Morsel):
            # Patched Morsel that accepts the non-RFC 'httponly' attribute.
            def __setitem__(self, K, V):
                K = K.lower()
                if K == "httponly":
                    if V:
                        # The superclass rejects httponly as a key,
                        # so we jump to the grandparent.
                        super(Cookie.Morsel, self).__setitem__(K, V)
                else:
                    super(Morsel, self).__setitem__(K, V)

            def OutputString(self, attrs=None):
                output = super(Morsel, self).OutputString(attrs)
                if "httponly" in self:
                    output += "; httponly"
                return output

    class SimpleCookie(Cookie.SimpleCookie):
        # Subclass that works around whichever deficiencies were detected
        # above; each feature gap gets its own conditional override.
        if not _morsel_supports_httponly:
            def __set(self, key, real_value, coded_value):
                # Mirror of BaseCookie's private __set, but producing the
                # patched Morsel defined above.
                M = self.get(key, Morsel())
                M.set(key, real_value, coded_value)
                dict.__setitem__(self, key, M)

            def __setitem__(self, key, value):
                rval, cval = self.value_encode(value)
                self.__set(key, rval, cval)

        if not _cookie_encodes_correctly:
            def value_encode(self, val):
                # Some browsers do not support quoted-string from RFC 2109,
                # including some versions of Safari and Internet Explorer.
                # These browsers split on ';', and some versions of Safari
                # are known to split on ', '. Therefore, we encode ';' and ','

                # SimpleCookie already does the hard work of encoding and decoding.
                # It uses octal sequences like '\\012' for newline etc.
                # and non-ASCII chars. We just make use of this mechanism, to
                # avoid introducing two encoding schemes which would be confusing
                # and especially awkward for javascript.

                # NB, contrary to Python docs, value_encode returns a tuple containing
                # (real val, encoded_val)
                val, encoded = super(SimpleCookie, self).value_encode(val)

                encoded = encoded.replace(";", "\\073").replace(",","\\054")
                # If encoded now contains any quoted chars, we need double quotes
                # around the whole string.
                if "\\" in encoded and not encoded.startswith('"'):
                    encoded = '"' + encoded + '"'

                return val, encoded

        if not _cookie_allows_colon_in_names:
            def load(self, rawdata, ignore_parse_errors=False):
                # ``ignore_parse_errors`` swaps in a lenient setter that
                # records (rather than raises on) unparseable cookie keys,
                # then strips those keys out afterwards.
                if ignore_parse_errors:
                    self.bad_cookies = set()
                    self._BaseCookie__set = self._loose_set
                super(SimpleCookie, self).load(rawdata)
                if ignore_parse_errors:
                    self._BaseCookie__set = self._strict_set
                    for key in self.bad_cookies:
                        del self[key]

            _strict_set = Cookie.BaseCookie._BaseCookie__set

            def _loose_set(self, key, real_value, coded_value):
                try:
                    self._strict_set(key, real_value, coded_value)
                except Cookie.CookieError:
                    # Remember the bad key and park a placeholder Morsel so
                    # the post-load cleanup can delete it.
                    self.bad_cookies.add(key)
                    dict.__setitem__(self, key, Cookie.Morsel())
class CompatCookie(SimpleCookie):
    """Deprecated alias of SimpleCookie, kept for backwards compatibility."""
    def __init__(self, *args, **kwargs):
        super(CompatCookie, self).__init__(*args, **kwargs)
        # Imported lazily so merely importing this module stays warning-free.
        import warnings
        warnings.warn("CompatCookie is deprecated, use django.http.SimpleCookie instead.",
                      PendingDeprecationWarning)
from django.core.exceptions import SuspiciousOperation
from django.utils.datastructures import MultiValueDict, ImmutableList
from django.utils.encoding import smart_str, iri_to_uri, force_unicode
from django.utils.http import cookie_date
from django.http.multipartparser import MultiPartParser
from django.conf import settings
from django.core.files import uploadhandler
from utils import *
# Characters reserved by RFC 3986 that must not be percent-encoded when
# rebuilding URLs.
RESERVED_CHARS="!*'();:@&=+$,/?%#[]"
# Detects locations that are already absolute http(s) URLs.
absolute_http_url_re = re.compile(r"^https?://", re.I)
class Http404(Exception):
    """Raised when a requested resource cannot be found (rendered as HTTP 404)."""
class HttpRequest(object):
    """A basic HTTP request."""

    # The encoding used in GET/POST dicts. None means use default setting.
    _encoding = None
    _upload_handlers = []

    def __init__(self):
        self.GET, self.POST, self.COOKIES, self.META, self.FILES = {}, {}, {}, {}, {}
        self.path = ''
        self.path_info = ''
        self.method = None

    def __repr__(self):
        return '<HttpRequest\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
            (pformat(self.GET), pformat(self.POST), pformat(self.COOKIES),
            pformat(self.META))

    def get_host(self):
        """Returns the HTTP host using the environment or request headers."""
        # We try three options, in order of decreasing preference.
        if settings.USE_X_FORWARDED_HOST and (
            'HTTP_X_FORWARDED_HOST' in self.META):
            host = self.META['HTTP_X_FORWARDED_HOST']
        elif 'HTTP_HOST' in self.META:
            host = self.META['HTTP_HOST']
        else:
            # Reconstruct the host using the algorithm from PEP 333.
            host = self.META['SERVER_NAME']
            server_port = str(self.META['SERVER_PORT'])
            if server_port != (self.is_secure() and '443' or '80'):
                host = '%s:%s' % (host, server_port)
        # NOTE(review): the value comes straight from client-controlled
        # headers with no validation; later Django releases validate the
        # host here to prevent host-header attacks -- confirm whether that
        # hardening should be applied to this branch.
        return host

    def get_full_path(self):
        # RFC 3986 requires query string arguments to be in the ASCII range.
        # Rather than crash if this doesn't happen, we encode defensively.
        return '%s%s' % (self.path, self.META.get('QUERY_STRING', '') and ('?' + iri_to_uri(self.META.get('QUERY_STRING', ''))) or '')

    def build_absolute_uri(self, location=None):
        """
        Builds an absolute URI from the location and the variables available in
        this request. If no location is specified, the absolute URI is built on
        ``request.get_full_path()``.
        """
        if not location:
            location = self.get_full_path()
        if not absolute_http_url_re.match(location):
            current_uri = '%s://%s%s' % (self.is_secure() and 'https' or 'http',
                                         self.get_host(), self.path)
            location = urljoin(current_uri, location)
        return iri_to_uri(location)

    def is_secure(self):
        # Base implementation; request subclasses override with server info.
        return os.environ.get("HTTPS") == "on"

    def is_ajax(self):
        # Set by XMLHttpRequest-based JS libraries (jQuery et al.).
        return self.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest'

    def _set_encoding(self, val):
        """
        Sets the encoding used for GET/POST accesses. If the GET or POST
        dictionary has already been created, it is removed and recreated on the
        next access (so that it is decoded correctly).
        """
        self._encoding = val
        if hasattr(self, '_get'):
            del self._get
        if hasattr(self, '_post'):
            del self._post

    def _get_encoding(self):
        return self._encoding

    encoding = property(_get_encoding, _set_encoding)

    def _initialize_handlers(self):
        # Instantiate the upload handler chain declared in settings.
        self._upload_handlers = [uploadhandler.load_handler(handler, self)
                                 for handler in settings.FILE_UPLOAD_HANDLERS]

    def _set_upload_handlers(self, upload_handlers):
        if hasattr(self, '_files'):
            raise AttributeError("You cannot set the upload handlers after the upload has been processed.")
        self._upload_handlers = upload_handlers

    def _get_upload_handlers(self):
        if not self._upload_handlers:
            # If thre are no upload handlers defined, initialize them from settings.
            self._initialize_handlers()
        return self._upload_handlers

    upload_handlers = property(_get_upload_handlers, _set_upload_handlers)

    def parse_file_upload(self, META, post_data):
        """Returns a tuple of (POST QueryDict, FILES MultiValueDict)."""
        # Freeze the handler list: swapping handlers mid-upload is unsafe.
        self.upload_handlers = ImmutableList(
            self.upload_handlers,
            warning = "You cannot alter upload handlers after the upload has been processed."
        )
        parser = MultiPartParser(META, post_data, self.upload_handlers, self.encoding)
        return parser.parse()

    def _get_raw_post_data(self):
        # Cached raw request body. Must be accessed before anything else
        # consumes the underlying stream, otherwise we cannot replay it.
        if not hasattr(self, '_raw_post_data'):
            if self._read_started:
                raise Exception("You cannot access raw_post_data after reading from request's data stream")
            try:
                content_length = int(self.META.get('CONTENT_LENGTH', 0))
            except (ValueError, TypeError):
                # If CONTENT_LENGTH was empty string or not an integer, don't
                # error out. We've also seen None passed in here (against all
                # specs, but see ticket #8259), so we handle TypeError as well.
                content_length = 0
            if content_length:
                self._raw_post_data = self.read(content_length)
            else:
                self._raw_post_data = self.read()
            # Re-point the stream at the cached body so later reads replay it.
            self._stream = StringIO(self._raw_post_data)
        return self._raw_post_data
    raw_post_data = property(_get_raw_post_data)

    def _mark_post_parse_error(self):
        # Install empty POST/FILES so error reporting can safely touch them.
        self._post = QueryDict('')
        self._files = MultiValueDict()
        self._post_parse_error = True

    def _load_post_and_files(self):
        # Populates self._post and self._files
        if self.method != 'POST':
            self._post, self._files = QueryDict('', encoding=self._encoding), MultiValueDict()
            return
        if self._read_started and not hasattr(self, '_raw_post_data'):
            self._mark_post_parse_error()
            return
        if self.META.get('CONTENT_TYPE', '').startswith('multipart'):
            if hasattr(self, '_raw_post_data'):
                # Use already read data
                data = StringIO(self._raw_post_data)
            else:
                data = self
            try:
                self._post, self._files = self.parse_file_upload(self.META, data)
            except:
                # An error occured while parsing POST data. Since when
                # formatting the error the request handler might access
                # self.POST, set self._post and self._file to prevent
                # attempts to parse POST data again.
                # Mark that an error occured. This allows self.__repr__ to
                # be explicit about it instead of simply representing an
                # empty POST
                self._mark_post_parse_error()
                raise
        else:
            self._post, self._files = QueryDict(self.raw_post_data, encoding=self._encoding), MultiValueDict()

    ## File-like and iterator interface.
    ##
    ## Expects self._stream to be set to an appropriate source of bytes by
    ## a corresponding request subclass (WSGIRequest or ModPythonRequest).
    ## Also when request data has already been read by request.POST or
    ## request.raw_post_data, self._stream points to a StringIO instance
    ## containing that data.

    def read(self, *args, **kwargs):
        self._read_started = True
        return self._stream.read(*args, **kwargs)

    def readline(self, *args, **kwargs):
        self._read_started = True
        return self._stream.readline(*args, **kwargs)

    def xreadlines(self):
        # Generator over lines, stopping at EOF (empty string).
        while True:
            buf = self.readline()
            if not buf:
                break
            yield buf
    __iter__ = xreadlines

    def readlines(self):
        return list(iter(self))
class QueryDict(MultiValueDict):
    """
    A specialized MultiValueDict that takes a query string when initialized.
    This is immutable unless you create a copy of it.

    Values retrieved from this class are converted from the given encoding
    (DEFAULT_CHARSET by default) to unicode.
    """
    # These are both reset in __init__, but is specified here at the class
    # level so that unpickling will have valid values
    _mutable = True
    _encoding = None

    def __init__(self, query_string, mutable=False, encoding=None):
        MultiValueDict.__init__(self)
        if not encoding:
            # *Important*: do not import settings any earlier because of note
            # in core.handlers.modpython.
            from django.conf import settings
            encoding = settings.DEFAULT_CHARSET
        self.encoding = encoding
        for key, value in parse_qsl((query_string or ''), True): # keep_blank_values=True
            self.appendlist(force_unicode(key, encoding, errors='replace'),
                            force_unicode(value, encoding, errors='replace'))
        self._mutable = mutable

    def _get_encoding(self):
        if self._encoding is None:
            # *Important*: do not import settings at the module level because
            # of the note in core.handlers.modpython.
            from django.conf import settings
            self._encoding = settings.DEFAULT_CHARSET
        return self._encoding

    def _set_encoding(self, value):
        self._encoding = value

    encoding = property(_get_encoding, _set_encoding)

    def _assert_mutable(self):
        # Guard called by every mutator; copies created via copy() are mutable.
        if not self._mutable:
            raise AttributeError("This QueryDict instance is immutable")

    def __setitem__(self, key, value):
        self._assert_mutable()
        # Keys and values are normalized to unicode on the way in.
        key = str_to_unicode(key, self.encoding)
        value = str_to_unicode(value, self.encoding)
        MultiValueDict.__setitem__(self, key, value)

    def __delitem__(self, key):
        self._assert_mutable()
        super(QueryDict, self).__delitem__(key)

    def __copy__(self):
        result = self.__class__('', mutable=True, encoding=self.encoding)
        for key, value in dict.items(self):
            dict.__setitem__(result, key, value)
        return result

    def __deepcopy__(self, memo):
        import django.utils.copycompat as copy
        result = self.__class__('', mutable=True, encoding=self.encoding)
        memo[id(self)] = result
        for key, value in dict.items(self):
            dict.__setitem__(result, copy.deepcopy(key, memo), copy.deepcopy(value, memo))
        return result

    def setlist(self, key, list_):
        self._assert_mutable()
        key = str_to_unicode(key, self.encoding)
        list_ = [str_to_unicode(elt, self.encoding) for elt in list_]
        MultiValueDict.setlist(self, key, list_)

    def setlistdefault(self, key, default_list=()):
        self._assert_mutable()
        if key not in self:
            self.setlist(key, default_list)
        return MultiValueDict.getlist(self, key)

    def appendlist(self, key, value):
        self._assert_mutable()
        key = str_to_unicode(key, self.encoding)
        value = str_to_unicode(value, self.encoding)
        MultiValueDict.appendlist(self, key, value)

    def update(self, other_dict):
        self._assert_mutable()
        f = lambda s: str_to_unicode(s, self.encoding)
        if hasattr(other_dict, 'lists'):
            # Another MultiValueDict-like: preserve every value per key.
            for key, valuelist in other_dict.lists():
                for value in valuelist:
                    MultiValueDict.update(self, {f(key): f(value)})
        else:
            d = dict([(f(k), f(v)) for k, v in other_dict.items()])
            MultiValueDict.update(self, d)

    def pop(self, key, *args):
        self._assert_mutable()
        return MultiValueDict.pop(self, key, *args)

    def popitem(self):
        self._assert_mutable()
        return MultiValueDict.popitem(self)

    def clear(self):
        self._assert_mutable()
        MultiValueDict.clear(self)

    def setdefault(self, key, default=None):
        self._assert_mutable()
        key = str_to_unicode(key, self.encoding)
        default = str_to_unicode(default, self.encoding)
        return MultiValueDict.setdefault(self, key, default)

    def copy(self):
        """Returns a mutable copy of this object."""
        return self.__deepcopy__({})

    def urlencode(self, safe=None):
        """
        Returns an encoded string of all query string arguments.

        :arg safe: Used to specify characters which do not require quoting, for
            example::

                >>> q = QueryDict('', mutable=True)
                >>> q['next'] = '/a&b/'
                >>> q.urlencode()
                'next=%2Fa%26b%2F'
                >>> q.urlencode(safe='/')
                'next=/a%26b/'
        """
        output = []
        if safe:
            encode = lambda k, v: '%s=%s' % ((quote(k, safe), quote(v, safe)))
        else:
            encode = lambda k, v: urlencode({k: v})
        for k, list_ in self.lists():
            k = smart_str(k, self.encoding)
            output.extend([encode(k, smart_str(v, self.encoding))
                           for v in list_])
        return '&'.join(output)
def parse_cookie(cookie):
    """Return a {name: value} dict parsed from a raw Cookie header.

    ``cookie`` may be a raw header string, an existing
    ``Cookie.BaseCookie`` instance, or empty/None. Unparseable input
    yields an empty dict rather than raising.
    """
    if not cookie:
        # Previously only '' was short-circuited; None (or any other falsy
        # non-string) fell through into BaseCookie.load and crashed with an
        # AttributeError. Treat all falsy input as "no cookies".
        return {}
    if not isinstance(cookie, Cookie.BaseCookie):
        try:
            c = SimpleCookie()
            # NOTE(review): when the stock Cookie.SimpleCookie passed all
            # capability probes above, its load() does not accept
            # ignore_parse_errors -- confirm the patched subclass is always
            # selected on the supported Python versions.
            c.load(cookie, ignore_parse_errors=True)
        except Cookie.CookieError:
            # Invalid cookie
            return {}
    else:
        c = cookie
    cookiedict = {}
    for key in c.keys():
        cookiedict[key] = c.get(key).value
    return cookiedict
class BadHeaderError(ValueError):
    """Raised when a header name or value contains illegal characters (e.g. newlines)."""
class HttpResponse(object):
"""A basic HTTP response, with content and dictionary-accessed headers."""
status_code = 200
def __init__(self, content='', mimetype=None, status=None,
content_type=None):
# _headers is a mapping of the lower-case name to the original case of
# the header (required for working with legacy systems) and the header
# value. Both the name of the header and its value are ASCII strings.
self._headers = {}
self._charset = settings.DEFAULT_CHARSET
if mimetype:
content_type = mimetype # For backwards compatibility
if not content_type:
content_type = "%s; charset=%s" % (settings.DEFAULT_CONTENT_TYPE,
self._charset)
if not isinstance(content, basestring) and hasattr(content, '__iter__'):
self._container = content
self._is_string = False
else:
self._container = [content]
self._is_string = True
self.cookies = SimpleCookie()
if status:
self.status_code = status
self['Content-Type'] = content_type
def __str__(self):
"""Full HTTP message, including headers."""
return '\n'.join(['%s: %s' % (key, value)
for key, value in self._headers.values()]) \
+ '\n\n' + self.content
def _convert_to_ascii(self, *values):
"""Converts all values to ascii strings."""
for value in values:
if isinstance(value, unicode):
try:
value = value.encode('us-ascii')
except UnicodeError, e:
e.reason += ', HTTP response headers must be in US-ASCII format'
raise
else:
value = str(value)
if '\n' in value or '\r' in value:
raise BadHeaderError("Header values can't contain newlines (got %r)" % (value))
yield value
def __setitem__(self, header, value):
header, value = self._convert_to_ascii(header, value)
self._headers[header.lower()] = (header, value)
def __delitem__(self, header):
try:
del self._headers[header.lower()]
except KeyError:
pass
def __getitem__(self, header):
return self._headers[header.lower()][1]
def has_header(self, header):
"""Case-insensitive check for a header."""
return self._headers.has_key(header.lower())
__contains__ = has_header
def items(self):
return self._headers.values()
def get(self, header, alternate):
return self._headers.get(header.lower(), (None, alternate))[1]
def set_cookie(self, key, value='', max_age=None, expires=None, path='/',
domain=None, secure=False, httponly=False):
"""
Sets a cookie.
``expires`` can be a string in the correct format or a
``datetime.datetime`` object in UTC. If ``expires`` is a datetime
object then ``max_age`` will be calculated.
"""
self.cookies[key] = value
if expires is not None:
if isinstance(expires, datetime.datetime):
delta = expires - expires.utcnow()
# Add one second so the date matches exactly (a fraction of
# time gets lost between converting to a timedelta and
# then the date string).
delta = delta + datetime.timedelta(seconds=1)
# Just set max_age - the max_age logic will set expires.
expires = None
max_age = max(0, delta.days * 86400 + delta.seconds)
else:
self.cookies[key]['expires'] = expires
if max_age is not None:
self.cookies[key]['max-age'] = max_age
# IE requires expires, so set it if hasn't been already.
if not expires:
self.cookies[key]['expires'] = cookie_date(time.time() +
max_age)
if path is not None:
self.cookies[key]['path'] = path
if domain is not None:
self.cookies[key]['domain'] = domain
if secure:
self.cookies[key]['secure'] = True
if httponly:
self.cookies[key]['httponly'] = True
def delete_cookie(self, key, path='/', domain=None):
self.set_cookie(key, max_age=0, path=path, domain=domain,
expires='Thu, 01-Jan-1970 00:00:00 GMT')
def _get_content(self):
if self.has_header('Content-Encoding'):
return ''.join(self._container)
return smart_str(''.join(self._container), self._charset)
def _set_content(self, value):
self._container = [value]
self._is_string = True
content = property(_get_content, _set_content)
def __iter__(self):
self._iterator = iter(self._container)
return self
def next(self):
chunk = self._iterator.next()
if isinstance(chunk, unicode):
chunk = chunk.encode(self._charset)
return str(chunk)
def close(self):
if hasattr(self._container, 'close'):
self._container.close()
# The remaining methods partially implement the file-like object interface.
# See http://docs.python.org/lib/bltin-file-objects.html
def write(self, content):
if not self._is_string:
raise Exception("This %s instance is not writable" % self.__class__)
self._container.append(content)
def flush(self):
pass
def tell(self):
if not self._is_string:
raise Exception("This %s instance cannot tell its position" % self.__class__)
return sum([len(chunk) for chunk in self._container])
class HttpResponseRedirectBase(HttpResponse):
    """Common base for 301/302 responses with a scheme-whitelist check.

    Rejecting exotic schemes (javascript:, data:, ...) in the Location
    header prevents open-redirect/XSS abuse; scheme-less (relative) and
    scheme-relative URLs pass through.
    """
    allowed_schemes = ['http', 'https', 'ftp']

    def __init__(self, redirect_to):
        super(HttpResponseRedirectBase, self).__init__()
        parsed = urlparse(redirect_to)
        # parsed[0] is the scheme; empty for relative URLs (allowed).
        if parsed[0] and parsed[0] not in self.allowed_schemes:
            raise SuspiciousOperation("Unsafe redirect to URL with scheme '%s'" % parsed[0])
        self['Location'] = iri_to_uri(redirect_to)
# Thin subclasses of HttpResponse that only pin the status code (plus, for
# 405, the mandatory Allow header).

class HttpResponseRedirect(HttpResponseRedirectBase):
    status_code = 302

class HttpResponsePermanentRedirect(HttpResponseRedirectBase):
    status_code = 301

class HttpResponseNotModified(HttpResponse):
    status_code = 304

class HttpResponseBadRequest(HttpResponse):
    status_code = 400

class HttpResponseNotFound(HttpResponse):
    status_code = 404

class HttpResponseForbidden(HttpResponse):
    status_code = 403

class HttpResponseNotAllowed(HttpResponse):
    status_code = 405

    def __init__(self, permitted_methods):
        super(HttpResponseNotAllowed, self).__init__()
        # RFC 2616 requires a 405 to advertise the allowed methods.
        self['Allow'] = ', '.join(permitted_methods)

class HttpResponseGone(HttpResponse):
    status_code = 410

class HttpResponseServerError(HttpResponse):
    status_code = 500
# A backwards compatible alias for HttpRequest.get_host.
def get_host(request):
    """Deprecated module-level alias; delegates to ``HttpRequest.get_host()``."""
    return request.get_host()
# It's neither necessary nor appropriate to use
# django.utils.encoding.smart_unicode for parsing URLs and form inputs. Thus,
# this slightly more restricted function.
def str_to_unicode(s, encoding):
    """
    Converts basestring objects to unicode, using the given encoding. Illegally
    encoded input characters are replaced with Unicode "unknown" codepoint
    (\ufffd).

    Returns any non-basestring objects without change.
    """
    # Only byte strings need decoding; unicode objects (and non-strings such
    # as None) are returned untouched.
    if isinstance(s, str):
        return unicode(s, encoding, 'replace')
    else:
        return s
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3768_3 |
crossvul-python_data_bad_1739_2 | """Tornado handlers for the contents web service."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import json
from tornado import gen, web
from IPython.html.utils import url_path_join, url_escape
from IPython.utils.jsonutil import date_default
from IPython.html.base.handlers import (
IPythonHandler, APIHandler, json_errors, path_regex,
)
def sort_key(model):
    """Key function for case-insensitive sort by type, then by name.

    Directories sort first, then notebooks, then plain files; any other
    type sorts last. Within a type, entries order by lower-cased name.
    """
    rank = {'directory': '0', 'notebook': '1', 'file': '2'}
    prefix = rank.get(model['type'], '9')
    return u'%s%s' % (prefix, model['name'].lower())
def validate_model(model, expect_content):
    """
    Validate a model returned by a ContentsManager method.

    Every model must carry the full set of required keys. When
    ``expect_content`` is True, 'content' and 'format' (plus 'mimetype'
    for file models) must be non-null; when False, they must all be None.
    """
    required_keys = {
        "name", "path", "type", "writable", "created",
        "last_modified", "mimetype", "content", "format",
    }
    missing = required_keys.difference(model)
    if missing:
        raise web.HTTPError(
            500,
            u"Missing Model Keys: {missing}".format(missing=missing),
        )

    # 'mimetype' is only expected to be populated for file models.
    maybe_none_keys = ['content', 'format'] + (
        ['mimetype'] if model['type'] == 'file' else [])

    if expect_content:
        errors = [key for key in maybe_none_keys if model[key] is None]
        if errors:
            raise web.HTTPError(
                500,
                u"Keys unexpectedly None: {keys}".format(keys=errors),
            )
        return

    errors = dict(
        (key, model[key])
        for key in maybe_none_keys
        if model[key] is not None
    )
    if errors:
        raise web.HTTPError(
            500,
            u"Keys unexpectedly not None: {keys}".format(keys=errors),
        )
class ContentsHandler(APIHandler):
    # REST handler for /api/contents/<path>: CRUD operations on files,
    # notebooks and directories, delegating to the configured
    # ContentsManager and returning JSON models.

    SUPPORTED_METHODS = (u'GET', u'PUT', u'PATCH', u'POST', u'DELETE')

    def location_url(self, path):
        """Return the full URL location of a file.

        Parameters
        ----------
        path : unicode
            The API path of the file, such as "foo/bar.txt".
        """
        return url_escape(url_path_join(
            self.base_url, 'api', 'contents', path
        ))

    def _finish_model(self, model, location=True):
        """Finish a JSON request with a model, setting relevant headers, etc."""
        if location:
            location = self.location_url(model['path'])
            self.set_header('Location', location)
        self.set_header('Last-Modified', model['last_modified'])
        self.set_header('Content-Type', 'application/json')
        self.finish(json.dumps(model, default=date_default))

    @web.authenticated
    @json_errors
    @gen.coroutine
    def get(self, path=''):
        """Return a model for a file or directory.

        A directory model contains a list of models (without content)
        of the files and directories it contains.
        """
        path = path or ''
        # Validate the optional query arguments before touching the manager.
        type = self.get_query_argument('type', default=None)
        if type not in {None, 'directory', 'file', 'notebook'}:
            raise web.HTTPError(400, u'Type %r is invalid' % type)

        format = self.get_query_argument('format', default=None)
        if format not in {None, 'text', 'base64'}:
            raise web.HTTPError(400, u'Format %r is invalid' % format)
        content = self.get_query_argument('content', default='1')
        if content not in {'0', '1'}:
            raise web.HTTPError(400, u'Content %r is invalid' % content)
        content = int(content)

        # NOTE(review): the requested ``type`` is forwarded to the manager
        # but the returned model's type is not re-checked here -- confirm
        # the manager enforces it.
        model = yield gen.maybe_future(self.contents_manager.get(
            path=path, type=type, format=format, content=content,
        ))
        if model['type'] == 'directory' and content:
            # group listing by type, then by name (case-insensitive)
            # FIXME: sorting should be done in the frontends
            model['content'].sort(key=sort_key)
        validate_model(model, expect_content=content)
        self._finish_model(model, location=False)

    @web.authenticated
    @json_errors
    @gen.coroutine
    def patch(self, path=''):
        """PATCH renames a file or directory without re-uploading content."""
        cm = self.contents_manager
        model = self.get_json_body()
        if model is None:
            raise web.HTTPError(400, u'JSON body missing')
        model = yield gen.maybe_future(cm.update(model, path))
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @gen.coroutine
    def _copy(self, copy_from, copy_to=None):
        """Copy a file, optionally specifying a target directory."""
        self.log.info(u"Copying {copy_from} to {copy_to}".format(
            copy_from=copy_from,
            copy_to=copy_to or '',
        ))
        model = yield gen.maybe_future(self.contents_manager.copy(copy_from, copy_to))
        self.set_status(201)
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @gen.coroutine
    def _upload(self, model, path):
        """Handle upload of a new file to path"""
        self.log.info(u"Uploading file to %s", path)
        model = yield gen.maybe_future(self.contents_manager.new(model, path))
        self.set_status(201)
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @gen.coroutine
    def _new_untitled(self, path, type='', ext=''):
        """Create a new, empty untitled entity"""
        self.log.info(u"Creating new %s in %s", type or 'file', path)
        model = yield gen.maybe_future(self.contents_manager.new_untitled(path=path, type=type, ext=ext))
        self.set_status(201)
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @gen.coroutine
    def _save(self, model, path):
        """Save an existing file."""
        self.log.info(u"Saving file at %s", path)
        model = yield gen.maybe_future(self.contents_manager.save(model, path))
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @web.authenticated
    @json_errors
    @gen.coroutine
    def post(self, path=''):
        """Create a new file in the specified path.

        POST creates new files. The server always decides on the name.

        POST /api/contents/path
          New untitled, empty file or directory.
        POST /api/contents/path
          with body {"copy_from" : "/path/to/OtherNotebook.ipynb"}
          New copy of OtherNotebook in path
        """
        cm = self.contents_manager

        # POST targets a directory; files are modified via PUT.
        if cm.file_exists(path):
            raise web.HTTPError(400, "Cannot POST to files, use PUT instead.")

        if not cm.dir_exists(path):
            raise web.HTTPError(404, "No such directory: %s" % path)

        model = self.get_json_body()

        if model is not None:
            copy_from = model.get('copy_from')
            ext = model.get('ext', '')
            type = model.get('type', '')
            if copy_from:
                yield self._copy(copy_from, path)
            else:
                yield self._new_untitled(path, type=type, ext=ext)
        else:
            yield self._new_untitled(path)

    @web.authenticated
    @json_errors
    @gen.coroutine
    def put(self, path=''):
        """Saves the file in the location specified by name and path.

        PUT is very similar to POST, but the requester specifies the name,
        whereas with POST, the server picks the name.

        PUT /api/contents/path/Name.ipynb
          Save notebook at ``path/Name.ipynb``. Notebook structure is specified
          in `content` key of JSON request body. If content is not specified,
          create a new empty notebook.
        """
        model = self.get_json_body()
        if model:
            if model.get('copy_from'):
                raise web.HTTPError(400, "Cannot copy with PUT, only POST")
            # Existing file -> save; new path -> upload (201 Created).
            exists = yield gen.maybe_future(self.contents_manager.file_exists(path))
            if exists:
                yield gen.maybe_future(self._save(model, path))
            else:
                yield gen.maybe_future(self._upload(model, path))
        else:
            yield gen.maybe_future(self._new_untitled(path))

    @web.authenticated
    @json_errors
    @gen.coroutine
    def delete(self, path=''):
        """delete a file in the given path"""
        cm = self.contents_manager
        self.log.warn('delete %s', path)
        yield gen.maybe_future(cm.delete(path))
        self.set_status(204)
        self.finish()
class CheckpointsHandler(APIHandler):
    # Handles the checkpoint collection for a file:
    # GET lists checkpoints, POST creates one.

    SUPPORTED_METHODS = ('GET', 'POST')

    @web.authenticated
    @json_errors
    @gen.coroutine
    def get(self, path=''):
        """get lists checkpoints for a file"""
        cm = self.contents_manager
        checkpoints = yield gen.maybe_future(cm.list_checkpoints(path))
        data = json.dumps(checkpoints, default=date_default)
        self.finish(data)

    @web.authenticated
    @json_errors
    @gen.coroutine
    def post(self, path=''):
        """post creates a new checkpoint"""
        cm = self.contents_manager
        checkpoint = yield gen.maybe_future(cm.create_checkpoint(path))
        data = json.dumps(checkpoint, default=date_default)
        # Advertise the canonical URL of the new checkpoint.
        location = url_path_join(self.base_url, 'api/contents',
            path, 'checkpoints', checkpoint['id'])
        self.set_header('Location', url_escape(location))
        self.set_status(201)
        self.finish(data)
class ModifyCheckpointsHandler(APIHandler):
    # Operates on a single checkpoint of a file:
    # POST restores it, DELETE discards it.

    SUPPORTED_METHODS = ('POST', 'DELETE')

    @web.authenticated
    @json_errors
    @gen.coroutine
    def post(self, path, checkpoint_id):
        """post restores a file from a checkpoint"""
        cm = self.contents_manager
        yield gen.maybe_future(cm.restore_checkpoint(checkpoint_id, path))
        self.set_status(204)
        self.finish()

    @web.authenticated
    @json_errors
    @gen.coroutine
    def delete(self, path, checkpoint_id):
        """delete clears a checkpoint for a given file"""
        cm = self.contents_manager
        yield gen.maybe_future(cm.delete_checkpoint(checkpoint_id, path))
        self.set_status(204)
        self.finish()
class NotebooksRedirectHandler(IPythonHandler):
    """Redirect /api/notebooks to /api/contents"""
    SUPPORTED_METHODS = ('GET', 'PUT', 'PATCH', 'POST', 'DELETE')

    def get(self, path):
        self.log.warn("/api/notebooks is deprecated, use /api/contents")
        self.redirect(url_path_join(
            self.base_url,
            'api/contents',
            path
        ))

    # All verbs redirect identically to the new endpoint.
    put = patch = post = delete = get
#-----------------------------------------------------------------------------
# URL to handler mappings
#-----------------------------------------------------------------------------
# Checkpoint ids are word characters and dashes only.
_checkpoint_id_regex = r"(?P<checkpoint_id>[\w-]+)"

# (URL pattern, handler) pairs consumed by the notebook web application.
# More specific checkpoint routes must precede the generic contents route.
default_handlers = [
    (r"/api/contents%s/checkpoints" % path_regex, CheckpointsHandler),
    (r"/api/contents%s/checkpoints/%s" % (path_regex, _checkpoint_id_regex),
        ModifyCheckpointsHandler),
    (r"/api/contents%s" % path_regex, ContentsHandler),
    (r"/api/notebooks/?(.*)", NotebooksRedirectHandler),
]
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_1739_2 |
crossvul-python_data_good_1739_1 | """A contents manager that uses the local file system for storage."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import io
import os
import shutil
import mimetypes
from tornado import web
from .filecheckpoints import FileCheckpoints
from .fileio import FileManagerMixin
from .manager import ContentsManager
from IPython import nbformat
from IPython.utils.importstring import import_item
from IPython.utils.traitlets import Any, Unicode, Bool, TraitError
from IPython.utils.py3compat import getcwd, string_types
from IPython.utils import tz
from IPython.html.utils import (
is_hidden,
to_api_path,
)
_script_exporter = None
def _post_save_script(model, os_path, contents_manager, **kwargs):
    """convert notebooks to Python script after save with nbconvert

    replaces `ipython notebook --script`.

    Only acts on notebook models; the exported script is written next to
    the notebook with the exporter's output extension.
    """
    from IPython.nbconvert.exporters.script import ScriptExporter

    if model['type'] != 'notebook':
        return

    global _script_exporter
    if _script_exporter is None:
        # Lazily build one shared exporter (construction is expensive).
        _script_exporter = ScriptExporter(parent=contents_manager)

    log = contents_manager.log

    # Removed dead local: the original computed ``py_fname = base + '.py'``
    # but never used it -- the exporter decides the real extension.
    base = os.path.splitext(os_path)[0]
    script, resources = _script_exporter.from_filename(os_path)
    script_fname = base + resources.get('output_extension', '.txt')
    log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir))
    with io.open(script_fname, 'w', encoding='utf-8') as f:
        f.write(script)
class FileContentsManager(FileManagerMixin, ContentsManager):
    """ContentsManager that stores notebooks and files on the local filesystem."""

    # Root directory against which all API paths are resolved.
    root_dir = Unicode(config=True)

    def _root_dir_default(self):
        # Default to the parent application's notebook_dir; fall back to the
        # current working directory when there is no parent app.
        try:
            return self.parent.notebook_dir
        except AttributeError:
            return getcwd()
    # Legacy flag; setting it installs the nbconvert post-save hook below.
    save_script = Bool(False, config=True, help='DEPRECATED, use post_save_hook')

    def _save_script_changed(self):
        # Trait-change handler: translate the deprecated --script flag into
        # the equivalent post-save hook, warning the user about the change.
        self.log.warn("""
        `--script` is deprecated. You can trigger nbconvert via pre- or post-save hooks:
            ContentsManager.pre_save_hook
            FileContentsManager.post_save_hook
        A post-save hook has been registered that calls:
            ipython nbconvert --to script [notebook]
        which behaves similarly to `--script`.
        """)

        self.post_save_hook = _post_save_script
post_save_hook = Any(None, config=True,
help="""Python callable or importstring thereof
to be called on the path of a file just saved.
This can be used to process the file on disk,
such as converting the notebook to a script or HTML via nbconvert.
It will be called as (all arguments passed by keyword)::
hook(os_path=os_path, model=model, contents_manager=instance)
- path: the filesystem path to the file just written
- model: the model representing the file
- contents_manager: this ContentsManager instance
"""
)
def _post_save_hook_changed(self, name, old, new):
if new and isinstance(new, string_types):
self.post_save_hook = import_item(self.post_save_hook)
elif new:
if not callable(new):
raise TraitError("post_save_hook must be callable")
def run_post_save_hook(self, model, os_path):
"""Run the post-save hook if defined, and log errors"""
if self.post_save_hook:
try:
self.log.debug("Running post-save hook on %s", os_path)
self.post_save_hook(os_path=os_path, model=model, contents_manager=self)
except Exception:
self.log.error("Post-save hook failed on %s", os_path, exc_info=True)
def _root_dir_changed(self, name, old, new):
"""Do a bit of validation of the root_dir."""
if not os.path.isabs(new):
# If we receive a non-absolute path, make it absolute.
self.root_dir = os.path.abspath(new)
return
if not os.path.isdir(new):
raise TraitError("%r is not a directory" % new)
def _checkpoints_class_default(self):
return FileCheckpoints
def is_hidden(self, path):
"""Does the API style path correspond to a hidden directory or file?
Parameters
----------
path : string
The path to check. This is an API path (`/` separated,
relative to root_dir).
Returns
-------
hidden : bool
Whether the path exists and is hidden.
"""
path = path.strip('/')
os_path = self._get_os_path(path=path)
return is_hidden(os_path, self.root_dir)
def file_exists(self, path):
"""Returns True if the file exists, else returns False.
API-style wrapper for os.path.isfile
Parameters
----------
path : string
The relative path to the file (with '/' as separator)
Returns
-------
exists : bool
Whether the file exists.
"""
path = path.strip('/')
os_path = self._get_os_path(path)
return os.path.isfile(os_path)
def dir_exists(self, path):
"""Does the API-style path refer to an extant directory?
API-style wrapper for os.path.isdir
Parameters
----------
path : string
The path to check. This is an API path (`/` separated,
relative to root_dir).
Returns
-------
exists : bool
Whether the path is indeed a directory.
"""
path = path.strip('/')
os_path = self._get_os_path(path=path)
return os.path.isdir(os_path)
def exists(self, path):
"""Returns True if the path exists, else returns False.
API-style wrapper for os.path.exists
Parameters
----------
path : string
The API path to the file (with '/' as separator)
Returns
-------
exists : bool
Whether the target exists.
"""
path = path.strip('/')
os_path = self._get_os_path(path=path)
return os.path.exists(os_path)
def _base_model(self, path):
"""Build the common base of a contents model"""
os_path = self._get_os_path(path)
info = os.stat(os_path)
last_modified = tz.utcfromtimestamp(info.st_mtime)
created = tz.utcfromtimestamp(info.st_ctime)
# Create the base model.
model = {}
model['name'] = path.rsplit('/', 1)[-1]
model['path'] = path
model['last_modified'] = last_modified
model['created'] = created
model['content'] = None
model['format'] = None
model['mimetype'] = None
try:
model['writable'] = os.access(os_path, os.W_OK)
except OSError:
self.log.error("Failed to check write permissions on %s", os_path)
model['writable'] = False
return model
def _dir_model(self, path, content=True):
"""Build a model for a directory
if content is requested, will include a listing of the directory
"""
os_path = self._get_os_path(path)
four_o_four = u'directory does not exist: %r' % path
if not os.path.isdir(os_path):
raise web.HTTPError(404, four_o_four)
elif is_hidden(os_path, self.root_dir):
self.log.info("Refusing to serve hidden directory %r, via 404 Error",
os_path
)
raise web.HTTPError(404, four_o_four)
model = self._base_model(path)
model['type'] = 'directory'
if content:
model['content'] = contents = []
os_dir = self._get_os_path(path)
for name in os.listdir(os_dir):
os_path = os.path.join(os_dir, name)
# skip over broken symlinks in listing
if not os.path.exists(os_path):
self.log.warn("%s doesn't exist", os_path)
continue
elif not os.path.isfile(os_path) and not os.path.isdir(os_path):
self.log.debug("%s not a regular file", os_path)
continue
if self.should_list(name) and not is_hidden(os_path, self.root_dir):
contents.append(self.get(
path='%s/%s' % (path, name),
content=False)
)
model['format'] = 'json'
return model
def _file_model(self, path, content=True, format=None):
"""Build a model for a file
if content is requested, include the file contents.
format:
If 'text', the contents will be decoded as UTF-8.
If 'base64', the raw bytes contents will be encoded as base64.
If not specified, try to decode as UTF-8, and fall back to base64
"""
model = self._base_model(path)
model['type'] = 'file'
os_path = self._get_os_path(path)
model['mimetype'] = mimetypes.guess_type(os_path)[0]
if content:
content, format = self._read_file(os_path, format)
if model['mimetype'] is None:
default_mime = {
'text': 'text/plain',
'base64': 'application/octet-stream'
}[format]
model['mimetype'] = default_mime
model.update(
content=content,
format=format,
)
return model
def _notebook_model(self, path, content=True):
"""Build a notebook model
if content is requested, the notebook content will be populated
as a JSON structure (not double-serialized)
"""
model = self._base_model(path)
model['type'] = 'notebook'
if content:
os_path = self._get_os_path(path)
nb = self._read_notebook(os_path, as_version=4)
self.mark_trusted_cells(nb, path)
model['content'] = nb
model['format'] = 'json'
self.validate_notebook_model(model)
return model
def get(self, path, content=True, type=None, format=None):
""" Takes a path for an entity and returns its model
Parameters
----------
path : str
the API path that describes the relative path for the target
content : bool
Whether to include the contents in the reply
type : str, optional
The requested type - 'file', 'notebook', or 'directory'.
Will raise HTTPError 400 if the content doesn't match.
format : str, optional
The requested format for file contents. 'text' or 'base64'.
Ignored if this returns a notebook or directory model.
Returns
-------
model : dict
the contents model. If content=True, returns the contents
of the file or directory as well.
"""
path = path.strip('/')
if not self.exists(path):
raise web.HTTPError(404, u'No such file or directory: %s' % path)
os_path = self._get_os_path(path)
if os.path.isdir(os_path):
if type not in (None, 'directory'):
raise web.HTTPError(400,
u'%s is a directory, not a %s' % (path, type), reason='bad type')
model = self._dir_model(path, content=content)
elif type == 'notebook' or (type is None and path.endswith('.ipynb')):
model = self._notebook_model(path, content=content)
else:
if type == 'directory':
raise web.HTTPError(400,
u'%s is not a directory' % path, reason='bad type')
model = self._file_model(path, content=content, format=format)
return model
def _save_directory(self, os_path, model, path=''):
"""create a directory"""
if is_hidden(os_path, self.root_dir):
raise web.HTTPError(400, u'Cannot create hidden directory %r' % os_path)
if not os.path.exists(os_path):
with self.perm_to_403():
os.mkdir(os_path)
elif not os.path.isdir(os_path):
raise web.HTTPError(400, u'Not a directory: %s' % (os_path))
else:
self.log.debug("Directory %r already exists", os_path)
def save(self, model, path=''):
"""Save the file model and return the model with no content."""
path = path.strip('/')
if 'type' not in model:
raise web.HTTPError(400, u'No file type provided')
if 'content' not in model and model['type'] != 'directory':
raise web.HTTPError(400, u'No file content provided')
os_path = self._get_os_path(path)
self.log.debug("Saving %s", os_path)
self.run_pre_save_hook(model=model, path=path)
try:
if model['type'] == 'notebook':
nb = nbformat.from_dict(model['content'])
self.check_and_sign(nb, path)
self._save_notebook(os_path, nb)
# One checkpoint should always exist for notebooks.
if not self.checkpoints.list_checkpoints(path):
self.create_checkpoint(path)
elif model['type'] == 'file':
# Missing format will be handled internally by _save_file.
self._save_file(os_path, model['content'], model.get('format'))
elif model['type'] == 'directory':
self._save_directory(os_path, model, path)
else:
raise web.HTTPError(400, "Unhandled contents type: %s" % model['type'])
except web.HTTPError:
raise
except Exception as e:
self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True)
raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e))
validation_message = None
if model['type'] == 'notebook':
self.validate_notebook_model(model)
validation_message = model.get('message', None)
model = self.get(path, content=False)
if validation_message:
model['message'] = validation_message
self.run_post_save_hook(model=model, os_path=os_path)
return model
def delete_file(self, path):
"""Delete file at path."""
path = path.strip('/')
os_path = self._get_os_path(path)
rm = os.unlink
if os.path.isdir(os_path):
listing = os.listdir(os_path)
# Don't delete non-empty directories.
# A directory containing only leftover checkpoints is
# considered empty.
cp_dir = getattr(self.checkpoints, 'checkpoint_dir', None)
for entry in listing:
if entry != cp_dir:
raise web.HTTPError(400, u'Directory %s not empty' % os_path)
elif not os.path.isfile(os_path):
raise web.HTTPError(404, u'File does not exist: %s' % os_path)
if os.path.isdir(os_path):
self.log.debug("Removing directory %s", os_path)
with self.perm_to_403():
shutil.rmtree(os_path)
else:
self.log.debug("Unlinking file %s", os_path)
with self.perm_to_403():
rm(os_path)
def rename_file(self, old_path, new_path):
"""Rename a file."""
old_path = old_path.strip('/')
new_path = new_path.strip('/')
if new_path == old_path:
return
new_os_path = self._get_os_path(new_path)
old_os_path = self._get_os_path(old_path)
# Should we proceed with the move?
if os.path.exists(new_os_path):
raise web.HTTPError(409, u'File already exists: %s' % new_path)
# Move the file
try:
with self.perm_to_403():
shutil.move(old_os_path, new_os_path)
except web.HTTPError:
raise
except Exception as e:
raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % (old_path, e))
def info_string(self):
return "Serving notebooks from local directory: %s" % self.root_dir
def get_kernel_path(self, path, model=None):
"""Return the initial API path of a kernel associated with a given notebook"""
if '/' in path:
parent_dir = path.rsplit('/', 1)[0]
else:
parent_dir = ''
return parent_dir
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_1739_1 |
crossvul-python_data_bad_5582_1 | import os
import os.path
import mimetypes
import requests
from zope.interface import implements
from pyramid.interfaces import ITemplateRenderer
class ReleaseFileRenderer(object):
    """Pyramid renderer that serves release files from a local mirror.

    Files are cached under ``repository_root/<first-letter>/<filename>``.
    On a cache miss the file is fetched from ``value['url']`` and written
    into the cache before being returned; on a hit it is read from disk.
    """
    implements(ITemplateRenderer)
    def __init__(self, repository_root):
        # Directory under which all cached release files live.
        self.repository_root = repository_root
    def __call__(self, value, system):
        filename = value['filename']
        # SECURITY: `filename` originates from the request. Reject anything
        # containing path separators or traversal components so it cannot
        # escape repository_root (path traversal, CWE-22).
        if (os.path.basename(filename) != filename
                or '/' in filename or '\\' in filename
                or filename in ('.', '..') or not filename):
            raise ValueError('Invalid release filename: %r' % (filename,))
        if 'request' in system:
            request = system['request']
            mime, encoding = mimetypes.guess_type(filename)
            request.response_content_type = mime
            if encoding:
                request.response_encoding = encoding
        f = os.path.join(self.repository_root,
                         filename[0].lower(),
                         filename)
        # Belt-and-braces: verify the resolved path is still inside the root.
        root = os.path.abspath(self.repository_root)
        if not os.path.abspath(f).startswith(root + os.sep):
            raise ValueError('Invalid release filename: %r' % (filename,))
        if not os.path.exists(f):
            dir_ = os.path.dirname(f)
            if not os.path.exists(dir_):
                os.makedirs(dir_, 0o750)
            # Cache miss: fetch the file and persist it for next time.
            resp = requests.get(value['url'])
            with open(f, 'wb') as rf:
                rf.write(resp.content)
            return resp.content
        else:
            # Cache hit: return the stored bytes.
            with open(f, 'rb') as rf:
                return rf.read()
def renderer_factory(info):
    """Build a ReleaseFileRenderer from the application settings."""
    repository_root = info.settings['pyshop.repository']
    return ReleaseFileRenderer(repository_root)
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_5582_1 |
crossvul-python_data_good_3768_2 | import urlparse
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, QueryDict
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils.http import base36_to_int
from django.utils.translation import ugettext as _
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
# Avoid shadowing the login() and logout() views below.
from django.contrib.auth import REDIRECT_FIELD_NAME, login as auth_login, logout as auth_logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import AuthenticationForm, PasswordResetForm, SetPasswordForm, PasswordChangeForm
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.models import get_current_site
@csrf_protect
@never_cache
def login(request, template_name='registration/login.html',
          redirect_field_name=REDIRECT_FIELD_NAME,
          authentication_form=AuthenticationForm,
          current_app=None, extra_context=None):
    """
    Displays the login form and handles the login action.
    """
    # redirect_to is user-controlled (query string or POST body); it is
    # validated against the current host before being used below.
    redirect_to = request.REQUEST.get(redirect_field_name, '')
    if request.method == "POST":
        form = authentication_form(data=request.POST)
        if form.is_valid():
            netloc = urlparse.urlparse(redirect_to)[1]
            # Use default setting if redirect_to is empty
            if not redirect_to:
                redirect_to = settings.LOGIN_REDIRECT_URL
            # Security check -- don't allow redirection to a different
            # host.
            elif netloc and netloc != request.get_host():
                redirect_to = settings.LOGIN_REDIRECT_URL
            # Okay, security checks complete. Log the user in.
            auth_login(request, form.get_user())
            if request.session.test_cookie_worked():
                request.session.delete_test_cookie()
            return HttpResponseRedirect(redirect_to)
    else:
        # GET: present an unbound form.
        form = authentication_form(request)
    # Set a test cookie so the next POST can verify cookies are enabled.
    request.session.set_test_cookie()
    current_site = get_current_site(request)
    context = {
        'form': form,
        redirect_field_name: redirect_to,
        'site': current_site,
        'site_name': current_site.name,
    }
    # extra_context entries deliberately override the defaults above.
    context.update(extra_context or {})
    return render_to_response(template_name, context,
                              context_instance=RequestContext(request, current_app=current_app))
def logout(request, next_page=None,
           template_name='registration/logged_out.html',
           redirect_field_name=REDIRECT_FIELD_NAME,
           current_app=None, extra_context=None):
    """
    Logs out the user and displays 'You are logged out' message.
    """
    auth_logout(request)
    # User-supplied redirect target; only honoured when it points at this host.
    redirect_to = request.REQUEST.get(redirect_field_name, '')
    if redirect_to:
        netloc = urlparse.urlparse(redirect_to)[1]
        # Security check -- don't allow redirection to a different host.
        if not (netloc and netloc != request.get_host()):
            return HttpResponseRedirect(redirect_to)
    if next_page is None:
        current_site = get_current_site(request)
        context = {
            'site': current_site,
            'site_name': current_site.name,
            'title': _('Logged out')
        }
        # extra_context entries deliberately override the defaults above.
        context.update(extra_context or {})
        return render_to_response(template_name, context,
                                  context_instance=RequestContext(request, current_app=current_app))
    else:
        # Redirect to this page until the session has been cleared.
        return HttpResponseRedirect(next_page or request.path)
def logout_then_login(request, login_url=None, current_app=None, extra_context=None):
    """
    Logs out the user if he is logged in. Then redirects to the log-in page.
    """
    destination = login_url or settings.LOGIN_URL
    return logout(request, destination,
                  current_app=current_app, extra_context=extra_context)
def redirect_to_login(next, login_url=None,
                      redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Redirects the user to the login page, passing the given 'next' page
    """
    resolved_url = login_url or settings.LOGIN_URL
    url_parts = list(urlparse.urlparse(resolved_url))
    if redirect_field_name:
        # Merge the 'next' parameter into any query string already present
        # on the login URL.
        querystring = QueryDict(url_parts[4], mutable=True)
        querystring[redirect_field_name] = next
        url_parts[4] = querystring.urlencode(safe='/')
    return HttpResponseRedirect(urlparse.urlunparse(url_parts))
# 4 views for password reset:
# - password_reset sends the mail
# - password_reset_done shows a success message for the above
# - password_reset_confirm checks the link the user clicked and
# prompts for a new password
# - password_reset_complete shows a success message for the above
@csrf_protect
def password_reset(request, is_admin_site=False,
                   template_name='registration/password_reset_form.html',
                   email_template_name='registration/password_reset_email.html',
                   password_reset_form=PasswordResetForm,
                   token_generator=default_token_generator,
                   post_reset_redirect=None,
                   from_email=None,
                   current_app=None,
                   extra_context=None):
    """
    Display the password-reset form; on a valid POST, send reset e-mails
    via ``form.save()`` and redirect to the "done" view.
    """
    if post_reset_redirect is None:
        post_reset_redirect = reverse('django.contrib.auth.views.password_reset_done')
    if request.method == "POST":
        form = password_reset_form(request.POST)
        if form.is_valid():
            # Options forwarded to PasswordResetForm.save(), which builds
            # and sends the reset e-mail.
            opts = {
                'use_https': request.is_secure(),
                'token_generator': token_generator,
                'from_email': from_email,
                'email_template_name': email_template_name,
                'request': request,
            }
            if is_admin_site:
                # The admin site always links back to its own domain.
                opts = dict(opts, domain_override=request.get_host())
            form.save(**opts)
            return HttpResponseRedirect(post_reset_redirect)
    else:
        form = password_reset_form()
    context = {
        'form': form,
    }
    context.update(extra_context or {})
    return render_to_response(template_name, context,
                              context_instance=RequestContext(request, current_app=current_app))
def password_reset_done(request,
                        template_name='registration/password_reset_done.html',
                        current_app=None, extra_context=None):
    """Render the 'reset e-mail sent' confirmation page."""
    context = dict(extra_context or {})
    return render_to_response(template_name, context,
                              context_instance=RequestContext(request, current_app=current_app))
# Doesn't need csrf_protect since no-one can guess the URL
@never_cache
def password_reset_confirm(request, uidb36=None, token=None,
                           template_name='registration/password_reset_confirm.html',
                           token_generator=default_token_generator,
                           set_password_form=SetPasswordForm,
                           post_reset_redirect=None,
                           current_app=None, extra_context=None):
    """
    View that checks the hash in a password reset link and presents a
    form for entering a new password.
    """
    assert uidb36 is not None and token is not None # checked by URLconf
    if post_reset_redirect is None:
        post_reset_redirect = reverse('django.contrib.auth.views.password_reset_complete')
    try:
        # The user's pk is base36-encoded in the reset URL.
        uid_int = base36_to_int(uidb36)
        user = User.objects.get(id=uid_int)
    except (ValueError, User.DoesNotExist):
        user = None
    if user is not None and token_generator.check_token(user, token):
        validlink = True
        if request.method == 'POST':
            form = set_password_form(user, request.POST)
            if form.is_valid():
                form.save()
                return HttpResponseRedirect(post_reset_redirect)
        else:
            # NOTE(review): the unbound form is built with user=None here —
            # presumably intentional for display only; confirm against
            # SetPasswordForm's requirements.
            form = set_password_form(None)
    else:
        # Invalid or expired link: render the template with validlink=False.
        validlink = False
        form = None
    context = {
        'form': form,
        'validlink': validlink,
    }
    context.update(extra_context or {})
    return render_to_response(template_name, context,
                              context_instance=RequestContext(request, current_app=current_app))
def password_reset_complete(request,
                            template_name='registration/password_reset_complete.html',
                            current_app=None, extra_context=None):
    """Render the final 'password reset complete' page."""
    context = {
        'login_url': settings.LOGIN_URL
    }
    # Caller-supplied context entries override the default login_url.
    context.update(extra_context or {})
    return render_to_response(template_name, context,
                              context_instance=RequestContext(request, current_app=current_app))
@csrf_protect
@login_required
def password_change(request,
                    template_name='registration/password_change_form.html',
                    post_change_redirect=None,
                    password_change_form=PasswordChangeForm,
                    current_app=None, extra_context=None):
    """Prompt the logged-in user for their old password and a new one."""
    if post_change_redirect is None:
        post_change_redirect = reverse('django.contrib.auth.views.password_change_done')
    form = None
    if request.method == "POST":
        form = password_change_form(user=request.user, data=request.POST)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect(post_change_redirect)
    if form is None:
        # GET: present an unbound form.
        form = password_change_form(user=request.user)
    context = {
        'form': form,
    }
    context.update(extra_context or {})
    return render_to_response(template_name, context,
                              context_instance=RequestContext(request, current_app=current_app))
def password_change_done(request,
                         template_name='registration/password_change_done.html',
                         current_app=None, extra_context=None):
    """Render the 'password changed' confirmation page."""
    context = dict(extra_context or {})
    return render_to_response(template_name, context,
                              context_instance=RequestContext(request, current_app=current_app))
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3768_2 |
crossvul-python_data_bad_3500_1 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3500_1 |
crossvul-python_data_bad_100_2 | # -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
# Copyright 2017 - 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer, reactor
from twisted.python.failure import Failure
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import AuthError, Codes, SynapseError
from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.events.utils import serialize_event
from synapse.events.validator import EventValidator
from synapse.types import (
UserID, RoomAlias, RoomStreamToken,
)
from synapse.util.async import run_on_reactor, ReadWriteLock, Limiter
from synapse.util.logcontext import preserve_fn, run_in_background
from synapse.util.metrics import measure_func
from synapse.util.frozenutils import frozendict_json_encoder
from synapse.util.stringutils import random_string
from synapse.visibility import filter_events_for_client
from synapse.replication.http.send_event import send_event_to_master
from ._base import BaseHandler
from canonicaljson import encode_canonical_json
import logging
import simplejson
logger = logging.getLogger(__name__)
class PurgeStatus(object):
    """Object tracking the status of a purge request

    This class contains information on the progress of a purge request, for
    return by get_purge_status.

    Attributes:
        status (int): Tracks whether this request has completed. One of
            STATUS_{ACTIVE,COMPLETE,FAILED}
    """
    STATUS_ACTIVE = 0
    STATUS_COMPLETE = 1
    STATUS_FAILED = 2
    # Human-readable form of each status code, used by asdict().
    STATUS_TEXT = {
        STATUS_ACTIVE: "active",
        STATUS_COMPLETE: "complete",
        STATUS_FAILED: "failed",
    }
    def __init__(self):
        # New purges always start out active.
        self.status = PurgeStatus.STATUS_ACTIVE
    def asdict(self):
        """Return a JSON-serializable representation of this status."""
        return {"status": PurgeStatus.STATUS_TEXT[self.status]}
class MessageHandler(BaseHandler):
    """Read-side room handler: message pagination, room state/member reads,
    and history purges.
    """
    def __init__(self, hs):
        super(MessageHandler, self).__init__(hs)
        self.hs = hs
        self.state = hs.get_state_handler()
        self.clock = hs.get_clock()
        # Purges take the write side of this lock; pagination takes the read
        # side, so reads never race an in-progress purge of the same room.
        self.pagination_lock = ReadWriteLock()
        self._purges_in_progress_by_room = set()
        # map from purge id to PurgeStatus
        self._purges_by_id = {}
    def start_purge_history(self, room_id, topological_ordering,
                            delete_local_events=False):
        """Start off a history purge on a room.
        Args:
            room_id (str): The room to purge from
            topological_ordering (int): minimum topo ordering to preserve
            delete_local_events (bool): True to delete local events as well as
                remote ones
        Returns:
            str: unique ID for this purge transaction.
        """
        if room_id in self._purges_in_progress_by_room:
            raise SynapseError(
                400,
                "History purge already in progress for %s" % (room_id, ),
            )
        purge_id = random_string(16)
        # we log the purge_id here so that it can be tied back to the
        # request id in the log lines.
        logger.info("[purge] starting purge_id %s", purge_id)
        self._purges_by_id[purge_id] = PurgeStatus()
        # Kick the purge off in the background; callers poll via
        # get_purge_status with the returned id.
        run_in_background(
            self._purge_history,
            purge_id, room_id, topological_ordering, delete_local_events,
        )
        return purge_id
    @defer.inlineCallbacks
    def _purge_history(self, purge_id, room_id, topological_ordering,
                       delete_local_events):
        """Carry out a history purge on a room.
        Args:
            purge_id (str): The id for this purge
            room_id (str): The room to purge from
            topological_ordering (int): minimum topo ordering to preserve
            delete_local_events (bool): True to delete local events as well as
                remote ones
        Returns:
            Deferred
        """
        self._purges_in_progress_by_room.add(room_id)
        try:
            # Hold the write lock so no pagination runs against the room
            # while it is being purged.
            with (yield self.pagination_lock.write(room_id)):
                yield self.store.purge_history(
                    room_id, topological_ordering, delete_local_events,
                )
            logger.info("[purge] complete")
            self._purges_by_id[purge_id].status = PurgeStatus.STATUS_COMPLETE
        except Exception:
            logger.error("[purge] failed: %s", Failure().getTraceback().rstrip())
            self._purges_by_id[purge_id].status = PurgeStatus.STATUS_FAILED
        finally:
            self._purges_in_progress_by_room.discard(room_id)
        # remove the purge from the list 24 hours after it completes
        def clear_purge():
            del self._purges_by_id[purge_id]
        reactor.callLater(24 * 3600, clear_purge)
    def get_purge_status(self, purge_id):
        """Get the current status of an active purge
        Args:
            purge_id (str): purge_id returned by start_purge_history
        Returns:
            PurgeStatus|None
        """
        return self._purges_by_id.get(purge_id)
    @defer.inlineCallbacks
    def get_messages(self, requester, room_id=None, pagin_config=None,
                     as_client_event=True, event_filter=None):
        """Get messages in a room.
        Args:
            requester (Requester): The user requesting messages.
            room_id (str): The room they want messages from.
            pagin_config (synapse.api.streams.PaginationConfig): The pagination
                config rules to apply, if any.
            as_client_event (bool): True to get events in client-server format.
            event_filter (Filter): Filter to apply to results or None
        Returns:
            dict: Pagination API results
        """
        user_id = requester.user.to_string()
        if pagin_config.from_token:
            room_token = pagin_config.from_token.room_key
        else:
            # No starting token supplied: paginate from the room's current
            # position.
            pagin_config.from_token = (
                yield self.hs.get_event_sources().get_current_token_for_room(
                    room_id=room_id
                )
            )
            room_token = pagin_config.from_token.room_key
        room_token = RoomStreamToken.parse(room_token)
        pagin_config.from_token = pagin_config.from_token.copy_and_replace(
            "room_key", str(room_token)
        )
        source_config = pagin_config.get_source_config("room")
        # Take the read lock so we don't paginate a room mid-purge.
        with (yield self.pagination_lock.read(room_id)):
            membership, member_event_id = yield self._check_in_room_or_world_readable(
                room_id, user_id
            )
            if source_config.direction == 'b':
                # if we're going backwards, we might need to backfill. This
                # requires that we have a topo token.
                if room_token.topological:
                    max_topo = room_token.topological
                else:
                    max_topo = yield self.store.get_max_topological_token(
                        room_id, room_token.stream
                    )
                if membership == Membership.LEAVE:
                    # If they have left the room then clamp the token to be before
                    # they left the room, to save the effort of loading from the
                    # database.
                    leave_token = yield self.store.get_topological_token_for_event(
                        member_event_id
                    )
                    leave_token = RoomStreamToken.parse(leave_token)
                    if leave_token.topological < max_topo:
                        source_config.from_key = str(leave_token)
                yield self.hs.get_handlers().federation_handler.maybe_backfill(
                    room_id, max_topo
                )
            events, next_key = yield self.store.paginate_room_events(
                room_id=room_id,
                from_key=source_config.from_key,
                to_key=source_config.to_key,
                direction=source_config.direction,
                limit=source_config.limit,
                event_filter=event_filter,
            )
            next_token = pagin_config.from_token.copy_and_replace(
                "room_key", next_key
            )
        if not events:
            # Nothing in this window; return an empty chunk with tokens so
            # the client can continue paginating.
            defer.returnValue({
                "chunk": [],
                "start": pagin_config.from_token.to_string(),
                "end": next_token.to_string(),
            })
        if event_filter:
            events = event_filter.filter(events)
        # Strip out events this user isn't allowed to see (history
        # visibility, erasure, etc.).
        events = yield filter_events_for_client(
            self.store,
            user_id,
            events,
            is_peeking=(member_event_id is None),
        )
        time_now = self.clock.time_msec()
        chunk = {
            "chunk": [
                serialize_event(e, time_now, as_client_event)
                for e in events
            ],
            "start": pagin_config.from_token.to_string(),
            "end": next_token.to_string(),
        }
        defer.returnValue(chunk)
    @defer.inlineCallbacks
    def get_room_data(self, user_id=None, room_id=None,
                      event_type=None, state_key="", is_guest=False):
        """ Get a single piece of room state.
        Args:
            user_id (str): The user requesting the state.
            room_id (str): The room to read state from.
            event_type (str): The state event type to fetch.
            state_key (str): The state key of the event.
            is_guest (bool): Whether the requester is a guest.
        Returns:
            The state event content, or None if there is no such event.
        Raises:
            AuthError if the user may not read the room's state.
        """
        membership, membership_event_id = yield self._check_in_room_or_world_readable(
            room_id, user_id
        )
        if membership == Membership.JOIN:
            data = yield self.state_handler.get_current_state(
                room_id, event_type, state_key
            )
        elif membership == Membership.LEAVE:
            # Serve the state as of the point the user left the room.
            key = (event_type, state_key)
            room_state = yield self.store.get_state_for_events(
                [membership_event_id], [key]
            )
            data = room_state[membership_event_id].get(key)
        defer.returnValue(data)
    @defer.inlineCallbacks
    def _check_in_room_or_world_readable(self, room_id, user_id):
        # Returns (membership, member_event_id); member_event_id is None when
        # access is granted only because the room is world-readable.
        try:
            # check_user_was_in_room will return the most recent membership
            # event for the user if:
            #  * The user is a non-guest user, and was ever in the room
            #  * The user is a guest user, and has joined the room
            # else it will throw.
            member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
            defer.returnValue((member_event.membership, member_event.event_id))
            return
        except AuthError:
            visibility = yield self.state_handler.get_current_state(
                room_id, EventTypes.RoomHistoryVisibility, ""
            )
            if (
                visibility and
                visibility.content["history_visibility"] == "world_readable"
            ):
                defer.returnValue((Membership.JOIN, None))
                return
            raise AuthError(
                403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
            )
    @defer.inlineCallbacks
    def get_state_events(self, user_id, room_id, is_guest=False):
        """Retrieve all state events for a given room. If the user is
        joined to the room then return the current state. If the user has
        left the room return the state events from when they left.
        Args:
            user_id(str): The user requesting state events.
            room_id(str): The room ID to get all state events from.
        Returns:
            A list of dicts representing state events. [{}, {}, {}]
        """
        membership, membership_event_id = yield self._check_in_room_or_world_readable(
            room_id, user_id
        )
        if membership == Membership.JOIN:
            room_state = yield self.state_handler.get_current_state(room_id)
        elif membership == Membership.LEAVE:
            room_state = yield self.store.get_state_for_events(
                [membership_event_id], None
            )
            room_state = room_state[membership_event_id]
        now = self.clock.time_msec()
        defer.returnValue(
            [serialize_event(c, now) for c in room_state.values()]
        )
    @defer.inlineCallbacks
    def get_joined_members(self, requester, room_id):
        """Get all the joined members in the room and their profile information.
        If the user has left the room return the state events from when they left.
        Args:
            requester(Requester): The user requesting state events.
            room_id(str): The room ID to get all state events from.
        Returns:
            A dict of user_id to profile info
        """
        user_id = requester.user.to_string()
        if not requester.app_service:
            # We check AS auth after fetching the room membership, as it
            # requires us to pull out all joined members anyway.
            membership, _ = yield self._check_in_room_or_world_readable(
                room_id, user_id
            )
            if membership != Membership.JOIN:
                raise NotImplementedError(
                    "Getting joined members after leaving is not implemented"
                )
        users_with_profile = yield self.state.get_current_user_in_room(room_id)
        # If this is an AS, double check that they are allowed to see the members.
        # This can either be because the AS user is in the room or becuase there
        # is a user in the room that the AS is "interested in"
        if requester.app_service and user_id not in users_with_profile:
            for uid in users_with_profile:
                if requester.app_service.is_interested_in_user(uid):
                    break
            else:
                # Loop fell through, AS has no interested users in room
                raise AuthError(403, "Appservice not in room")
        defer.returnValue({
            user_id: {
                "avatar_url": profile.avatar_url,
                "display_name": profile.display_name,
            }
            for user_id, profile in users_with_profile.iteritems()
        })
class EventCreationHandler(object):
    """Builds, validates, signs and sends events on behalf of local clients."""

    def __init__(self, hs):
        # hs is the HomeServer dependency container; everything below is a
        # handle onto one of its singletons.
        self.hs = hs
        self.auth = hs.get_auth()
        self.store = hs.get_datastore()
        self.state = hs.get_state_handler()
        self.clock = hs.get_clock()
        self.validator = EventValidator()
        self.profile_handler = hs.get_profile_handler()
        self.event_builder_factory = hs.get_event_builder_factory()
        self.server_name = hs.hostname
        self.ratelimiter = hs.get_ratelimiter()
        self.notifier = hs.get_notifier()
        self.config = hs.config

        self.http_client = hs.get_simple_http_client()

        # This is only used to get at ratelimit function, and maybe_kick_guest_users
        self.base_handler = BaseHandler(hs)

        self.pusher_pool = hs.get_pusherpool()

        # We arbitrarily limit concurrent event creation for a room to 5.
        # This is to stop us from diverging history *too* much.
        self.limiter = Limiter(max_count=5)

        self.action_generator = hs.get_action_generator()

        self.spam_checker = hs.get_spam_checker()
@defer.inlineCallbacks
def create_event(self, requester, event_dict, token_id=None, txn_id=None,
                 prev_events_and_hashes=None):
    """
    Given a dict from a client, create a new event.

    Creates an FrozenEvent object, filling out auth_events, prev_events,
    etc.

    Adds display names to Join membership events.

    Args:
        requester
        event_dict (dict): An entire event
        token_id (str)
        txn_id (str)

        prev_events_and_hashes (list[(str, dict[str, str], int)]|None):
            the forward extremities to use as the prev_events for the
            new event. For each event, a tuple of (event_id, hashes, depth)
            where *hashes* is a map from algorithm to hash.

            If None, they will be requested from the database.

    Returns:
        Tuple of created event (FrozenEvent), Context
    """
    builder = self.event_builder_factory.new(event_dict)

    self.validator.validate_new(builder)

    if builder.type == EventTypes.Member:
        membership = builder.content.get("membership", None)
        target = UserID.from_string(builder.state_key)

        if membership in {Membership.JOIN, Membership.INVITE}:
            # If event doesn't include a display name, add one.
            profile = self.profile_handler
            content = builder.content

            try:
                if "displayname" not in content:
                    content["displayname"] = yield profile.get_displayname(target)
                if "avatar_url" not in content:
                    content["avatar_url"] = yield profile.get_avatar_url(target)
            except Exception as e:
                # Profile lookup is best-effort: a failure must not block
                # the membership event itself.
                logger.info(
                    "Failed to get profile information for %r: %s",
                    target, e
                )

    if token_id is not None:
        builder.internal_metadata.token_id = token_id

    if txn_id is not None:
        builder.internal_metadata.txn_id = txn_id

    event, context = yield self.create_new_client_event(
        builder=builder,
        requester=requester,
        prev_events_and_hashes=prev_events_and_hashes,
    )

    defer.returnValue((event, context))
@defer.inlineCallbacks
def send_nonmember_event(self, requester, event, context, ratelimit=True):
    """
    Persists and notifies local clients and federation of an event.

    Args:
        requester (Requester): the user sending the event.
        event (FrozenEvent) the event to send.
        context (Context) the context of the event.
        ratelimit (bool): Whether to rate limit this send.

    Raises:
        SynapseError: if called with a membership event; those must go
            through the member-event codepath instead.
    """
    if event.type == EventTypes.Member:
        raise SynapseError(
            500,
            "Tried to send member event through non-member codepath"
        )

    user = UserID.from_string(event.sender)

    # Only events from users on this homeserver may be sent from here.
    assert self.hs.is_mine(user), "User must be our own: %s" % (user,)

    if event.is_state():
        # If an identical state event is already in place, return the
        # existing one instead of re-sending a duplicate.
        prev_state = yield self.deduplicate_state_event(event, context)
        if prev_state is not None:
            defer.returnValue(prev_state)

    yield self.handle_new_client_event(
        requester=requester,
        event=event,
        context=context,
        ratelimit=ratelimit,
    )
@defer.inlineCallbacks
def deduplicate_state_event(self, event, context):
    """
    Checks whether event is in the latest resolved state in context.

    If so, returns the version of the event in context.
    Otherwise, returns None.
    """
    prev_event_id = context.prev_state_ids.get((event.type, event.state_key))
    prev_event = yield self.store.get_event(prev_event_id, allow_none=True)
    if not prev_event:
        return

    # Only treat the event as a duplicate if the same user sent
    # canonically identical content.
    if prev_event and event.user_id == prev_event.user_id:
        prev_content = encode_canonical_json(prev_event.content)
        next_content = encode_canonical_json(event.content)
        if prev_content == next_content:
            defer.returnValue(prev_event)
    return
@defer.inlineCallbacks
def create_and_send_nonmember_event(
    self,
    requester,
    event_dict,
    ratelimit=True,
    txn_id=None
):
    """
    Creates an event, then sends it.

    See self.create_event and self.send_nonmember_event.

    Raises:
        SynapseError: if the event is judged to be spam.
    """

    # We limit the number of concurrent event sends in a room so that we
    # don't fork the DAG too much. If we don't limit then we can end up in
    # a situation where event persistence can't keep up, causing
    # extremities to pile up, which in turn leads to state resolution
    # taking longer.
    with (yield self.limiter.queue(event_dict["room_id"])):
        event, context = yield self.create_event(
            requester,
            event_dict,
            token_id=requester.access_token_id,
            txn_id=txn_id
        )

        spam_error = self.spam_checker.check_event_for_spam(event)
        if spam_error:
            # A truthy non-string result means "spam" with no custom reason.
            if not isinstance(spam_error, basestring):
                spam_error = "Spam is not permitted here"
            raise SynapseError(
                403, spam_error, Codes.FORBIDDEN
            )

        yield self.send_nonmember_event(
            requester,
            event,
            context,
            ratelimit=ratelimit,
        )
    defer.returnValue(event)
@measure_func("create_new_client_event")
@defer.inlineCallbacks
def create_new_client_event(self, builder, requester=None,
                            prev_events_and_hashes=None):
    """Create a new event for a local client

    Args:
        builder (EventBuilder):

        requester (synapse.types.Requester|None):

        prev_events_and_hashes (list[(str, dict[str, str], int)]|None):
            the forward extremities to use as the prev_events for the
            new event. For each event, a tuple of (event_id, hashes, depth)
            where *hashes* is a map from algorithm to hash.

            If None, they will be requested from the database.

    Returns:
        Deferred[(synapse.events.EventBase, synapse.events.snapshot.EventContext)]
    """

    if prev_events_and_hashes is not None:
        # Guard against accidentally creating events with huge numbers
        # of prev_events.
        assert len(prev_events_and_hashes) <= 10, \
            "Attempting to create an event with %i prev_events" % (
                len(prev_events_and_hashes),
            )
    else:
        prev_events_and_hashes = \
            yield self.store.get_prev_events_for_room(builder.room_id)

    # The new event sits one step deeper than the deepest extremity.
    if prev_events_and_hashes:
        depth = max([d for _, _, d in prev_events_and_hashes]) + 1
    else:
        depth = 1

    prev_events = [
        (event_id, prev_hashes)
        for event_id, prev_hashes, _ in prev_events_and_hashes
    ]

    builder.prev_events = prev_events
    builder.depth = depth

    context = yield self.state.compute_event_context(builder)
    if requester:
        context.app_service = requester.app_service

    if builder.is_state():
        builder.prev_state = yield self.store.add_event_hashes(
            context.prev_state_events
        )

    yield self.auth.add_auth_events(builder, context)

    signing_key = self.hs.config.signing_key[0]
    add_hashes_and_signatures(
        builder, self.server_name, signing_key
    )

    event = builder.build()

    logger.debug(
        "Created event %s with state: %s",
        event.event_id, context.prev_state_ids,
    )

    defer.returnValue(
        (event, context,)
    )
@measure_func("handle_new_client_event")
@defer.inlineCallbacks
def handle_new_client_event(
    self,
    requester,
    event,
    context,
    ratelimit=True,
    extra_users=None,
):
    """Processes a new event. This includes checking auth, persisting it,
    notifying users, sending to remote servers, etc.

    If called from a worker will hit out to the master process for final
    processing.

    Args:
        requester (Requester)
        event (FrozenEvent)
        context (EventContext)
        ratelimit (bool)
        extra_users (list(UserID)|None): Any extra users to notify about event
    """
    # A literal `extra_users=[]` default would be a single list shared
    # between every call (mutable default argument); normalise None to a
    # fresh list instead.
    if extra_users is None:
        extra_users = []

    try:
        yield self.auth.check_from_context(event, context)
    except AuthError as err:
        logger.warn("Denying new event %r because %s", event, err)
        raise err

    # Ensure that we can round trip before trying to persist in db
    try:
        dump = frozendict_json_encoder.encode(event.content)
        simplejson.loads(dump)
    except Exception:
        logger.exception("Failed to encode content: %r", event.content)
        raise

    yield self.action_generator.handle_push_actions_for_event(
        event, context
    )

    try:
        # If we're a worker we need to hit out to the master.
        if self.config.worker_app:
            yield send_event_to_master(
                self.http_client,
                host=self.config.worker_replication_host,
                port=self.config.worker_replication_http_port,
                requester=requester,
                event=event,
                context=context,
                ratelimit=ratelimit,
                extra_users=extra_users,
            )
            return

        yield self.persist_and_notify_client_event(
            requester,
            event,
            context,
            ratelimit=ratelimit,
            extra_users=extra_users,
        )
    except:  # noqa: E722, as we reraise the exception this is fine.
        # Ensure that we actually remove the entries in the push actions
        # staging area, if we calculated them.
        preserve_fn(self.store.remove_push_actions_from_staging)(event.event_id)
        raise
@defer.inlineCallbacks
def persist_and_notify_client_event(
    self,
    requester,
    event,
    context,
    ratelimit=True,
    extra_users=None,
):
    """Called when we have fully built the event, have already
    calculated the push actions for the event, and checked auth.

    This should only be run on master.

    Args:
        requester (Requester)
        event (FrozenEvent)
        context (EventContext)
        ratelimit (bool)
        extra_users (list(UserID)|None): Any extra users to notify about event
    """
    assert not self.config.worker_app

    # Avoid the shared-mutable-default pitfall: accept None and build a
    # fresh list per call.
    if extra_users is None:
        extra_users = []

    if ratelimit:
        yield self.base_handler.ratelimit(requester)

    yield self.base_handler.maybe_kick_guest_users(event, context)

    if event.type == EventTypes.CanonicalAlias:
        # Check the alias is actually valid (at this time at least)
        room_alias_str = event.content.get("alias", None)
        if room_alias_str:
            room_alias = RoomAlias.from_string(room_alias_str)
            directory_handler = self.hs.get_handlers().directory_handler
            mapping = yield directory_handler.get_association(room_alias)

            if mapping["room_id"] != event.room_id:
                raise SynapseError(
                    400,
                    "Room alias %s does not point to the room" % (
                        room_alias_str,
                    )
                )

    federation_handler = self.hs.get_handlers().federation_handler

    if event.type == EventTypes.Member:
        if event.content["membership"] == Membership.INVITE:
            # Bundle a stripped-down copy of the relevant room state into
            # the invite so the invitee's client can display it.
            state_to_include_ids = [
                e_id
                for k, e_id in context.current_state_ids.iteritems()
                if k[0] in self.hs.config.room_invite_state_types
                or k == (EventTypes.Member, event.sender)
            ]

            state_to_include = yield self.store.get_events(state_to_include_ids)

            event.unsigned["invite_room_state"] = [
                {
                    "type": e.type,
                    "state_key": e.state_key,
                    "content": e.content,
                    "sender": e.sender,
                }
                for e in state_to_include.itervalues()
            ]

            invitee = UserID.from_string(event.state_key)
            if not self.hs.is_mine(invitee):
                # TODO: Can we add signature from remote server in a nicer
                # way? If we have been invited by a remote server, we need
                # to get them to sign the event.

                returned_invite = yield federation_handler.send_invite(
                    invitee.domain,
                    event,
                )

                event.unsigned.pop("room_state", None)

                # TODO: Make sure the signatures actually are correct.
                event.signatures.update(
                    returned_invite.signatures
                )

    if event.type == EventTypes.Redaction:
        # Only allow a redaction through if the sender is permitted to
        # redact the original event.
        auth_events_ids = yield self.auth.compute_auth_events(
            event, context.prev_state_ids, for_verification=True,
        )
        auth_events = yield self.store.get_events(auth_events_ids)
        auth_events = {
            (e.type, e.state_key): e for e in auth_events.values()
        }
        if self.auth.check_redaction(event, auth_events=auth_events):
            original_event = yield self.store.get_event(
                event.redacts,
                check_redacted=False,
                get_prev_content=False,
                allow_rejected=False,
                allow_none=False
            )
            if event.user_id != original_event.user_id:
                raise AuthError(
                    403,
                    "You don't have permission to redact events"
                )

    if event.type == EventTypes.Create and context.prev_state_ids:
        raise AuthError(
            403,
            "Changing the room create event is forbidden",
        )

    (event_stream_id, max_stream_id) = yield self.store.persist_event(
        event, context=context
    )

    # this intentionally does not yield: we don't care about the result
    # and don't need to wait for it.
    preserve_fn(self.pusher_pool.on_new_notifications)(
        event_stream_id, max_stream_id
    )

    @defer.inlineCallbacks
    def _notify():
        yield run_on_reactor()
        self.notifier.on_new_room_event(
            event, event_stream_id, max_stream_id,
            extra_users=extra_users
        )

    preserve_fn(_notify)()

    if event.type == EventTypes.Message:
        presence = self.hs.get_presence_handler()
        # We don't want to block sending messages on any presence code. This
        # matters as sometimes presence code can take a while.
        preserve_fn(presence.bump_presence_active_time)(requester.user)
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_100_2 |
crossvul-python_data_bad_5399_0 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import six
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
@utils.register_interface(KeyDerivationFunction)
class HKDF(object):
    """HKDF extract-and-expand key derivation function (RFC 5869).

    Extracts a pseudorandom key from ``key_material`` with HMAC(salt, ikm),
    then expands it to ``length`` bytes via :class:`HKDFExpand`.
    """

    def __init__(self, algorithm, length, salt, info, backend):
        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )
        self._algorithm = algorithm

        if not (salt is None or isinstance(salt, bytes)):
            raise TypeError("salt must be bytes.")

        if salt is None:
            # RFC 5869: a missing salt defaults to HashLen zero bytes.
            # ``digest_size`` is already measured in bytes, so dividing by
            # 8 (as the previous revision did) produced a salt 1/8th the
            # required length.
            salt = b"\x00" * self._algorithm.digest_size

        self._salt = salt

        self._backend = backend

        self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend)

    def _extract(self, key_material):
        # HKDF-Extract: PRK = HMAC-Hash(salt, IKM)
        h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend)
        h.update(key_material)
        return h.finalize()

    def derive(self, key_material):
        if not isinstance(key_material, bytes):
            raise TypeError("key_material must be bytes.")

        return self._hkdf_expand.derive(self._extract(key_material))

    def verify(self, key_material, expected_key):
        # Constant-time comparison so key checks do not leak timing info.
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
@utils.register_interface(KeyDerivationFunction)
class HKDFExpand(object):
    """HKDF-Expand step of RFC 5869: expand a pseudorandom key to ``length``
    bytes of output keying material.
    """

    def __init__(self, algorithm, length, info, backend):
        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )
        self._algorithm = algorithm

        self._backend = backend

        # RFC 5869 caps the output at 255 blocks of HashLen bytes each.
        # ``digest_size`` is in bytes already; the previous ``// 8``
        # mis-measured the block size and capped output far too low.
        max_length = 255 * algorithm.digest_size

        if length > max_length:
            raise ValueError(
                "Can not derive keys larger than {0} octets.".format(
                    max_length
                ))

        self._length = length

        if not (info is None or isinstance(info, bytes)):
            raise TypeError("info must be bytes.")

        if info is None:
            info = b""

        self._info = info
        self._used = False

    def _expand(self, key_material):
        output = [b""]
        counter = 1

        # output[0] is the empty seed block, so len(output) - 1 counts the
        # blocks of real keying material produced so far.  The previous
        # revision compared ``(digest_size // 8) * len(output)``, which
        # both mis-measured the block size and over-counted by one block,
        # returning an *empty* key for short requested lengths
        # (CVE-2016-9243).
        while self._algorithm.digest_size * (len(output) - 1) < self._length:
            h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
            h.update(output[-1])
            h.update(self._info)
            h.update(six.int2byte(counter))
            output.append(h.finalize())
            counter += 1

        return b"".join(output)[:self._length]

    def derive(self, key_material):
        if not isinstance(key_material, bytes):
            raise TypeError("key_material must be bytes.")

        if self._used:
            raise AlreadyFinalized

        # Single-use object, as required by the KeyDerivationFunction API.
        self._used = True
        return self._expand(key_material)

    def verify(self, key_material, expected_key):
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_5399_0 |
crossvul-python_data_good_1740_1 | """A contents manager that uses the local file system for storage."""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import io
import os
import shutil
import mimetypes
import nbformat
from tornado import web
from .filecheckpoints import FileCheckpoints
from .fileio import FileManagerMixin
from .manager import ContentsManager
from ipython_genutils.importstring import import_item
from traitlets import Any, Unicode, Bool, TraitError
from ipython_genutils.py3compat import getcwd, string_types
from . import tz
from notebook.utils import (
is_hidden,
to_api_path,
)
_script_exporter = None
def _post_save_script(model, os_path, contents_manager, **kwargs):
    """convert notebooks to Python script after save with nbconvert

    replaces `ipython notebook --script`
    """
    from nbconvert.exporters.script import ScriptExporter

    if model['type'] != 'notebook':
        return

    global _script_exporter
    if _script_exporter is None:
        _script_exporter = ScriptExporter(parent=contents_manager)
    log = contents_manager.log

    # Derive the output filename from the exporter's reported extension
    # rather than assuming '.py' (the previous revision computed an unused
    # ``py_fname = base + '.py'`` — dead code, removed).
    base, _ = os.path.splitext(os_path)
    script, resources = _script_exporter.from_filename(os_path)
    script_fname = base + resources.get('output_extension', '.txt')
    log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir))
    with io.open(script_fname, 'w', encoding='utf-8') as f:
        f.write(script)
class FileContentsManager(FileManagerMixin, ContentsManager):
    """ContentsManager that persists notebooks and files on the local disk."""

    # Absolute path on disk against which all API paths are resolved.
    root_dir = Unicode(config=True)

    def _root_dir_default(self):
        # Default to the server's notebook_dir, or the process cwd when
        # there is no parent application.
        try:
            return self.parent.notebook_dir
        except AttributeError:
            return getcwd()

    save_script = Bool(False, config=True, help='DEPRECATED, use post_save_hook')

    def _save_script_changed(self):
        self.log.warn("""
        `--script` is deprecated. You can trigger nbconvert via pre- or post-save hooks:

            ContentsManager.pre_save_hook
            FileContentsManager.post_save_hook

        A post-save hook has been registered that calls:

            ipython nbconvert --to script [notebook]

        which behaves similarly to `--script`.
        """)

        self.post_save_hook = _post_save_script

    post_save_hook = Any(None, config=True,
        help="""Python callable or importstring thereof

        to be called on the path of a file just saved.

        This can be used to process the file on disk,
        such as converting the notebook to a script or HTML via nbconvert.

        It will be called as (all arguments passed by keyword)::

            hook(os_path=os_path, model=model, contents_manager=instance)

        - path: the filesystem path to the file just written
        - model: the model representing the file
        - contents_manager: this ContentsManager instance
        """
    )

    def _post_save_hook_changed(self, name, old, new):
        # Accept a dotted importstring and resolve it to the callable.
        if new and isinstance(new, string_types):
            self.post_save_hook = import_item(self.post_save_hook)
        elif new:
            if not callable(new):
                raise TraitError("post_save_hook must be callable")

    def run_post_save_hook(self, model, os_path):
        """Run the post-save hook if defined, and log errors"""
        if self.post_save_hook:
            try:
                self.log.debug("Running post-save hook on %s", os_path)
                self.post_save_hook(os_path=os_path, model=model, contents_manager=self)
            except Exception:
                # Hook failures must not break the save itself.
                self.log.error("Post-save hook failed on %s", os_path, exc_info=True)

    def _root_dir_changed(self, name, old, new):
        """Do a bit of validation of the root_dir."""
        if not os.path.isabs(new):
            # If we receive a non-absolute path, make it absolute.
            self.root_dir = os.path.abspath(new)
            return
        if not os.path.isdir(new):
            raise TraitError("%r is not a directory" % new)

    def _checkpoints_class_default(self):
        return FileCheckpoints
def is_hidden(self, path):
    """Does the API style path correspond to a hidden directory or file?

    Parameters
    ----------
    path : string
        API path (`/` separated, relative to root_dir) to check.

    Returns
    -------
    hidden : bool
        Whether the path exists and is hidden.
    """
    return is_hidden(self._get_os_path(path=path.strip('/')), self.root_dir)

def file_exists(self, path):
    """Returns True if the file exists, else returns False.

    API-style wrapper for os.path.isfile

    Parameters
    ----------
    path : string
        The relative path to the file (with '/' as separator)

    Returns
    -------
    exists : bool
        Whether the file exists.
    """
    return os.path.isfile(self._get_os_path(path.strip('/')))

def dir_exists(self, path):
    """Does the API-style path refer to an extant directory?

    API-style wrapper for os.path.isdir

    Parameters
    ----------
    path : string
        API path (`/` separated, relative to root_dir) to check.

    Returns
    -------
    exists : bool
        Whether the path is indeed a directory.
    """
    return os.path.isdir(self._get_os_path(path=path.strip('/')))

def exists(self, path):
    """Returns True if the path exists, else returns False.

    API-style wrapper for os.path.exists

    Parameters
    ----------
    path : string
        The API path to the file (with '/' as separator)

    Returns
    -------
    exists : bool
        Whether the target exists.
    """
    return os.path.exists(self._get_os_path(path=path.strip('/')))
def _base_model(self, path):
    """Build the common base of a contents model"""
    os_path = self._get_os_path(path)
    info = os.stat(os_path)

    try:
        writable = os.access(os_path, os.W_OK)
    except OSError:
        self.log.error("Failed to check write permissions on %s", os_path)
        writable = False

    # Key insertion order matches the historical model layout.
    return {
        'name': path.rsplit('/', 1)[-1],
        'path': path,
        'last_modified': tz.utcfromtimestamp(info.st_mtime),
        'created': tz.utcfromtimestamp(info.st_ctime),
        'content': None,
        'format': None,
        'mimetype': None,
        'writable': writable,
    }
def _dir_model(self, path, content=True):
    """Build a model for a directory

    if content is requested, will include a listing of the directory
    """
    os_path = self._get_os_path(path)

    four_o_four = u'directory does not exist: %r' % path

    if not os.path.isdir(os_path):
        raise web.HTTPError(404, four_o_four)
    elif is_hidden(os_path, self.root_dir):
        # Hidden directories get the same 404 as missing ones so their
        # existence is not disclosed to clients.
        self.log.info("Refusing to serve hidden directory %r, via 404 Error",
            os_path
        )
        raise web.HTTPError(404, four_o_four)

    model = self._base_model(path)
    model['type'] = 'directory'
    if content:
        model['content'] = contents = []
        os_dir = self._get_os_path(path)
        for name in os.listdir(os_dir):
            os_path = os.path.join(os_dir, name)
            # skip over broken symlinks in listing
            if not os.path.exists(os_path):
                self.log.warn("%s doesn't exist", os_path)
                continue
            elif not os.path.isfile(os_path) and not os.path.isdir(os_path):
                # skip anything that isn't a regular file or directory
                # (sockets, fifos, device nodes, ...)
                self.log.debug("%s not a regular file", os_path)
                continue
            if self.should_list(name) and not is_hidden(os_path, self.root_dir):
                contents.append(self.get(
                    path='%s/%s' % (path, name),
                    content=False)
                )

        model['format'] = 'json'

    return model
def _file_model(self, path, content=True, format=None):
    """Build a model for a file

    if content is requested, include the file contents.

    format:
      If 'text', the contents will be decoded as UTF-8.
      If 'base64', the raw bytes contents will be encoded as base64.
      If not specified, try to decode as UTF-8, and fall back to base64
    """
    model = self._base_model(path)
    model['type'] = 'file'

    os_path = self._get_os_path(path)
    # Guess the mimetype from the filename extension.
    model['mimetype'] = mimetypes.guess_type(os_path)[0]

    if content:
        content, format = self._read_file(os_path, format)
        if model['mimetype'] is None:
            # No guess from the extension: fall back on the actual format
            # the file was read as.
            default_mime = {
                'text': 'text/plain',
                'base64': 'application/octet-stream'
            }[format]
            model['mimetype'] = default_mime

        model.update(
            content=content,
            format=format,
        )

    return model
def _notebook_model(self, path, content=True):
    """Build a notebook model

    if content is requested, the notebook content will be populated
    as a JSON structure (not double-serialized)
    """
    model = self._base_model(path)
    model['type'] = 'notebook'
    if content:
        os_path = self._get_os_path(path)
        # Always normalise to nbformat v4 on read.
        nb = self._read_notebook(os_path, as_version=4)
        self.mark_trusted_cells(nb, path)
        model['content'] = nb
        model['format'] = 'json'
        self.validate_notebook_model(model)
    return model
def get(self, path, content=True, type=None, format=None):
    """ Takes a path for an entity and returns its model

    Parameters
    ----------
    path : str
        the API path that describes the relative path for the target
    content : bool
        Whether to include the contents in the reply
    type : str, optional
        The requested type - 'file', 'notebook', or 'directory'.
        Will raise HTTPError 400 if the content doesn't match.
    format : str, optional
        The requested format for file contents. 'text' or 'base64'.
        Ignored if this returns a notebook or directory model.

    Returns
    -------
    model : dict
        the contents model. If content=True, returns the contents
        of the file or directory as well.
    """
    path = path.strip('/')

    if not self.exists(path):
        raise web.HTTPError(404, u'No such file or directory: %s' % path)

    os_path = self._get_os_path(path)
    if os.path.isdir(os_path):
        if type not in (None, 'directory'):
            raise web.HTTPError(400,
                u'%s is a directory, not a %s' % (path, type), reason='bad type')
        model = self._dir_model(path, content=content)
    elif type == 'notebook' or (type is None and path.endswith('.ipynb')):
        model = self._notebook_model(path, content=content)
    else:
        if type == 'directory':
            raise web.HTTPError(400,
                u'%s is not a directory' % path, reason='bad type')
        model = self._file_model(path, content=content, format=format)
    return model
def _save_directory(self, os_path, model, path=''):
    """Create a directory at os_path, refusing hidden locations."""
    if is_hidden(os_path, self.root_dir):
        raise web.HTTPError(400, u'Cannot create hidden directory %r' % os_path)
    if os.path.exists(os_path):
        if os.path.isdir(os_path):
            self.log.debug("Directory %r already exists", os_path)
        else:
            raise web.HTTPError(400, u'Not a directory: %s' % (os_path))
    else:
        with self.perm_to_403():
            os.mkdir(os_path)
def save(self, model, path=''):
    """Save the file model and return the model with no content."""
    path = path.strip('/')

    if 'type' not in model:
        raise web.HTTPError(400, u'No file type provided')
    if 'content' not in model and model['type'] != 'directory':
        raise web.HTTPError(400, u'No file content provided')

    os_path = self._get_os_path(path)
    self.log.debug("Saving %s", os_path)

    self.run_pre_save_hook(model=model, path=path)

    try:
        if model['type'] == 'notebook':
            nb = nbformat.from_dict(model['content'])
            self.check_and_sign(nb, path)
            self._save_notebook(os_path, nb)
            # One checkpoint should always exist for notebooks.
            if not self.checkpoints.list_checkpoints(path):
                self.create_checkpoint(path)
        elif model['type'] == 'file':
            # Missing format will be handled internally by _save_file.
            self._save_file(os_path, model['content'], model.get('format'))
        elif model['type'] == 'directory':
            self._save_directory(os_path, model, path)
        else:
            raise web.HTTPError(400, "Unhandled contents type: %s" % model['type'])
    except web.HTTPError:
        raise
    except Exception as e:
        self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True)
        raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e))

    validation_message = None
    if model['type'] == 'notebook':
        self.validate_notebook_model(model)
        validation_message = model.get('message', None)

    # Return a content-free model, preserving any validation message.
    model = self.get(path, content=False)
    if validation_message:
        model['message'] = validation_message

    self.run_post_save_hook(model=model, os_path=os_path)

    return model
def delete_file(self, path):
    """Delete the file or directory at the given API path."""
    os_path = self._get_os_path(path.strip('/'))

    if os.path.isdir(os_path):
        # Refuse to delete non-empty directories; a directory holding
        # only leftover checkpoints counts as empty.
        cp_dir = getattr(self.checkpoints, 'checkpoint_dir', None)
        if any(entry != cp_dir for entry in os.listdir(os_path)):
            raise web.HTTPError(400, u'Directory %s not empty' % os_path)
        self.log.debug("Removing directory %s", os_path)
        with self.perm_to_403():
            shutil.rmtree(os_path)
    elif os.path.isfile(os_path):
        self.log.debug("Unlinking file %s", os_path)
        with self.perm_to_403():
            os.unlink(os_path)
    else:
        raise web.HTTPError(404, u'File does not exist: %s' % os_path)
def rename_file(self, old_path, new_path):
    """Move a file from old_path to new_path (API paths)."""
    old_path, new_path = old_path.strip('/'), new_path.strip('/')
    if new_path == old_path:
        return

    new_os_path = self._get_os_path(new_path)
    old_os_path = self._get_os_path(old_path)

    # Refuse to clobber an existing destination.
    if os.path.exists(new_os_path):
        raise web.HTTPError(409, u'File already exists: %s' % new_path)

    try:
        with self.perm_to_403():
            shutil.move(old_os_path, new_os_path)
    except web.HTTPError:
        raise
    except Exception as e:
        raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % (old_path, e))
def info_string(self):
    """One-line description of where notebooks are served from."""
    return "Serving notebooks from local directory: %s" % self.root_dir

def get_kernel_path(self, path, model=None):
    """Return the initial API path of a kernel associated with a given notebook"""
    # Everything before the last '/' is the notebook's parent directory;
    # a top-level notebook yields the empty string.
    parent_dir, _, _ = path.rpartition('/')
    return parent_dir
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_1740_1 |
crossvul-python_data_bad_2141_3 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import sys
import re
import os
import shlex
import yaml
import copy
import optparse
import operator
from ansible import errors
from ansible import __version__
from ansible.utils import template
from ansible.utils.display_functions import *
from ansible.utils.plugins import *
from ansible.callbacks import display
import ansible.constants as C
import ast
import time
import StringIO
import stat
import termios
import tty
import pipes
import random
import difflib
import warnings
import traceback
import getpass
import sys
import json
from vault import VaultLib
VERBOSITY=0
MAX_FILE_SIZE_FOR_DIFF=1*1024*1024
try:
import json
except ImportError:
import simplejson as json
try:
from hashlib import md5 as _md5
except ImportError:
from md5 import md5 as _md5
PASSLIB_AVAILABLE = False
try:
import passlib.hash
PASSLIB_AVAILABLE = True
except:
pass
try:
import builtin
except ImportError:
import __builtin__ as builtin
KEYCZAR_AVAILABLE=False
try:
try:
# some versions of pycrypto may not have this?
from Crypto.pct_warnings import PowmInsecureWarning
except ImportError:
PowmInsecureWarning = RuntimeWarning
with warnings.catch_warnings(record=True) as warning_handler:
warnings.simplefilter("error", PowmInsecureWarning)
try:
import keyczar.errors as key_errors
from keyczar.keys import AesKey
except PowmInsecureWarning:
system_warning(
"The version of gmp you have installed has a known issue regarding " + \
"timing vulnerabilities when used with pycrypto. " + \
"If possible, you should update it (ie. yum update gmp)."
)
warnings.resetwarnings()
warnings.simplefilter("ignore")
import keyczar.errors as key_errors
from keyczar.keys import AesKey
KEYCZAR_AVAILABLE=True
except ImportError:
pass
###############################################################
# Abstractions around keyczar
###############################################################
def key_for_hostname(hostname):
    ''' Return the AES key used to talk to hostname, generating it if needed. '''
    # fireball mode is an implementation of ansible firing up zeromq via SSH
    # to use no persistent daemons or key management
    if not KEYCZAR_AVAILABLE:
        raise errors.AnsibleError("python-keyczar must be installed on the control machine to use accelerated modes")

    key_path = os.path.expanduser(C.ACCELERATE_KEYS_DIR)
    if not os.path.exists(key_path):
        # Create the key directory, then re-apply the configured mode in
        # case makedirs honoured the process umask.
        os.makedirs(key_path, mode=0700)
        os.chmod(key_path, int(C.ACCELERATE_KEYS_DIR_PERMS, 8))
    elif not os.path.isdir(key_path):
        raise errors.AnsibleError('ACCELERATE_KEYS_DIR is not a directory.')

    # Refuse to use a key directory whose permissions have drifted from the
    # configured (restrictive) mode.
    if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_DIR_PERMS, 8):
        raise errors.AnsibleError('Incorrect permissions on the private key directory. Use `chmod 0%o %s` to correct this issue, and make sure any of the keys files contained within that directory are set to 0%o' % (int(C.ACCELERATE_KEYS_DIR_PERMS, 8), C.ACCELERATE_KEYS_DIR, int(C.ACCELERATE_KEYS_FILE_PERMS, 8)))

    key_path = os.path.join(key_path, hostname)

    # use new AES keys every 2 hours, which means fireball must not allow running for longer either
    if not os.path.exists(key_path) or (time.time() - os.path.getmtime(key_path) > 60*60*2):
        key = AesKey.Generate()
        # Open with O_CREAT and the restrictive configured mode so the key
        # file is never readable by others, even transiently.
        fd = os.open(key_path, os.O_WRONLY | os.O_CREAT, int(C.ACCELERATE_KEYS_FILE_PERMS, 8))
        fh = os.fdopen(fd, 'w')
        fh.write(str(key))
        fh.close()
        return key
    else:
        # Reuse the existing key, but only if its permissions are intact.
        if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_FILE_PERMS, 8):
            raise errors.AnsibleError('Incorrect permissions on the key file for this host. Use `chmod 0%o %s` to correct this issue.' % (int(C.ACCELERATE_KEYS_FILE_PERMS, 8), key_path))
        fh = open(key_path)
        key = AesKey.Read(fh.read())
        fh.close()
        return key
def encrypt(key, msg):
    """Encrypt *msg* with a keyczar AES key object and return the ciphertext."""
    return key.Encrypt(msg)
def decrypt(key, msg):
    """Decrypt *msg* with a keyczar AES key object.

    Translates keyczar's InvalidSignatureError into an AnsibleError so
    callers see a uniform error type on tampered/corrupt payloads.
    """
    try:
        return key.Decrypt(msg)
    except key_errors.InvalidSignatureError:
        raise errors.AnsibleError("decryption failed")
###############################################################
# UTILITY FUNCTIONS FOR COMMAND LINE TOOLS
###############################################################
def err(msg):
    ''' print an error message to stderr '''
    # Python 2 print-chevron syntax; writes msg + newline to stderr
    print >> sys.stderr, msg
def exit(msg, rc=1):
    ''' quit with an error to stdout and a failure code '''
    # NOTE: shadows the builtin exit(); message actually goes to stderr via err()
    err(msg)
    sys.exit(rc)
def jsonify(result, format=False):
    ''' format JSON output (compressed or uncompressed) '''
    if result is None:
        return "{}"
    # work on a shallow copy so the caller's dict is not mutated
    result2 = result.copy()
    for key, value in result2.items():
        if type(value) is str:
            # Python 2 byte strings: coerce to unicode so json.dumps does
            # not choke on non-ASCII bytes (errors are silently dropped)
            result2[key] = value.decode('utf-8', 'ignore')
    if format:
        return json.dumps(result2, sort_keys=True, indent=4)
    else:
        return json.dumps(result2, sort_keys=True)
def write_tree_file(tree, hostname, buf):
    ''' Persist *buf* into the per-host file <tree>/<hostname>. '''
    # TODO: appending (rather than truncating) would let successive playbook
    # runs accumulate output per host.
    destination = os.path.join(tree, hostname)
    with open(destination, "w+") as handle:
        handle.write(buf)
def is_failed(result):
    ''' is a given JSON result a failed result? '''
    bad_rc = result.get('rc', 0) != 0
    failed_flag = result.get('failed', False) in (True, 'True', 'true')
    return bad_rc or failed_flag
def is_changed(result):
    ''' is a given JSON result a changed result? '''
    # accept the bool True as well as its common string spellings
    return result.get('changed', False) in ('True', 'true', True)
def check_conditional(conditional, basedir, inject, fail_on_undefined=False):
if conditional is None or conditional == '':
return True
if isinstance(conditional, list):
for x in conditional:
if not check_conditional(x, basedir, inject, fail_on_undefined=fail_on_undefined):
return False
return True
if not isinstance(conditional, basestring):
return conditional
conditional = conditional.replace("jinja2_compare ","")
# allow variable names
if conditional in inject and '-' not in str(inject[conditional]):
conditional = inject[conditional]
conditional = template.template(basedir, conditional, inject, fail_on_undefined=fail_on_undefined)
original = str(conditional).replace("jinja2_compare ","")
# a Jinja2 evaluation that results in something Python can eval!
presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
conditional = template.template(basedir, presented, inject)
val = conditional.strip()
if val == presented:
# the templating failed, meaning most likely a
# variable was undefined. If we happened to be
# looking for an undefined variable, return True,
# otherwise fail
if "is undefined" in conditional:
return True
elif "is defined" in conditional:
return False
else:
raise errors.AnsibleError("error while evaluating conditional: %s" % original)
elif val == "True":
return True
elif val == "False":
return False
else:
raise errors.AnsibleError("unable to evaluate conditional: %s" % original)
def is_executable(path):
    '''Is any execute bit (user, group, or other) set on *path*?'''
    # stat once and test each execute bit against the same mode word
    mode = os.stat(path)[stat.ST_MODE]
    return (stat.S_IXUSR & mode
            or stat.S_IXGRP & mode
            or stat.S_IXOTH & mode)
def unfrackpath(path):
    '''
    Return a canonical absolute path: symlinks resolved, environment
    variables and "~" expanded, relative traversals collapsed.

    example: '$HOME/../../var/mail' becomes '/var/spool/mail'
    '''
    expanded = os.path.expanduser(path)
    expanded = os.path.expandvars(expanded)
    return os.path.normpath(os.path.realpath(expanded))
def prepare_writeable_dir(tree,mode=0777):
    ''' make sure a directory exists and is writeable '''
    # modify the mode to ensure the owner at least
    # has read/write access to this directory
    mode |= 0700
    # make sure the tree path is always expanded
    # and normalized and free of symlinks
    tree = unfrackpath(tree)
    if not os.path.exists(tree):
        try:
            os.makedirs(tree, mode)
        except (IOError, OSError), e:
            raise errors.AnsibleError("Could not make dir %s: %s" % (tree, e))
    # verify writability even when the directory pre-existed
    if not os.access(tree, os.W_OK):
        raise errors.AnsibleError("Cannot write to path %s" % tree)
    return tree
def path_dwim(basedir, given):
    '''
    make relative paths work like folks expect.
    '''
    # absolute paths and ~-paths ignore basedir entirely
    if given.startswith("/"):
        return os.path.abspath(given)
    if given.startswith("~"):
        return os.path.abspath(os.path.expanduser(given))
    if basedir is None:
        basedir = "."
    return os.path.abspath(os.path.join(basedir, given))
def path_dwim_relative(original, dirname, source, playbook_base, check=True):
    ''' find one file in a directory one level up in a dir named dirname relative to current '''
    # (used by roles code)
    basedir = os.path.dirname(original)
    if os.path.islink(basedir):
        # resolve the symlink and look inside <basedir>/<dirname>
        basedir = unfrackpath(basedir)
        template2 = os.path.join(basedir, dirname, source)
    else:
        # otherwise look one level up: <basedir>/../<dirname>
        template2 = os.path.join(basedir, '..', dirname, source)
    source2 = path_dwim(basedir, template2)
    if os.path.exists(source2):
        return source2
    # fall back to a path relative to the playbook itself
    obvious_local_path = path_dwim(playbook_base, source)
    if os.path.exists(obvious_local_path):
        return obvious_local_path
    if check:
        raise errors.AnsibleError("input file not found at %s or %s" % (source2, obvious_local_path))
    return source2 # which does not exist
def json_loads(data):
    ''' parse a JSON string and return a data structure '''
    # thin wrapper kept for API symmetry with parse_json() below
    return json.loads(data)
def parse_json(raw_data):
''' this version for module return data only '''
orig_data = raw_data
# ignore stuff like tcgetattr spewage or other warnings
data = filter_leading_non_json_lines(raw_data)
try:
return json.loads(data)
except:
# not JSON, but try "Baby JSON" which allows many of our modules to not
# require JSON and makes writing modules in bash much simpler
results = {}
try:
tokens = shlex.split(data)
except:
print "failed to parse json: "+ data
raise
for t in tokens:
if "=" not in t:
raise errors.AnsibleError("failed to parse: %s" % orig_data)
(key,value) = t.split("=", 1)
if key == 'changed' or 'failed':
if value.lower() in [ 'true', '1' ]:
value = True
elif value.lower() in [ 'false', '0' ]:
value = False
if key == 'rc':
value = int(value)
results[key] = value
if len(results.keys()) == 0:
return { "failed" : True, "parsed" : False, "msg" : orig_data }
return results
def smush_braces(data):
    ''' smush Jinaj2 braces so unresolved templates like {{ foo }} don't get parsed weird by key=value code '''
    # loop until no padded delimiter remains (handles runs of spaces)
    for needle, replacement in (('{{ ', '{{'), (' }}', '}}')):
        while needle in data:
            data = data.replace(needle, replacement)
    return data
def smush_ds(data):
    # things like key={{ foo }} are not handled by shlex.split well, so preprocess any YAML we load
    # so we do not have to call smush elsewhere
    # Recursively walks lists/dicts; dicts are smushed in place.
    if type(data) == list:
        return [ smush_ds(x) for x in data ]
    elif type(data) == dict:
        for (k,v) in data.items():
            data[k] = smush_ds(v)
        return data
    elif isinstance(data, basestring):
        return smush_braces(data)
    else:
        # scalars (ints, bools, None, ...) pass through untouched
        return data
def parse_yaml(data, path_hint=None):
    ''' convert a yaml string to a data structure. Also supports JSON, ssssssh!!!'''
    stripped_data = data.lstrip()
    loaded = None
    if stripped_data.startswith("{") or stripped_data.startswith("["):
        # since the line starts with { or [ we can infer this is a JSON document.
        try:
            loaded = json.loads(data)
        except ValueError, ve:
            # prefix the error with the file path when we know it
            if path_hint:
                raise errors.AnsibleError(path_hint + ": " + str(ve))
            else:
                raise errors.AnsibleError(str(ve))
    else:
        # else this is pretty sure to be a YAML document
        # (safe_load: no arbitrary object construction from untrusted input)
        loaded = yaml.safe_load(data)
    return smush_ds(loaded)
def process_common_errors(msg, probline, column):
    """Append a hint for well-known YAML mistakes to *msg* and return it.

    *probline* is the offending line, *column* the parser's error column.
    If no known pattern matches, *msg* is returned unchanged.
    """
    replaced = probline.replace(" ","")
    if ":{{" in replaced and "}}" in replaced:
        # unquoted template value right after a colon
        msg = msg + """
This one looks easy to fix. YAML thought it was looking for the start of a
hash/dictionary and was confused to see a second "{". Most likely this was
meant to be an ansible template evaluation instead, so we have to give the
parser a small hint that we wanted a string instead. The solution here is to
just quote the entire value.
For instance, if the original line was:
app_path: {{ base_path }}/foo
It should be written as:
app_path: "{{ base_path }}/foo"
"""
        return msg
    elif len(probline) and len(probline) > 1 and len(probline) > column and probline[column] == ":" and probline.count(':') > 1:
        # a second, unquoted colon on the line
        msg = msg + """
This one looks easy to fix. There seems to be an extra unquoted colon in the line
and this is confusing the parser. It was only expecting to find one free
colon. The solution is just add some quotes around the colon, or quote the
entire line after the first colon.
For instance, if the original line was:
copy: src=file.txt dest=/path/filename:with_colon.txt
It can be written as:
copy: src=file.txt dest='/path/filename:with_colon.txt'
Or:
copy: 'src=file.txt dest=/path/filename:with_colon.txt'
"""
        return msg
    else:
        parts = probline.split(":")
        if len(parts) > 1:
            middle = parts[1].strip()
            match = False
            unbalanced = False
            if middle.startswith("'") and not middle.endswith("'"):
                match = True
            elif middle.startswith('"') and not middle.endswith('"'):
                match = True
            # FIX: the quote-count alternatives must be parenthesized.
            # Previously the trailing `or probline.count('"') > 2` escaped
            # the `and` chain, so any line containing three double quotes
            # was reported as "unbalanced" even when the value was not
            # quoted at all.
            if (len(middle) > 0 and middle[0] in [ '"', "'" ] and middle[-1] in [ '"', "'" ]
                    and (probline.count("'") > 2 or probline.count('"') > 2)):
                unbalanced = True
            if match:
                msg = msg + """
This one looks easy to fix. It seems that there is a value started
with a quote, and the YAML parser is expecting to see the line ended
with the same kind of quote. For instance:
when: "ok" in result.stdout
Could be written as:
when: '"ok" in result.stdout'
or equivalently:
when: "'ok' in result.stdout"
"""
                return msg
            if unbalanced:
                msg = msg + """
We could be wrong, but this one looks like it might be an issue with
unbalanced quotes. If starting a value with a quote, make sure the
line ends with the same set of quotes. For instance this arbitrary
example:
foo: "bad" "wolf"
Could be written as:
foo: '"bad" "wolf"'
"""
                return msg
    return msg
def process_yaml_error(exc, data, path=None, show_content=True):
    """Turn a yaml.YAMLError into a friendly AnsibleYAMLValidationFailed.

    When the exception carries a problem_mark (and *show_content* is true)
    the offending line is echoed with a caret and, where possible, a hint
    about the likely mistake. Always raises; never returns.
    """
    if hasattr(exc, 'problem_mark'):
        mark = exc.problem_mark
        if show_content:
            if mark.line -1 >= 0:
                before_probline = data.split("\n")[mark.line-1]
            else:
                before_probline = ''
            probline = data.split("\n")[mark.line]
            arrow = " " * mark.column + "^"
            msg = """Syntax Error while loading YAML script, %s
Note: The error may actually appear before this position: line %s, column %s
%s
%s
%s""" % (path, mark.line + 1, mark.column + 1, before_probline, probline, arrow)
            unquoted_var = None
            if '{{' in probline and '}}' in probline:
                # FIX: this test used `or`, which is true whenever EITHER
                # quoted form is absent — i.e. almost always — so even
                # correctly quoted templates were reported as unquoted.
                # The "unquoted variable" hint only applies when NEITHER
                # quoted form appears on the line.
                if '"{{' not in probline and "'{{" not in probline:
                    unquoted_var = True
            if not unquoted_var:
                msg = process_common_errors(msg, probline, mark.column)
            else:
                msg = msg + """
We could be wrong, but this one looks like it might be an issue with
missing quotes. Always quote template expression brackets when they
start a value. For instance:
with_items:
- {{ foo }}
Should be written as:
with_items:
- "{{ foo }}"
"""
        else:
            # most likely displaying a file with sensitive content,
            # so don't show any of the actual lines of yaml just the
            # line number itself
            msg = """Syntax error while loading YAML script, %s
The error appears to have been on line %s, column %s, but may actually
be before there depending on the exact syntax problem.
""" % (path, mark.line + 1, mark.column + 1)
    else:
        # No problem markers means we have to throw a generic
        # "stuff messed up" type message. Sry bud.
        if path:
            msg = "Could not parse YAML. Check over %s again." % path
        else:
            msg = "Could not parse YAML."
    raise errors.AnsibleYAMLValidationFailed(msg)
def parse_yaml_from_file(path, vault_password=None):
    ''' convert a yaml file to a data structure '''
    data = None
    show_content = True
    try:
        data = open(path).read()
    except IOError:
        raise errors.AnsibleError("file could not read: %s" % path)
    # transparently decrypt vault-encrypted files; in that case never echo
    # file contents in error messages (show_content=False)
    vault = VaultLib(password=vault_password)
    if vault.is_encrypted(data):
        data = vault.decrypt(data)
        show_content = False
    try:
        return parse_yaml(data, path_hint=path)
    except yaml.YAMLError, exc:
        # re-raises as AnsibleYAMLValidationFailed with a friendly message
        process_yaml_error(exc, data, path, show_content)
def parse_kv(args):
    ''' convert a string of key/value items to a dict '''
    options = {}
    if args is not None:
        # attempting to split a unicode here does bad things
        args = args.encode('utf-8')
        try:
            # posix=True honors shell-style quoting within values
            vargs = shlex.split(args, posix=True)
        except ValueError, ve:
            if 'no closing quotation' in str(ve).lower():
                raise errors.AnsibleError("error parsing argument string, try quoting the entire line.")
            else:
                raise
        # back to unicode after the byte-level split above
        vargs = [x.decode('utf-8') for x in vargs]
        for x in vargs:
            # tokens without '=' are silently ignored
            if "=" in x:
                k, v = x.split("=",1)
                options[k]=v
    return options
def merge_hash(a, b):
    ''' recursively merges hash b into a
    keys from b take precedence over keys from a '''
    # deep-copy so neither input dict is mutated
    result = copy.deepcopy(a)
    # next, iterate over b keys and values
    for k, v in b.iteritems():
        # if there's already such key in a
        # and that key contains dict
        if k in result and isinstance(result[k], dict):
            # merge those dicts recursively
            result[k] = merge_hash(a[k], v)
        else:
            # otherwise, just copy a value from b to a
            result[k] = v
    return result
def md5s(data):
    ''' Return MD5 hex digest of data. '''
    # _md5 is the hash constructor resolved at import time elsewhere in
    # this module (hashlib.md5 or the legacy md5 module)
    digest = _md5()
    try:
        digest.update(data)
    except UnicodeEncodeError:
        # unicode input: hash its UTF-8 encoding instead
        digest.update(data.encode('utf-8'))
    return digest.hexdigest()
def md5(filename):
    ''' Return MD5 hex digest of local file, None if file is not present or a directory. '''
    if not os.path.exists(filename) or os.path.isdir(filename):
        return None
    digest = _md5()
    # read in 64 KiB chunks so large files don't blow memory
    blocksize = 64 * 1024
    try:
        infile = open(filename, 'rb')
        block = infile.read(blocksize)
        while block:
            digest.update(block)
            block = infile.read(blocksize)
        infile.close()
    except IOError, e:
        raise errors.AnsibleError("error while accessing the file %s, error was: %s" % (filename, e))
    return digest.hexdigest()
def default(value, function):
    ''' syntactic sugar around lazy evaluation of defaults '''
    # only invoke the fallback factory when value is genuinely absent
    return function() if value is None else value
def _gitinfo():
    ''' returns a string containing git branch, commit id and commit date '''
    # Returns None when no .git directory is found next to the source tree,
    # '' when the branch ref cannot be resolved.
    result = None
    repo_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', '.git')
    if os.path.exists(repo_path):
        # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
        if os.path.isfile(repo_path):
            try:
                gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
                # There is a posibility the .git file to have an absolute path.
                if os.path.isabs(gitdir):
                    repo_path = gitdir
                else:
                    repo_path = os.path.join(repo_path.split('.git')[0], gitdir)
            except (IOError, AttributeError):
                return ''
        f = open(os.path.join(repo_path, "HEAD"))
        branch = f.readline().split('/')[-1].rstrip("\n")
        f.close()
        branch_path = os.path.join(repo_path, "refs", "heads", branch)
        if os.path.exists(branch_path):
            f = open(branch_path)
            commit = f.readline()[:10]
            f.close()
            # use the ref file's mtime as an approximation of commit date
            date = time.localtime(os.stat(branch_path).st_mtime)
            if time.daylight == 0:
                offset = time.timezone
            else:
                offset = time.altzone
            result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit,
                time.strftime("%Y/%m/%d %H:%M:%S", date), offset / -36)
        else:
            result = ''
    return result
def version(prog):
    """Return '<prog> <version>' plus git info when running from a checkout."""
    result = "{0} {1}".format(prog, __version__)
    gitinfo = _gitinfo()
    if gitinfo:
        result = result + " {0}".format(gitinfo)
    return result
def getch():
    ''' read in a single character '''
    fd = sys.stdin.fileno()
    # remember the terminal state so we can always restore it
    old_settings = termios.tcgetattr(fd)
    try:
        # raw mode: return immediately on one keystroke, no echo
        tty.setraw(sys.stdin.fileno())
        ch = sys.stdin.read(1)
    finally:
        termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return ch
def sanitize_output(str):
    ''' strips private info out of a string '''
    # NOTE(review): the parameter shadows the builtin `str`; the name is kept
    # because it is part of the function's public signature.
    private_keys = ['password', 'login_password']
    filter_re = [
        # filter out things like user:pass@foo/whatever
        # and http://username:pass@wherever/foo
        re.compile('^(?P<before>.*:)(?P<password>.*)(?P<after>\@.*)$'),
    ]
    parts = str.split()
    output = ''
    for part in parts:
        # FIX: the whole body used to sit under a bare `except:` that
        # swallowed every exception. Only the tuple unpack below can raise
        # (ValueError when the token has no '='), so catch exactly that.
        try:
            (k, v) = part.split('=', 1)
        except ValueError:
            # no key=value structure: pass the token through unchanged
            output += " %s" % part
            continue
        if k in private_keys:
            output += " %s=VALUE_HIDDEN" % k
        else:
            found = False
            for filter in filter_re:
                m = filter.match(v)
                if m:
                    d = m.groupdict()
                    output += " %s=%s" % (k, d['before'] + "********" + d['after'])
                    found = True
                    break
            if not found:
                output += " %s" % part
    return output.strip()
####################################################################
# option handling code for /usr/bin/ansible and ansible-playbook
# below this line
class SortedOptParser(optparse.OptionParser):
    '''Optparser which sorts the options by opt before outputting --help'''
    def format_help(self, formatter=None):
        # sort in place by the canonical option string (e.g. '--forks')
        self.option_list.sort(key=operator.methodcaller('get_opt_string'))
        return optparse.OptionParser.format_help(self, formatter=None)
def increment_debug(option, opt, value, parser):
    # optparse callback: every occurrence of -v bumps the module-level
    # verbosity counter by one
    global VERBOSITY
    VERBOSITY += 1
def base_parser(constants=C, usage="", output_opts=False, runas_opts=False,
    async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, diff_opts=False):
    ''' create an options parser for any ansible script '''
    # the boolean flags switch on optional option groups so each CLI tool
    # (ansible, ansible-playbook, ...) only exposes what it supports
    parser = SortedOptParser(usage, version=version("%prog"))
    parser.add_option('-v','--verbose', default=False, action="callback",
        callback=increment_debug, help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")
    parser.add_option('-f','--forks', dest='forks', default=constants.DEFAULT_FORKS, type='int',
        help="specify number of parallel processes to use (default=%s)" % constants.DEFAULT_FORKS)
    parser.add_option('-i', '--inventory-file', dest='inventory',
        help="specify inventory host file (default=%s)" % constants.DEFAULT_HOST_LIST,
        default=constants.DEFAULT_HOST_LIST)
    parser.add_option('-k', '--ask-pass', default=False, dest='ask_pass', action='store_true',
        help='ask for SSH password')
    parser.add_option('--private-key', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
        help='use this file to authenticate the connection')
    parser.add_option('-K', '--ask-sudo-pass', default=False, dest='ask_sudo_pass', action='store_true',
        help='ask for sudo password')
    parser.add_option('--ask-su-pass', default=False, dest='ask_su_pass', action='store_true',
        help='ask for su password')
    parser.add_option('--ask-vault-pass', default=False, dest='ask_vault_pass', action='store_true',
        help='ask for vault password')
    parser.add_option('--vault-password-file', default=None, dest='vault_password_file',
        help="vault password file")
    parser.add_option('--list-hosts', dest='listhosts', action='store_true',
        help='outputs a list of matching hosts; does not execute anything else')
    parser.add_option('-M', '--module-path', dest='module_path',
        help="specify path(s) to module library (default=%s)" % constants.DEFAULT_MODULE_PATH,
        default=None)
    if subset_opts:
        parser.add_option('-l', '--limit', default=constants.DEFAULT_SUBSET, dest='subset',
            help='further limit selected hosts to an additional pattern')
    parser.add_option('-T', '--timeout', default=constants.DEFAULT_TIMEOUT, type='int',
        dest='timeout',
        help="override the SSH timeout in seconds (default=%s)" % constants.DEFAULT_TIMEOUT)
    if output_opts:
        parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
            help='condense output')
        parser.add_option('-t', '--tree', dest='tree', default=None,
            help='log output to this directory')
    if runas_opts:
        parser.add_option("-s", "--sudo", default=constants.DEFAULT_SUDO, action="store_true",
            dest='sudo', help="run operations with sudo (nopasswd)")
        parser.add_option('-U', '--sudo-user', dest='sudo_user', default=None,
            help='desired sudo user (default=root)') # Can't default to root because we need to detect when this option was given
        parser.add_option('-u', '--user', default=constants.DEFAULT_REMOTE_USER,
            dest='remote_user', help='connect as this user (default=%s)' % constants.DEFAULT_REMOTE_USER)
        parser.add_option('-S', '--su', default=constants.DEFAULT_SU,
            action='store_true', help='run operations with su')
        parser.add_option('-R', '--su-user', help='run operations with su as this '
            'user (default=%s)' % constants.DEFAULT_SU_USER)
    if connect_opts:
        parser.add_option('-c', '--connection', dest='connection',
            default=C.DEFAULT_TRANSPORT,
            help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)
    if async_opts:
        parser.add_option('-P', '--poll', default=constants.DEFAULT_POLL_INTERVAL, type='int',
            dest='poll_interval',
            help="set the poll interval if using -B (default=%s)" % constants.DEFAULT_POLL_INTERVAL)
        parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
            help='run asynchronously, failing after X seconds (default=N/A)')
    if check_opts:
        parser.add_option("-C", "--check", default=False, dest='check', action='store_true',
            help="don't make any changes; instead, try to predict some of the changes that may occur"
            )
    if diff_opts:
        parser.add_option("-D", "--diff", default=False, dest='diff', action='store_true',
            help="when changing (small) files and templates, show the differences in those files; works great with --check"
            )
    return parser
def ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=False, confirm_vault=False, confirm_new=False):
    """Interactively prompt for the current and/or new vault password.

    Returns (vault_pass, new_vault_pass), each None when not requested.
    Raises errors.AnsibleError when a confirmation prompt does not match.
    """
    vault_pass = None
    new_vault_pass = None
    if ask_vault_pass:
        vault_pass = getpass.getpass(prompt="Vault password: ")
    if ask_vault_pass and confirm_vault:
        vault_pass2 = getpass.getpass(prompt="Confirm Vault password: ")
        if vault_pass != vault_pass2:
            raise errors.AnsibleError("Passwords do not match")
    if ask_new_vault_pass:
        new_vault_pass = getpass.getpass(prompt="New Vault password: ")
    if ask_new_vault_pass and confirm_new:
        new_vault_pass2 = getpass.getpass(prompt="Confirm New Vault password: ")
        if new_vault_pass != new_vault_pass2:
            raise errors.AnsibleError("Passwords do not match")
    # enforce no newline chars at the end of passwords
    if vault_pass:
        vault_pass = vault_pass.strip()
    if new_vault_pass:
        new_vault_pass = new_vault_pass.strip()
    return vault_pass, new_vault_pass
def ask_passwords(ask_pass=False, ask_sudo_pass=False, ask_su_pass=False, ask_vault_pass=False):
    """Prompt for the SSH/sudo/su/vault passwords that were requested.

    Returns (sshpass, sudopass, su_pass, vault_pass); entries not asked
    for are None. An empty sudo password falls back to the SSH password.
    """
    sshpass = None
    sudopass = None
    su_pass = None
    vault_pass = None
    sudo_prompt = "sudo password: "
    su_prompt = "su password: "
    if ask_pass:
        sshpass = getpass.getpass(prompt="SSH password: ")
        sudo_prompt = "sudo password [defaults to SSH password]: "
    if ask_sudo_pass:
        sudopass = getpass.getpass(prompt=sudo_prompt)
        if ask_pass and sudopass == '':
            # empty answer means "reuse the SSH password"
            sudopass = sshpass
    if ask_su_pass:
        su_pass = getpass.getpass(prompt=su_prompt)
    if ask_vault_pass:
        vault_pass = getpass.getpass(prompt="Vault password: ")
    return (sshpass, sudopass, su_pass, vault_pass)
def do_encrypt(result, encrypt, salt_size=None, salt=None):
    """Hash *result* with the passlib algorithm named by *encrypt*.

    Requires passlib; raises errors.AnsibleError otherwise or when the
    algorithm name is unknown. salt_size takes precedence over salt.
    """
    if PASSLIB_AVAILABLE:
        try:
            # resolve the algorithm by name, e.g. 'sha512_crypt'
            crypt = getattr(passlib.hash, encrypt)
        except:
            raise errors.AnsibleError("passlib does not support '%s' algorithm" % encrypt)
        if salt_size:
            result = crypt.encrypt(result, salt_size=salt_size)
        elif salt:
            result = crypt.encrypt(result, salt=salt)
        else:
            result = crypt.encrypt(result)
    else:
        raise errors.AnsibleError("passlib must be installed to encrypt vars_prompt values")
    return result
def last_non_blank_line(buf):
    '''Return the last line of *buf* that contains any characters.'''
    for line in reversed(buf.splitlines()):
        if line:
            return line
    # shouldn't occur unless there's no output at all
    return ""
def filter_leading_non_json_lines(buf):
    '''
    used to avoid random output from SSH at the top of JSON output, like messages from
    tcagetattr, or where dropbear spews MOTD on every single command (which is nuts).
    Drops every line until one looks like JSON ('{' / '[') or key=value
    output; everything from that point on is kept (multiline JSON is valid).
    '''
    kv_regex = re.compile(r'.*\w+=\w+.*')
    kept = []
    seen_payload = False
    for line in buf.splitlines():
        if not seen_payload:
            seen_payload = (kv_regex.match(line) is not None
                            or line.startswith('{')
                            or line.startswith('['))
        if seen_payload:
            kept.append(line + '\n')
    return ''.join(kept)
def boolean(value):
    '''Interpret common truthy spellings; anything else is False.'''
    return str(value).lower() in ("true", "t", "y", "1", "yes")
def make_sudo_cmd(sudo_user, executable, cmd):
    """
    helper function for connection plugins to create sudo commands
    """
    # Rather than detect if sudo wants a password this time, -k makes
    # sudo always ask for a password if one is required.
    # Passing a quoted compound command to sudo (or sudo -s)
    # directly doesn't work, so we shellquote it with pipes.quote()
    # and pass the quoted string to the user's shell. We loop reading
    # output until we see the randomly-generated sudo prompt set with
    # the -p option.
    # random marker makes the prompt/success key unguessable per-invocation
    randbits = ''.join(chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
    prompt = '[sudo via ansible, key=%s] password: ' % randbits
    success_key = 'SUDO-SUCCESS-%s' % randbits
    sudocmd = '%s -k && %s %s -S -p "%s" -u %s %s -c %s' % (
        C.DEFAULT_SUDO_EXE, C.DEFAULT_SUDO_EXE, C.DEFAULT_SUDO_FLAGS,
        prompt, sudo_user, executable or '$SHELL', pipes.quote('echo %s; %s' % (success_key, cmd)))
    # returns (full shell command, expected prompt, success marker)
    return ('/bin/sh -c ' + pipes.quote(sudocmd), prompt, success_key)
def make_su_cmd(su_user, executable, cmd):
    """
    Helper function for connection plugins to create direct su commands
    """
    # TODO: work on this function
    randbits = ''.join(chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
    # su prints its own localized prompt, so match generically
    prompt = '[Pp]assword: ?$'
    success_key = 'SUDO-SUCCESS-%s' % randbits
    sudocmd = '%s %s %s -c "%s -c %s"' % (
        C.DEFAULT_SU_EXE, C.DEFAULT_SU_FLAGS, su_user, executable or '$SHELL',
        pipes.quote('echo %s; %s' % (success_key, cmd))
    )
    # returns (full shell command, prompt regex, success marker)
    return ('/bin/sh -c ' + pipes.quote(sudocmd), prompt, success_key)
# Types passed through to_unicode() untouched: Python 2 unicode strings
# and None.
_TO_UNICODE_TYPES = (unicode, type(None))

def to_unicode(value):
    # Byte strings are decoded as UTF-8; unicode/None pass straight through.
    if isinstance(value, _TO_UNICODE_TYPES):
        return value
    return value.decode("utf-8")
def get_diff(diff):
    # called by --diff usage in playbook and runner via callbacks
    # include names in diffs 'before' and 'after' and do diff -U 10
    try:
        # difflib can emit noisy warnings on some inputs; silence them
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            ret = []
            if 'dst_binary' in diff:
                ret.append("diff skipped: destination file appears to be binary\n")
            if 'src_binary' in diff:
                ret.append("diff skipped: source file appears to be binary\n")
            if 'dst_larger' in diff:
                ret.append("diff skipped: destination file size is greater than %d\n" % diff['dst_larger'])
            if 'src_larger' in diff:
                ret.append("diff skipped: source file size is greater than %d\n" % diff['src_larger'])
            if 'before' in diff and 'after' in diff:
                if 'before_header' in diff:
                    before_header = "before: %s" % diff['before_header']
                else:
                    before_header = 'before'
                if 'after_header' in diff:
                    after_header = "after: %s" % diff['after_header']
                else:
                    after_header = 'after'
                differ = difflib.unified_diff(to_unicode(diff['before']).splitlines(True), to_unicode(diff['after']).splitlines(True), before_header, after_header, '', '', 10)
                for line in list(differ):
                    ret.append(line)
            return u"".join(ret)
    except UnicodeDecodeError:
        return ">> the files are different, but the diff library cannot compare unicode strings"
def is_list_of_strings(items):
    # True when every element is a (byte or unicode) string; True for empty.
    for x in items:
        if not isinstance(x, basestring):
            return False
    return True
def list_union(a, b):
    '''Order-preserving union: items of a, then unseen items of b.'''
    union = []
    for sequence in (a, b):
        for item in sequence:
            if item not in union:
                union.append(item)
    return union
def list_intersection(a, b):
    '''Order-preserving, duplicate-free intersection of a with b.'''
    common = []
    for item in a:
        if item not in b:
            continue
        if item in common:
            continue
        common.append(item)
    return common
def safe_eval(expr, locals={}, include_exceptions=False):
'''
this is intended for allowing things like:
with_items: a_list_variable
where Jinja2 would return a string
but we do not want to allow it to call functions (outside of Jinja2, where
the env is constrained)
Based on:
http://stackoverflow.com/questions/12523516/using-ast-and-whitelists-to-make-pythons-eval-safe
'''
# this is the whitelist of AST nodes we are going to
# allow in the evaluation. Any node type other than
# those listed here will raise an exception in our custom
# visitor class defined below.
SAFE_NODES = set(
(
ast.Add,
ast.BinOp,
ast.Call,
ast.Compare,
ast.Dict,
ast.Div,
ast.Expression,
ast.List,
ast.Load,
ast.Mult,
ast.Num,
ast.Name,
ast.Str,
ast.Sub,
ast.Tuple,
ast.UnaryOp,
)
)
# AST node types were expanded after 2.6
if not sys.version.startswith('2.6'):
SAFE_NODES.union(
set(
(ast.Set,)
)
)
filter_list = []
for filter in filter_loader.all():
filter_list.extend(filter.filters().keys())
CALL_WHITELIST = C.DEFAULT_CALLABLE_WHITELIST + filter_list
class CleansingNodeVisitor(ast.NodeVisitor):
def generic_visit(self, node, inside_call=False):
if type(node) not in SAFE_NODES:
raise Exception("invalid expression (%s)" % expr)
elif isinstance(node, ast.Call):
inside_call = True
elif isinstance(node, ast.Name) and inside_call:
if hasattr(builtin, node.id) and node.id not in CALL_WHITELIST:
raise Exception("invalid function: %s" % node.id)
# iterate over all child nodes
for child_node in ast.iter_child_nodes(node):
self.generic_visit(child_node, inside_call)
if not isinstance(expr, basestring):
# already templated to a datastructure, perhaps?
if include_exceptions:
return (expr, None)
return expr
cnv = CleansingNodeVisitor()
try:
parsed_tree = ast.parse(expr, mode='eval')
cnv.visit(parsed_tree)
compiled = compile(parsed_tree, expr, 'eval')
result = eval(compiled, {}, locals)
if include_exceptions:
return (result, None)
else:
return result
except SyntaxError, e:
# special handling for syntax errors, we just return
# the expression string back as-is
if include_exceptions:
return (expr, None)
return expr
except Exception, e:
if include_exceptions:
return (expr, e)
return expr
def listify_lookup_plugin_terms(terms, basedir, inject):
    """Normalize a with_* argument into a list, templating as needed."""
    if isinstance(terms, basestring):
        # someone did:
        # with_items: alist
        # OR
        # with_items: {{ alist }}
        stripped = terms.strip()
        if not (stripped.startswith('{') or stripped.startswith('[')) and not stripped.startswith("/") and not stripped.startswith('set(['):
            # if not already a list, get ready to evaluate with Jinja2
            # not sure why the "/" is in above code :)
            try:
                new_terms = template.template(basedir, "{{ %s }}" % terms, inject)
                if isinstance(new_terms, basestring) and "{{" in new_terms:
                    # template came back unresolved: keep the original string
                    pass
                else:
                    terms = new_terms
            except:
                pass
        if '{' in terms or '[' in terms:
            # Jinja2 already evaluated a variable to a list.
            # Jinja2-ified list needs to be converted back to a real type
            # TODO: something a bit less heavy than eval
            return safe_eval(terms)
        if isinstance(terms, basestring):
            # single bare term: wrap in a one-element list
            terms = [ terms ]
    return terms
def combine_vars(a, b):
    # honor the configured hash behaviour: deep-merge b into a, or let
    # b's keys simply replace a's (Python 2 dict.items() list concat)
    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
        return merge_hash(a, b)
    else:
        return dict(a.items() + b.items())
def random_password(length=20, chars=C.DEFAULT_PASSWORD_CHARS):
    '''Return a random password string of length containing only chars.'''
    # rejection sampling over os.urandom (a CSPRNG): draw one byte at a
    # time and keep it only if it is in the allowed alphabet.
    # NOTE(review): relies on Python 2 where os.urandom returns a str and
    # `new_char in chars` is a substring test.
    password = []
    while len(password) < length:
        new_char = os.urandom(1)
        if new_char in chars:
            password.append(new_char)
    return ''.join(password)
def before_comment(msg):
    ''' what's the part of a string before a comment? '''
    # Temporarily mask escaped hashes so the split below ignores them,
    # then restore them in the surviving prefix.
    sentinel = "**NOT_A_COMMENT**"
    masked = msg.replace("\#", sentinel)
    head = masked.split("#")[0]
    return head.replace(sentinel, "#")
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_2141_3 |
crossvul-python_data_good_3499_0 | import getopt
from subprocess import Popen, PIPE
import pipes
import Bcfg2.Server.Admin
class Viz(Bcfg2.Server.Admin.MetadataCore):
    """bcfg2-admin subcommand: render the metadata graph via graphviz dot."""
    __shorthelp__ = "Produce graphviz diagrams of metadata structures"
    __longhelp__ = (__shorthelp__ + "\n\nbcfg2-admin viz [--includehosts] "
                    "[--includebundles] [--includekey] "
                    "[-o output.png] [--raw]")
    __usage__ = ("bcfg2-admin viz [options]\n\n"
                 " %-25s%s\n"
                 " %-25s%s\n"
                 " %-25s%s\n"
                 " %-25s%s\n" %
                 ("-H, --includehosts",
                  "include hosts in the viz output",
                  "-b, --includebundles",
                  "include bundles in the viz output",
                  "-k, --includekey",
                  "show a key for different digraph shapes",
                  "-o, --outfile <file>",
                  "write viz output to an output file"))
    # node fill colors cycled through by the metadata visualizer
    colors = ['steelblue1', 'chartreuse', 'gold', 'magenta',
              'indianred1', 'limegreen', 'orange1', 'lightblue2',
              'green1', 'blue1', 'yellow1', 'darkturquoise', 'gray66']
    # plugins irrelevant to the diagram; skipped to speed up server init
    plugin_blacklist = ['DBStats', 'Snapshots', 'Cfg', 'Pkgmgr', 'Packages',
                        'Rules', 'Account', 'Decisions', 'Deps', 'Git', 'Svn',
                        'Fossil', 'Bzr', 'Bundler', 'TGenshi', 'SGenshi', 'Base']

    def __init__(self, cfile):
        Bcfg2.Server.Admin.MetadataCore.__init__(self, cfile,
                                                 self.__usage__,
                                                 pblacklist=self.plugin_blacklist)

    def __call__(self, args):
        """Parse subcommand options and print/write the rendered graph."""
        Bcfg2.Server.Admin.MetadataCore.__call__(self, args)
        # First get options to the 'viz' subcommand
        try:
            opts, args = getopt.getopt(args, 'Hbko:',
                                       ['includehosts', 'includebundles',
                                        'includekey', 'outfile='])
        except getopt.GetoptError, msg:
            print msg
        #FIXME: is this for --raw?
        #rset = False
        hset = False
        bset = False
        kset = False
        outputfile = False
        for opt, arg in opts:
            if opt in ("-H", "--includehosts"):
                hset = True
            elif opt in ("-b", "--includebundles"):
                bset = True
            elif opt in ("-k", "--includekey"):
                kset = True
            elif opt in ("-o", "--outfile"):
                outputfile = arg
        data = self.Visualize(self.get_repo_path(), hset, bset,
                              kset, outputfile)
        if data:
            print(data)
        raise SystemExit, 0

    def Visualize(self, repopath, hosts=False,
                  bundles=False, key=False, output=False):
        """Build visualization of groups file."""
        # output format comes from the output file extension, default png
        if output:
            format = output.split('.')[-1]
        else:
            format = 'png'
        cmd = ["dot", "-T", format]
        if output:
            cmd.extend(["-o", output])
        try:
            # preferred path: argv-list Popen, no shell involved
            dotpipe = Popen(cmd, stdin=PIPE, stdout=PIPE, close_fds=True)
        except OSError:
            # on some systems (RHEL 6), you cannot run dot with
            # shell=True. on others (Gentoo with Python 2.7), you
            # must. In yet others (RHEL 5), either way works. I have
            # no idea what the difference is, but it's kind of a PITA.
            # shell fallback: quote user-supplied values to avoid injection
            cmd = ["dot", "-T", pipes.quote(format)]
            if output:
                cmd.extend(["-o", pipes.quote(output)])
            dotpipe = Popen(cmd, shell=True,
                            stdin=PIPE, stdout=PIPE, close_fds=True)
        try:
            dotpipe.stdin.write("digraph groups {\n")
        except:
            print "write to dot process failed. Is graphviz installed?"
            raise SystemExit(1)
        dotpipe.stdin.write('\trankdir="LR";\n')
        # the metadata plugin emits the actual node/edge statements
        dotpipe.stdin.write(self.metadata.viz(hosts, bundles,
                                              key, self.colors))
        if key:
            # legend explaining the shape used for each entity type
            dotpipe.stdin.write("\tsubgraph cluster_key {\n")
            dotpipe.stdin.write('''\tstyle="filled";\n''')
            dotpipe.stdin.write('''\tcolor="lightblue";\n''')
            dotpipe.stdin.write('''\tBundle [ shape="septagon" ];\n''')
            dotpipe.stdin.write('''\tGroup [shape="ellipse"];\n''')
            dotpipe.stdin.write('''\tProfile [style="bold", shape="ellipse"];\n''')
            dotpipe.stdin.write('''\tHblock [label="Host1|Host2|Host3", shape="record"];\n''')
            dotpipe.stdin.write('''\tlabel="Key";\n''')
            dotpipe.stdin.write("\t}\n")
        dotpipe.stdin.write("}\n")
        dotpipe.stdin.close()
        return dotpipe.stdout.read()
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3499_0 |
crossvul-python_data_bad_3766_3 | from __future__ import absolute_import
import datetime
import os
import re
import sys
import time
import warnings
from pprint import pformat
from urllib import urlencode, quote
from urlparse import urljoin, urlparse
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
# The mod_python version is more efficient, so try importing it first.
from mod_python.util import parse_qsl
except ImportError:
try:
# Python 2.6 and greater
from urlparse import parse_qsl
except ImportError:
# Python 2.5. Works on Python 2.6 but raises PendingDeprecationWarning
from cgi import parse_qsl
import Cookie
# Feature-detect the stdlib Cookie module so the compatibility shims
# below are only installed when actually needed.
# httponly support exists in Python 2.6's Cookie library,
# but not in Python 2.5.
_morsel_supports_httponly = 'httponly' in Cookie.Morsel._reserved
# Some versions of Python 2.7 and later won't need this encoding bug fix:
_cookie_encodes_correctly = Cookie.SimpleCookie().value_encode(';') == (';', '"\\073"')
# See ticket #13007, http://bugs.python.org/issue2193 and http://trac.edgewall.org/ticket/2256
# Probe whether the parser tolerates ':' in cookie names.
_tc = Cookie.SimpleCookie()
try:
    _tc.load('foo:bar=1')
    _cookie_allows_colon_in_names = True
except Cookie.CookieError:
    _cookie_allows_colon_in_names = False
# If the stdlib Cookie module passes all three probes, use it unchanged;
# otherwise define patched Morsel/SimpleCookie subclasses that paper over
# the missing features.
if _morsel_supports_httponly and _cookie_encodes_correctly and _cookie_allows_colon_in_names:
    SimpleCookie = Cookie.SimpleCookie
else:
    if not _morsel_supports_httponly:
        class Morsel(Cookie.Morsel):
            # Morsel subclass that understands the 'httponly' attribute
            # on Pythons whose Cookie module predates it.
            def __setitem__(self, K, V):
                K = K.lower()
                if K == "httponly":
                    if V:
                        # The superclass rejects httponly as a key,
                        # so we jump to the grandparent.
                        super(Cookie.Morsel, self).__setitem__(K, V)
                else:
                    super(Morsel, self).__setitem__(K, V)

            def OutputString(self, attrs=None):
                output = super(Morsel, self).OutputString(attrs)
                if "httponly" in self:
                    output += "; httponly"
                return output
    else:
        Morsel = Cookie.Morsel

    class SimpleCookie(Cookie.SimpleCookie):
        # Conditionally patched methods; each fix is only compiled in
        # when the corresponding probe above failed.
        if not _cookie_encodes_correctly:
            def value_encode(self, val):
                # Some browsers do not support quoted-string from RFC 2109,
                # including some versions of Safari and Internet Explorer.
                # These browsers split on ';', and some versions of Safari
                # are known to split on ', '. Therefore, we encode ';' and ','

                # SimpleCookie already does the hard work of encoding and decoding.
                # It uses octal sequences like '\\012' for newline etc.
                # and non-ASCII chars. We just make use of this mechanism, to
                # avoid introducing two encoding schemes which would be confusing
                # and especially awkward for javascript.

                # NB, contrary to Python docs, value_encode returns a tuple containing
                # (real val, encoded_val)
                val, encoded = super(SimpleCookie, self).value_encode(val)

                encoded = encoded.replace(";", "\\073").replace(",","\\054")
                # If encoded now contains any quoted chars, we need double quotes
                # around the whole string.
                if "\\" in encoded and not encoded.startswith('"'):
                    encoded = '"' + encoded + '"'

                return val, encoded

        if not _cookie_allows_colon_in_names or not _morsel_supports_httponly:
            def load(self, rawdata):
                # Track names the parser choked on so they can be dropped
                # instead of aborting the whole cookie header.
                self.bad_cookies = set()
                super(SimpleCookie, self).load(rawdata)
                for key in self.bad_cookies:
                    del self[key]

            # override private __set() method:
            # (needed for using our Morsel, and for laxness with CookieError
            def _BaseCookie__set(self, key, real_value, coded_value):
                try:
                    M = self.get(key, Morsel())
                    M.set(key, real_value, coded_value)
                    dict.__setitem__(self, key, M)
                except Cookie.CookieError:
                    self.bad_cookies.add(key)
                    dict.__setitem__(self, key, Cookie.Morsel())
class CompatCookie(SimpleCookie):
    """Deprecated alias of SimpleCookie; warns on instantiation."""
    def __init__(self, *args, **kwargs):
        super(CompatCookie, self).__init__(*args, **kwargs)
        warnings.warn("CompatCookie is deprecated. Use django.http.SimpleCookie instead.", DeprecationWarning)
from django.conf import settings
from django.core import signing
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.core.files import uploadhandler
from django.http.multipartparser import MultiPartParser
from django.http.utils import *
from django.utils.datastructures import MultiValueDict, ImmutableList
from django.utils.encoding import smart_str, iri_to_uri, force_unicode
from django.utils.http import cookie_date
from django.utils import timezone
# Characters with reserved meaning in URLs (RFC 3986).
RESERVED_CHARS="!*'();:@&=+$,/?%#[]"

# Matches absolute http(s) URLs; used to decide whether a redirect
# location needs to be joined against the current request URI.
absolute_http_url_re = re.compile(r"^https?://", re.I)

class Http404(Exception):
    # Raised by views to trigger the framework's 404 handling.
    pass

# Unique sentinel: distinguishes "no default supplied" from a None default
# in get_signed_cookie().
RAISE_ERROR = object()
def build_request_repr(request, path_override=None, GET_override=None,
                       POST_override=None, COOKIES_override=None,
                       META_override=None):
    """
    Builds and returns the request's representation string. The request's
    attributes may be overridden by pre-processed values.
    """
    # Since this is called as part of error handling, we need to be very
    # robust against potentially malformed input.
    # The bare ``except:`` clauses below are deliberate: this runs while
    # another error is being formatted, so any failure must degrade to a
    # placeholder string rather than raise again.
    try:
        get = (pformat(GET_override)
               if GET_override is not None
               else pformat(request.GET))
    except:
        get = '<could not parse>'
    if request._post_parse_error:
        post = '<could not parse>'
    else:
        try:
            post = (pformat(POST_override)
                    if POST_override is not None
                    else pformat(request.POST))
        except:
            post = '<could not parse>'
    try:
        cookies = (pformat(COOKIES_override)
                   if COOKIES_override is not None
                   else pformat(request.COOKIES))
    except:
        cookies = '<could not parse>'
    try:
        meta = (pformat(META_override)
                if META_override is not None
                else pformat(request.META))
    except:
        meta = '<could not parse>'
    path = path_override if path_override is not None else request.path
    return smart_str(u'<%s\npath:%s,\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' %
                     (request.__class__.__name__,
                      path,
                      unicode(get),
                      unicode(post),
                      unicode(cookies),
                      unicode(meta)))
class UnreadablePostError(IOError):
    # Raised when reading the request body fails (e.g. client disconnect).
    pass
class HttpRequest(object):
    """A basic HTTP request."""

    # The encoding used in GET/POST dicts. None means use default setting.
    _encoding = None
    _upload_handlers = []

    def __init__(self):
        self.GET, self.POST, self.COOKIES, self.META, self.FILES = {}, {}, {}, {}, {}
        self.path = ''
        self.path_info = ''
        self.method = None
        self._post_parse_error = False

    def __repr__(self):
        return build_request_repr(self)

    def get_host(self):
        """Returns the HTTP host using the environment or request headers."""
        # We try three options, in order of decreasing preference.
        # NOTE(review): the value is taken verbatim from client-supplied
        # headers with no validation here, and flows into
        # build_absolute_uri(); confirm upstream hardening against
        # forged Host / X-Forwarded-Host headers.
        if settings.USE_X_FORWARDED_HOST and (
            'HTTP_X_FORWARDED_HOST' in self.META):
            host = self.META['HTTP_X_FORWARDED_HOST']
        elif 'HTTP_HOST' in self.META:
            host = self.META['HTTP_HOST']
        else:
            # Reconstruct the host using the algorithm from PEP 333.
            host = self.META['SERVER_NAME']
            server_port = str(self.META['SERVER_PORT'])
            if server_port != (self.is_secure() and '443' or '80'):
                host = '%s:%s' % (host, server_port)
        return host

    def get_full_path(self):
        # RFC 3986 requires query string arguments to be in the ASCII range.
        # Rather than crash if this doesn't happen, we encode defensively.
        return '%s%s' % (self.path, self.META.get('QUERY_STRING', '') and ('?' + iri_to_uri(self.META.get('QUERY_STRING', ''))) or '')

    def get_signed_cookie(self, key, default=RAISE_ERROR, salt='', max_age=None):
        """
        Attempts to return a signed cookie. If the signature fails or the
        cookie has expired, raises an exception... unless you provide the
        default argument in which case that value will be returned instead.
        """
        try:
            cookie_value = self.COOKIES[key].encode('utf-8')
        except KeyError:
            if default is not RAISE_ERROR:
                return default
            else:
                raise
        try:
            value = signing.get_cookie_signer(salt=key + salt).unsign(
                cookie_value, max_age=max_age)
        except signing.BadSignature:
            if default is not RAISE_ERROR:
                return default
            else:
                raise
        return value

    def build_absolute_uri(self, location=None):
        """
        Builds an absolute URI from the location and the variables available in
        this request. If no location is specified, the absolute URI is built on
        ``request.get_full_path()``.
        """
        if not location:
            location = self.get_full_path()
        if not absolute_http_url_re.match(location):
            # Relative location: anchor it to the current request's
            # scheme/host/path.
            current_uri = '%s://%s%s' % (self.is_secure() and 'https' or 'http',
                                         self.get_host(), self.path)
            location = urljoin(current_uri, location)
        return iri_to_uri(location)

    def _is_secure(self):
        # Hook for subclasses; the base heuristic is the CGI HTTPS flag.
        return os.environ.get("HTTPS") == "on"

    def is_secure(self):
        # First, check the SECURE_PROXY_SSL_HEADER setting.
        if settings.SECURE_PROXY_SSL_HEADER:
            try:
                header, value = settings.SECURE_PROXY_SSL_HEADER
            except ValueError:
                raise ImproperlyConfigured('The SECURE_PROXY_SSL_HEADER setting must be a tuple containing two values.')
            if self.META.get(header, None) == value:
                return True
        # Failing that, fall back to _is_secure(), which is a hook for
        # subclasses to implement.
        return self._is_secure()

    def is_ajax(self):
        return self.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest'

    def _set_encoding(self, val):
        """
        Sets the encoding used for GET/POST accesses. If the GET or POST
        dictionary has already been created, it is removed and recreated on the
        next access (so that it is decoded correctly).
        """
        self._encoding = val
        if hasattr(self, '_get'):
            del self._get
        if hasattr(self, '_post'):
            del self._post

    def _get_encoding(self):
        return self._encoding

    encoding = property(_get_encoding, _set_encoding)

    def _initialize_handlers(self):
        # Instantiate the upload handlers configured in settings.
        self._upload_handlers = [uploadhandler.load_handler(handler, self)
                                 for handler in settings.FILE_UPLOAD_HANDLERS]

    def _set_upload_handlers(self, upload_handlers):
        if hasattr(self, '_files'):
            raise AttributeError("You cannot set the upload handlers after the upload has been processed.")
        self._upload_handlers = upload_handlers

    def _get_upload_handlers(self):
        if not self._upload_handlers:
            # If there are no upload handlers defined, initialize them from settings.
            self._initialize_handlers()
        return self._upload_handlers

    upload_handlers = property(_get_upload_handlers, _set_upload_handlers)

    def parse_file_upload(self, META, post_data):
        """Returns a tuple of (POST QueryDict, FILES MultiValueDict)."""
        # Freeze the handler list: changing it mid-parse would corrupt state.
        self.upload_handlers = ImmutableList(
            self.upload_handlers,
            warning = "You cannot alter upload handlers after the upload has been processed."
        )
        parser = MultiPartParser(META, post_data, self.upload_handlers, self.encoding)
        return parser.parse()

    @property
    def body(self):
        # Lazily read and cache the raw request body; afterwards
        # self._stream replays the cached bytes for the file-like API.
        if not hasattr(self, '_body'):
            if self._read_started:
                raise Exception("You cannot access body after reading from request's data stream")
            try:
                self._body = self.read()
            except IOError, e:
                # Re-raise with the original traceback preserved.
                raise UnreadablePostError, e, sys.exc_traceback
            self._stream = StringIO(self._body)
        return self._body

    @property
    def raw_post_data(self):
        warnings.warn('HttpRequest.raw_post_data has been deprecated. Use HttpRequest.body instead.', PendingDeprecationWarning)
        return self.body

    def _mark_post_parse_error(self):
        # Install empty POST/FILES so later accesses don't re-trigger parsing.
        self._post = QueryDict('')
        self._files = MultiValueDict()
        self._post_parse_error = True

    def _load_post_and_files(self):
        # Populates self._post and self._files
        if self.method != 'POST':
            self._post, self._files = QueryDict('', encoding=self._encoding), MultiValueDict()
            return
        if self._read_started and not hasattr(self, '_body'):
            self._mark_post_parse_error()
            return

        if self.META.get('CONTENT_TYPE', '').startswith('multipart'):
            if hasattr(self, '_body'):
                # Use already read data
                data = StringIO(self._body)
            else:
                data = self
            try:
                self._post, self._files = self.parse_file_upload(self.META, data)
            except:
                # An error occured while parsing POST data. Since when
                # formatting the error the request handler might access
                # self.POST, set self._post and self._file to prevent
                # attempts to parse POST data again.
                # Mark that an error occured. This allows self.__repr__ to
                # be explicit about it instead of simply representing an
                # empty POST
                self._mark_post_parse_error()
                raise
        else:
            self._post, self._files = QueryDict(self.body, encoding=self._encoding), MultiValueDict()

    ## File-like and iterator interface.
    ##
    ## Expects self._stream to be set to an appropriate source of bytes by
    ## a corresponding request subclass (WSGIRequest or ModPythonRequest).
    ## Also when request data has already been read by request.POST or
    ## request.body, self._stream points to a StringIO instance
    ## containing that data.

    def read(self, *args, **kwargs):
        self._read_started = True
        return self._stream.read(*args, **kwargs)

    def readline(self, *args, **kwargs):
        self._read_started = True
        return self._stream.readline(*args, **kwargs)

    def xreadlines(self):
        # Generator yielding one line at a time until the stream is drained.
        while True:
            buf = self.readline()
            if not buf:
                break
            yield buf

    __iter__ = xreadlines

    def readlines(self):
        return list(iter(self))
class QueryDict(MultiValueDict):
    """
    A specialized MultiValueDict that takes a query string when initialized.
    This is immutable unless you create a copy of it.

    Values retrieved from this class are converted from the given encoding
    (DEFAULT_CHARSET by default) to unicode.
    """
    # These are both reset in __init__, but is specified here at the class
    # level so that unpickling will have valid values
    _mutable = True
    _encoding = None

    def __init__(self, query_string, mutable=False, encoding=None):
        MultiValueDict.__init__(self)
        if not encoding:
            encoding = settings.DEFAULT_CHARSET
        self.encoding = encoding
        for key, value in parse_qsl((query_string or ''), True): # keep_blank_values=True
            self.appendlist(force_unicode(key, encoding, errors='replace'),
                            force_unicode(value, encoding, errors='replace'))
        self._mutable = mutable

    def _get_encoding(self):
        if self._encoding is None:
            self._encoding = settings.DEFAULT_CHARSET
        return self._encoding

    def _set_encoding(self, value):
        self._encoding = value

    encoding = property(_get_encoding, _set_encoding)

    def _assert_mutable(self):
        # Guard called by every mutating method below.
        if not self._mutable:
            raise AttributeError("This QueryDict instance is immutable")

    def __setitem__(self, key, value):
        self._assert_mutable()
        key = str_to_unicode(key, self.encoding)
        value = str_to_unicode(value, self.encoding)
        MultiValueDict.__setitem__(self, key, value)

    def __delitem__(self, key):
        self._assert_mutable()
        super(QueryDict, self).__delitem__(key)

    def __copy__(self):
        # Copies are always created mutable.
        result = self.__class__('', mutable=True, encoding=self.encoding)
        for key, value in dict.items(self):
            dict.__setitem__(result, key, value)
        return result

    def __deepcopy__(self, memo):
        import copy
        result = self.__class__('', mutable=True, encoding=self.encoding)
        memo[id(self)] = result
        for key, value in dict.items(self):
            dict.__setitem__(result, copy.deepcopy(key, memo), copy.deepcopy(value, memo))
        return result

    def setlist(self, key, list_):
        self._assert_mutable()
        key = str_to_unicode(key, self.encoding)
        list_ = [str_to_unicode(elt, self.encoding) for elt in list_]
        MultiValueDict.setlist(self, key, list_)

    def setlistdefault(self, key, default_list=()):
        self._assert_mutable()
        if key not in self:
            self.setlist(key, default_list)
        return MultiValueDict.getlist(self, key)

    def appendlist(self, key, value):
        self._assert_mutable()
        key = str_to_unicode(key, self.encoding)
        value = str_to_unicode(value, self.encoding)
        MultiValueDict.appendlist(self, key, value)

    def update(self, other_dict):
        self._assert_mutable()
        f = lambda s: str_to_unicode(s, self.encoding)
        if hasattr(other_dict, 'lists'):
            # Another MultiValueDict-like object: merge every value list.
            for key, valuelist in other_dict.lists():
                for value in valuelist:
                    MultiValueDict.update(self, {f(key): f(value)})
        else:
            d = dict([(f(k), f(v)) for k, v in other_dict.items()])
            MultiValueDict.update(self, d)

    def pop(self, key, *args):
        self._assert_mutable()
        return MultiValueDict.pop(self, key, *args)

    def popitem(self):
        self._assert_mutable()
        return MultiValueDict.popitem(self)

    def clear(self):
        self._assert_mutable()
        MultiValueDict.clear(self)

    def setdefault(self, key, default=None):
        self._assert_mutable()
        key = str_to_unicode(key, self.encoding)
        default = str_to_unicode(default, self.encoding)
        return MultiValueDict.setdefault(self, key, default)

    def copy(self):
        """Returns a mutable copy of this object."""
        return self.__deepcopy__({})

    def urlencode(self, safe=None):
        """
        Returns an encoded string of all query string arguments.

        :arg safe: Used to specify characters which do not require quoting, for
            example::

                >>> q = QueryDict('', mutable=True)
                >>> q['next'] = '/a&b/'
                >>> q.urlencode()
                'next=%2Fa%26b%2F'
                >>> q.urlencode(safe='/')
                'next=/a%26b/'
        """
        output = []
        if safe:
            encode = lambda k, v: '%s=%s' % ((quote(k, safe), quote(v, safe)))
        else:
            encode = lambda k, v: urlencode({k: v})
        for k, list_ in self.lists():
            k = smart_str(k, self.encoding)
            output.extend([encode(k, smart_str(v, self.encoding))
                           for v in list_])
        return '&'.join(output)
def parse_cookie(cookie):
    """Turn a raw Cookie header string (or an existing BaseCookie)
    into a plain {name: value} dict; malformed input yields {}."""
    if cookie == '':
        return {}
    if isinstance(cookie, Cookie.BaseCookie):
        jar = cookie
    else:
        try:
            jar = SimpleCookie()
            jar.load(cookie)
        except Cookie.CookieError:
            # Invalid cookie
            return {}
    return dict((key, jar.get(key).value) for key in jar.keys())
class BadHeaderError(ValueError):
    # Raised when a response header contains a newline (header injection).
    pass
class HttpResponse(object):
    """A basic HTTP response, with content and dictionary-accessed headers."""

    status_code = 200

    def __init__(self, content='', mimetype=None, status=None,
                 content_type=None):
        # _headers is a mapping of the lower-case name to the original case of
        # the header (required for working with legacy systems) and the header
        # value. Both the name of the header and its value are ASCII strings.
        self._headers = {}
        self._charset = settings.DEFAULT_CHARSET
        if mimetype: # For backwards compatibility.
            content_type = mimetype
        if not content_type:
            content_type = "%s; charset=%s" % (settings.DEFAULT_CONTENT_TYPE,
                                               self._charset)
        self.content = content
        self.cookies = SimpleCookie()
        if status:
            self.status_code = status

        self['Content-Type'] = content_type

    def __str__(self):
        """Full HTTP message, including headers."""
        return '\n'.join(['%s: %s' % (key, value)
            for key, value in self._headers.values()]) \
            + '\n\n' + self.content

    def _convert_to_ascii(self, *values):
        """Converts all values to ascii strings."""
        # Generator: yields each value encoded, rejecting embedded
        # newlines to prevent HTTP header injection.
        for value in values:
            if isinstance(value, unicode):
                try:
                    value = value.encode('us-ascii')
                except UnicodeError, e:
                    e.reason += ', HTTP response headers must be in US-ASCII format'
                    raise
            else:
                value = str(value)
            if '\n' in value or '\r' in value:
                raise BadHeaderError("Header values can't contain newlines (got %r)" % (value))
            yield value

    def __setitem__(self, header, value):
        header, value = self._convert_to_ascii(header, value)
        self._headers[header.lower()] = (header, value)

    def __delitem__(self, header):
        try:
            del self._headers[header.lower()]
        except KeyError:
            pass

    def __getitem__(self, header):
        return self._headers[header.lower()][1]

    def __getstate__(self):
        # SimpleCookie is not pickeable with pickle.HIGHEST_PROTOCOL, so we
        # serialise to a string instead
        state = self.__dict__.copy()
        state['cookies'] = str(state['cookies'])
        return state

    def __setstate__(self, state):
        self.__dict__.update(state)
        self.cookies = SimpleCookie(self.cookies)

    def has_header(self, header):
        """Case-insensitive check for a header."""
        return header.lower() in self._headers

    __contains__ = has_header

    def items(self):
        return self._headers.values()

    def get(self, header, alternate=None):
        return self._headers.get(header.lower(), (None, alternate))[1]

    def set_cookie(self, key, value='', max_age=None, expires=None, path='/',
                   domain=None, secure=False, httponly=False):
        """
        Sets a cookie.

        ``expires`` can be:
        - a string in the correct format,
        - a naive ``datetime.datetime`` object in UTC,
        - an aware ``datetime.datetime`` object in any time zone.
        If it is a ``datetime.datetime`` object then ``max_age`` will be calculated.
        """
        self.cookies[key] = value
        if expires is not None:
            if isinstance(expires, datetime.datetime):
                if timezone.is_aware(expires):
                    expires = timezone.make_naive(expires, timezone.utc)
                delta = expires - expires.utcnow()
                # Add one second so the date matches exactly (a fraction of
                # time gets lost between converting to a timedelta and
                # then the date string).
                delta = delta + datetime.timedelta(seconds=1)
                # Just set max_age - the max_age logic will set expires.
                expires = None
                max_age = max(0, delta.days * 86400 + delta.seconds)
            else:
                self.cookies[key]['expires'] = expires
        if max_age is not None:
            self.cookies[key]['max-age'] = max_age
            # IE requires expires, so set it if hasn't been already.
            if not expires:
                self.cookies[key]['expires'] = cookie_date(time.time() +
                                                           max_age)
        if path is not None:
            self.cookies[key]['path'] = path
        if domain is not None:
            self.cookies[key]['domain'] = domain
        if secure:
            self.cookies[key]['secure'] = True
        if httponly:
            self.cookies[key]['httponly'] = True

    def set_signed_cookie(self, key, value, salt='', **kwargs):
        # Sign the value (keyed on cookie name + salt) before storing.
        value = signing.get_cookie_signer(salt=key + salt).sign(value)
        return self.set_cookie(key, value, **kwargs)

    def delete_cookie(self, key, path='/', domain=None):
        # Expire the cookie in the past so the browser discards it.
        self.set_cookie(key, max_age=0, path=path, domain=domain,
                        expires='Thu, 01-Jan-1970 00:00:00 GMT')

    def _get_content(self):
        if self.has_header('Content-Encoding'):
            # Already-encoded (e.g. gzipped) payload: don't re-encode.
            return ''.join([str(e) for e in self._container])
        return ''.join([smart_str(e, self._charset) for e in self._container])

    def _set_content(self, value):
        if hasattr(value, '__iter__'):
            self._container = value
            self._base_content_is_iter = True
        else:
            self._container = [value]
            self._base_content_is_iter = False

    content = property(_get_content, _set_content)

    def __iter__(self):
        self._iterator = iter(self._container)
        return self

    def next(self):
        chunk = self._iterator.next()
        if isinstance(chunk, unicode):
            chunk = chunk.encode(self._charset)
        return str(chunk)

    def close(self):
        if hasattr(self._container, 'close'):
            self._container.close()

    # The remaining methods partially implement the file-like object interface.
    # See http://docs.python.org/lib/bltin-file-objects.html
    def write(self, content):
        if self._base_content_is_iter:
            raise Exception("This %s instance is not writable" % self.__class__)
        self._container.append(content)

    def flush(self):
        pass

    def tell(self):
        if self._base_content_is_iter:
            raise Exception("This %s instance cannot tell its position" % self.__class__)
        return sum([len(str(chunk)) for chunk in self._container])
class HttpResponseRedirectBase(HttpResponse):
    # Only these schemes may appear in a redirect target; anything else
    # (e.g. javascript:, data:) raises SuspiciousOperation to block
    # unsafe redirects built from user-supplied URLs.
    allowed_schemes = ['http', 'https', 'ftp']

    def __init__(self, redirect_to):
        super(HttpResponseRedirectBase, self).__init__()
        parsed = urlparse(redirect_to)
        if parsed.scheme and parsed.scheme not in self.allowed_schemes:
            raise SuspiciousOperation("Unsafe redirect to URL with scheme '%s'" % parsed.scheme)
        self['Location'] = iri_to_uri(redirect_to)
# Thin HttpResponse subclasses that only pin a status code.
class HttpResponseRedirect(HttpResponseRedirectBase):
    status_code = 302

class HttpResponsePermanentRedirect(HttpResponseRedirectBase):
    status_code = 301

class HttpResponseNotModified(HttpResponse):
    status_code = 304

class HttpResponseBadRequest(HttpResponse):
    status_code = 400

class HttpResponseNotFound(HttpResponse):
    status_code = 404

class HttpResponseForbidden(HttpResponse):
    status_code = 403

class HttpResponseNotAllowed(HttpResponse):
    status_code = 405

    def __init__(self, permitted_methods):
        super(HttpResponseNotAllowed, self).__init__()
        # A 405 response must advertise the methods that are allowed.
        self['Allow'] = ', '.join(permitted_methods)

class HttpResponseGone(HttpResponse):
    status_code = 410

class HttpResponseServerError(HttpResponse):
    status_code = 500
# A backwards compatible alias for HttpRequest.get_host.
def get_host(request):
    # Kept so old callers of the module-level function keep working;
    # new code should call request.get_host() directly.
    return request.get_host()
# It's neither necessary nor appropriate to use
# django.utils.encoding.smart_unicode for parsing URLs and form inputs. Thus,
# this slightly more restricted function.
def str_to_unicode(s, encoding):
    """
    Converts basestring objects to unicode, using the given encoding. Illegally
    encoded input characters are replaced with Unicode "unknown" codepoint
    (\ufffd).

    Returns any non-basestring objects without change.
    """
    if isinstance(s, str):
        return unicode(s, encoding, 'replace')
    else:
        return s
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3766_3 |
crossvul-python_data_bad_1467_1 | # coding: UTF-8
'''Mock D-BUS objects for test suites.'''
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option) any
# later version. See http://www.gnu.org/copyleft/lgpl.html for the full text
# of the license.
__author__ = 'Martin Pitt'
__email__ = 'martin.pitt@ubuntu.com'
__copyright__ = '(c) 2012 Canonical Ltd.'
__license__ = 'LGPL 3+'
import copy
import time
import sys
import types
import importlib
from xml.etree import ElementTree
# we do not use this ourselves, but mock methods often want to use this
import os
os # pyflakes
import dbus
import dbus.service
# global path -> DBusMockObject mapping
objects = {}

# Well-known interface names used throughout the mock.
MOCK_IFACE = 'org.freedesktop.DBus.Mock'
OBJECT_MANAGER_IFACE = 'org.freedesktop.DBus.ObjectManager'
# stubs to keep code compatible with Python 2 and 3
if sys.version_info[0] >= 3:
    # Python 3 dropped these builtins; alias them to their replacements.
    long = int
    unicode = str
def load_module(name):
    '''Import a mock template, either from a .py file path or by name
    from the bundled dbusmock.templates package.

    Note that a template is arbitrary Python code and is executed on
    import.
    '''
    is_file_path = os.path.exists(name) and os.path.splitext(name)[1] == '.py'
    if not is_file_path:
        # Bare name: resolve against the shipped template package.
        return importlib.import_module('dbusmock.templates.' + name)

    # File path: make its directory importable just long enough to
    # import the module by its basename.
    sys.path.insert(0, os.path.dirname(os.path.abspath(name)))
    try:
        module_name = os.path.splitext(os.path.basename(name))[0]
        return importlib.import_module(module_name)
    finally:
        sys.path.pop(0)
class DBusMockObject(dbus.service.Object):
    '''Mock D-Bus object

    This can be configured to have arbitrary methods (including code execution)
    and properties via methods on the org.freedesktop.DBus.Mock interface, so
    that you can control the mock from any programming language.
    '''

    def __init__(self, bus_name, path, interface, props, logfile=None,
                 is_object_manager=False):
        '''Create a new DBusMockObject

        bus_name: A dbus.service.BusName instance where the object will be put on
        path: D-Bus object path
        interface: Primary D-Bus interface name of this object (where
                   properties and methods will be put on)
        props: A property_name (string) → property (Variant) map with initial
               properties on "interface"
        logfile: When given, method calls will be logged into that file name;
                 if None, logging will be written to stdout. Note that you can
                 also query the called methods over D-BUS with GetCalls() and
                 GetMethodCalls().
        is_object_manager: If True, the GetManagedObjects method will
                           automatically be implemented on the object, returning
                           all objects which have this one’s path as a prefix of
                           theirs. Note that the InterfacesAdded and
                           InterfacesRemoved signals will not be automatically
                           emitted.
        '''
        dbus.service.Object.__init__(self, bus_name, path)

        self.bus_name = bus_name
        self.path = path
        self.interface = interface
        self.is_object_manager = is_object_manager

        # Template identity, remembered so Reset() can re-instantiate it.
        self._template = None
        self._template_parameters = None

        if logfile:
            self.logfile = open(logfile, 'w')
        else:
            self.logfile = None
        # Only the object that opened the log file closes it (child
        # objects created via AddObject share the stream).
        self.is_logfile_owner = True
        self.call_log = []

        if props is None:
            props = {}

        self._reset(props)
def __del__(self):
    # Close the log file only if this object opened it (see
    # is_logfile_owner in __init__).
    if self.logfile and self.is_logfile_owner:
        self.logfile.close()
def _set_up_object_manager(self):
    '''Set up this mock object as a D-Bus ObjectManager.'''
    # GetManagedObjects returns every mock object whose path lies under
    # this object's path; the filter condition is interpolated into the
    # generated method body as Python source.
    if self.path == '/':
        cond = 'k != \'/\''
    else:
        cond = 'k.startswith(\'%s/\')' % self.path
    self.AddMethod(OBJECT_MANAGER_IFACE,
                   'GetManagedObjects', '', 'a{oa{sa{sv}}}',
                   'ret = {dbus.ObjectPath(k): objects[k].props ' +
                   '  for k in objects.keys() if ' + cond + '}')
def _reset(self, props):
    # Reinitialize the per-object property and method tables.
    # interface -> name -> value
    self.props = {self.interface: props}

    # interface -> name -> (in_signature, out_signature, code, dbus_wrapper_fn)
    self.methods = {self.interface: {}}

    if self.is_object_manager:
        self._set_up_object_manager()
@dbus.service.method(dbus.PROPERTIES_IFACE,
                     in_signature='ss', out_signature='v')
def Get(self, interface_name, property_name):
    '''Standard D-Bus API for getting a property value'''

    self.log('Get %s.%s' % (interface_name, property_name))

    # Empty interface name means "this object's primary interface".
    if not interface_name:
        interface_name = self.interface

    try:
        return self.GetAll(interface_name)[property_name]
    except KeyError:
        raise dbus.exceptions.DBusException(
            'no such property ' + property_name,
            name=self.interface + '.UnknownProperty')
@dbus.service.method(dbus.PROPERTIES_IFACE,
                     in_signature='s', out_signature='a{sv}')
def GetAll(self, interface_name, *args, **kwargs):
    '''Standard D-Bus API for getting all property values'''

    self.log('GetAll ' + interface_name)

    # Empty interface name means "this object's primary interface".
    if not interface_name:
        interface_name = self.interface

    try:
        return self.props[interface_name]
    except KeyError:
        raise dbus.exceptions.DBusException(
            'no such interface ' + interface_name,
            name=self.interface + '.UnknownInterface')
@dbus.service.method(dbus.PROPERTIES_IFACE,
                     in_signature='ssv', out_signature='')
def Set(self, interface_name, property_name, value, *args, **kwargs):
    '''Standard D-Bus API for setting a property value'''

    self.log('Set %s.%s%s' % (interface_name,
                              property_name,
                              self.format_args((value,))))

    try:
        iface_props = self.props[interface_name]
    except KeyError:
        raise dbus.exceptions.DBusException(
            'no such interface ' + interface_name,
            name=self.interface + '.UnknownInterface')

    # Only existing properties may be set; new ones must go through
    # AddProperty on the mock interface.
    if property_name not in iface_props:
        raise dbus.exceptions.DBusException(
            'no such property ' + property_name,
            name=self.interface + '.UnknownProperty')

    iface_props[property_name] = value

    # Notify listeners, as a real property implementation would.
    self.EmitSignal('org.freedesktop.DBus.Properties',
                    'PropertiesChanged',
                    'sa{sv}as',
                    [interface_name,
                     dbus.Dictionary({property_name: value}, signature='sv'),
                     dbus.Array([], signature='s')
                    ])
@dbus.service.method(MOCK_IFACE,
                     in_signature='ssa{sv}a(ssss)',
                     out_signature='')
def AddObject(self, path, interface, properties, methods):
    '''Add a new D-Bus object to the mock

    path: D-Bus object path
    interface: Primary D-Bus interface name of this object (where
               properties and methods will be put on)
    properties: A property_name (string) → value map with initial
                properties on "interface"
    methods: An array of 4-tuples (name, in_sig, out_sig, code) describing
             methods to add to "interface"; see AddMethod() for details of
             the tuple values

    If this is a D-Bus ObjectManager instance, the InterfacesAdded signal
    will *not* be emitted for the object automatically; it must be emitted
    manually if desired. This is because AddInterface may be called after
    AddObject, but before the InterfacesAdded signal should be emitted.

    Example:
    dbus_proxy.AddObject('/com/example/Foo/Manager',
                         'com.example.Foo.Control',
                         {
                             'state': dbus.String('online', variant_level=1),
                         },
                         [
                             ('Start', '', '', ''),
                             ('EchoInt', 'i', 'i', 'ret = args[0]'),
                             ('GetClients', '', 'ao', 'ret = ["/com/example/Foo/Client1"]'),
                         ])
    '''
    if path in objects:
        raise dbus.exceptions.DBusException(
            'object %s already exists' % path,
            name='org.freedesktop.DBus.Mock.NameError')

    obj = DBusMockObject(self.bus_name,
                         path,
                         interface,
                         properties)
    # make sure created objects inherit the log file stream
    obj.logfile = self.logfile
    obj.is_logfile_owner = False
    obj.AddMethods(interface, methods)

    # Register in the global path -> object map.
    objects[path] = obj
@dbus.service.method(MOCK_IFACE,
in_signature='s',
out_signature='')
def RemoveObject(self, path):
'''Remove a D-Bus object from the mock
As with AddObject, this will *not* emit the InterfacesRemoved signal if
it’s an ObjectManager instance.
'''
try:
objects[path].remove_from_connection()
del objects[path]
except KeyError:
raise dbus.exceptions.DBusException(
'object %s does not exist' % path,
name='org.freedesktop.DBus.Mock.NameError')
    @dbus.service.method(MOCK_IFACE,
                         in_signature='', out_signature='')
    def Reset(self):
        '''Reset the mock object state.

        Remove all mock objects from the bus and tidy up so the state is as if
        python-dbusmock had just been restarted. If the mock object was
        originally created with a template (from the command line, the Python
        API or by calling AddTemplate over D-Bus), it will be
        re-instantiated with that template.
        '''
        # Clear other existing objects.
        for obj_name, obj in objects.items():
            if obj_name != self.path:
                obj.remove_from_connection()
        objects.clear()
        # Reinitialise our state. Carefully remove new methods from our dict;
        # they do not actually exist as class attributes if they came from a
        # statically defined template function, hence the AttributeError guard.
        for method_name in self.methods[self.interface]:
            try:
                delattr(self.__class__, method_name)
            except AttributeError:
                pass
        self._reset({})
        # Re-apply the template (if any) that this mock was created with.
        if self._template is not None:
            self.AddTemplate(self._template, self._template_parameters)
        # Re-register ourselves in the global object registry.
        objects[self.path] = self
    @dbus.service.method(MOCK_IFACE,
                         in_signature='sssss',
                         out_signature='')
    def AddMethod(self, interface, name, in_sig, out_sig, code):
        '''Add a method to this object

        interface: D-Bus interface to add this to. For convenience you can
                   specify '' here to add the method to the object's main
                   interface (as specified on construction).
        name: Name of the method
        in_sig: Signature of input arguments; for example "ias" for a method
                that takes an int32 and a string array as arguments; see
                http://dbus.freedesktop.org/doc/dbus-specification.html#message-protocol-signatures
        out_sig: Signature of output arguments; for example "s" for a method
                 that returns a string; use '' for methods that do not return
                 anything.
        code: Python 3 code to run in the method call; you have access to the
              arguments through the "args" list, and can set the return value
              by assigning a value to the "ret" variable. You can also read the
              global "objects" variable, which is a dictionary mapping object
              paths to DBusMockObject instances.

              For keeping state across method calls, you are free to use normal
              Python members of the "self" object, which will be persistant for
              the whole mock's life time. E. g. you can have a method with
              "self.my_state = True", and another method that returns it with
              "ret = self.my_state".

              When specifying '', the method will not do anything (except
              logging) and return None.
        '''
        if not interface:
            interface = self.interface
        # number of complete types in the signature == number of arguments
        n_args = len(dbus.Signature(in_sig))
        # we need to have separate methods for dbus-python, so clone
        # mock_method(); using message_keyword with this dynamic approach fails
        # because inspect cannot handle those, so pass on interface and method
        # name as first positional arguments
        method = lambda self, *args, **kwargs: DBusMockObject.mock_method(
            self, interface, name, in_sig, *args, **kwargs)
        # we cannot specify in_signature here, as that trips over a consistency
        # check in dbus-python; we need to set it manually instead
        dbus_method = dbus.service.method(interface,
                                          out_signature=out_sig)(method)
        dbus_method.__name__ = str(name)
        dbus_method._dbus_in_signature = in_sig
        dbus_method._dbus_args = ['arg%i' % i for i in range(1, n_args + 1)]
        # for convenience, add mocked methods on the primary interface as
        # callable methods
        if interface == self.interface:
            setattr(self.__class__, name, dbus_method)
        # registry consumed by mock_method(), Introspect() and
        # _dbusmock_method_lookup()
        self.methods.setdefault(interface, {})[str(name)] = (in_sig, out_sig, code, dbus_method)
@dbus.service.method(MOCK_IFACE,
in_signature='sa(ssss)',
out_signature='')
def AddMethods(self, interface, methods):
'''Add several methods to this object
interface: D-Bus interface to add this to. For convenience you can
specify '' here to add the method to the object's main
interface (as specified on construction).
methods: list of 4-tuples (name, in_sig, out_sig, code) describing one
method each. See AddMethod() for details of the tuple values.
'''
for method in methods:
self.AddMethod(interface, *method)
    @dbus.service.method(MOCK_IFACE,
                         in_signature='ssv',
                         out_signature='')
    def AddProperty(self, interface, name, value):
        '''Add property to this object

        interface: D-Bus interface to add this to. For convenience you can
                   specify '' here to add the property to the object's main
                   interface (as specified on construction).
        name: Property name.
        value: Property value.
        '''
        if not interface:
            interface = self.interface
        # Probe for an existing property: if the double lookup succeeds, the
        # property exists and we raise PropertyExists (which is NOT a
        # KeyError, so it escapes the except below); if either lookup raises
        # KeyError, the property is new and we fall through.
        try:
            self.props[interface][name]
            raise dbus.exceptions.DBusException(
                'property %s already exists' % name,
                name=self.interface + '.PropertyExists')
        except KeyError:
            # this is what we expect
            pass
        # copy.copy removes one level of variant-ness, which means that the
        # types get exported in introspection data correctly, but we can't do
        # this for container types.
        if not (isinstance(value, dbus.Dictionary) or isinstance(value, dbus.Array)):
            value = copy.copy(value)
        self.props.setdefault(interface, {})[name] = value
@dbus.service.method(MOCK_IFACE,
in_signature='sa{sv}',
out_signature='')
def AddProperties(self, interface, properties):
'''Add several properties to this object
interface: D-Bus interface to add this to. For convenience you can
specify '' here to add the property to the object's main
interface (as specified on construction).
properties: A property_name (string) → value map
'''
for k, v in properties.items():
self.AddProperty(interface, k, v)
    @dbus.service.method(MOCK_IFACE,
                         in_signature='sa{sv}',
                         out_signature='')
    def AddTemplate(self, template, parameters):
        '''Load a template into the mock.

        python-dbusmock ships a set of standard mocks for common system
        services such as UPower and NetworkManager. With these the actual tests
        become a lot simpler, as they only have to set up the particular
        properties for the tests, and not the skeleton of common properties,
        interfaces, and methods.

        template: Name of the template to load or the full path to a *.py file
                  for custom templates. See "pydoc dbusmock.templates" for a
                  list of available templates from python-dbusmock package, and
                  "pydoc dbusmock.templates.NAME" for documentation about
                  template NAME.
        parameters: A parameter (string) → value (variant) map, for
                    parameterizing templates. Each template can define their
                    own, see documentation of that particular template for
                    details.
        '''
        # NOTE(review): load_module() imports and executes arbitrary Python
        # from the given template name/path; callers must only pass trusted
        # templates — confirm load_module's path handling at its definition.
        try:
            module = load_module(template)
        except ImportError as e:
            raise dbus.exceptions.DBusException('Cannot add template %s: %s' % (template, str(e)),
                                                name='org.freedesktop.DBus.Mock.TemplateError')
        # If the template specifies this is an ObjectManager, set that up
        if hasattr(module, 'IS_OBJECT_MANAGER') and module.IS_OBJECT_MANAGER:
            self._set_up_object_manager()
        # pick out all D-BUS service methods and add them to our interface
        for symbol in dir(module):
            fn = getattr(module, symbol)
            if ('_dbus_interface' in dir(fn) and
                    ('_dbus_is_signal' not in dir(fn) or not fn._dbus_is_signal)):
                # for dbus-python compatibility, add methods as callables
                setattr(self.__class__, symbol, fn)
                # register with empty code string: the function object itself
                # (slot 3) is what gets dispatched
                self.methods.setdefault(fn._dbus_interface, {})[str(symbol)] = (
                    fn._dbus_in_signature,
                    fn._dbus_out_signature, '', fn
                )
        if parameters is None:
            parameters = {}
        module.load(self, parameters)
        # save the given template and parameters for re-instantiation on
        # Reset()
        self._template = template
        self._template_parameters = parameters
    @dbus.service.method(MOCK_IFACE,
                         in_signature='sssav',
                         out_signature='')
    def EmitSignal(self, interface, name, signature, args):
        '''Emit a signal from the object.

        interface: D-Bus interface to send the signal from. For convenience you
                   can specify '' here to add the method to the object's main
                   interface (as specified on construction).
        name: Name of the signal
        signature: Signature of input arguments; for example "ias" for a signal
                   that takes an int32 and a string array as arguments; see
                   http://dbus.freedesktop.org/doc/dbus-specification.html#message-protocol-signatures
        args: variant array with signal arguments; must match order and type in
              "signature"
        '''
        if not interface:
            interface = self.interface
        # convert types of arguments according to signature, using
        # MethodCallMessage.append(); this will also provide type/length
        # checks, except for the case of an empty signature
        if signature == '' and len(args) > 0:
            raise TypeError('Fewer items found in D-Bus signature than in Python arguments')
        # the 'a.b'/'/a' names are throwaway; the message only serves as a
        # type-conversion vehicle
        m = dbus.connection.MethodCallMessage('a.b', '/a', 'a.b', 'a')
        m.append(signature=signature, *args)
        args = m.get_args_list()
        # build a one-shot signal emitter that also logs the emission
        fn = lambda self, *args: self.log('emit %s.%s%s' % (interface, name, self.format_args(args)))
        fn.__name__ = str(name)
        dbus_fn = dbus.service.signal(interface)(fn)
        dbus_fn._dbus_signature = signature
        dbus_fn._dbus_args = ['arg%i' % i for i in range(1, len(args) + 1)]
        # invoking the decorated function performs the actual emission
        dbus_fn(self, *args)
    @dbus.service.method(MOCK_IFACE,
                         in_signature='',
                         out_signature='a(tsav)')
    def GetCalls(self):
        '''List all the logged calls since the last call to ClearCalls().

        Return a list of (timestamp, method_name, args_list) tuples.
        '''
        # Returns the live log list; D-Bus marshalling copies it for callers.
        return self.call_log
@dbus.service.method(MOCK_IFACE,
in_signature='s',
out_signature='a(tav)')
def GetMethodCalls(self, method):
'''List all the logged calls of a particular method.
Return a list of (timestamp, args_list) tuples.
'''
return [(row[0], row[2]) for row in self.call_log if row[1] == method]
    @dbus.service.method(MOCK_IFACE,
                         in_signature='',
                         out_signature='')
    def ClearCalls(self):
        '''Empty the log of mock call signatures.'''
        # Rebind (rather than clear in place) so previously returned
        # GetCalls() results are left untouched.
        self.call_log = []
    @dbus.service.signal(MOCK_IFACE, signature='sav')
    def MethodCalled(self, name, args):
        '''Signal emitted for every called mock method.

        This is emitted for all mock method calls.  This can be used to confirm
        that a particular method was called with particular arguments, as an
        alternative to reading the mock's log or GetCalls().
        '''
        # Body intentionally empty: dbus.service.signal emits the signal when
        # the decorated method is invoked.
        pass
    def mock_method(self, interface, dbus_method, in_signature, *args, **kwargs):
        '''Master mock method.

        This gets "instantiated" in AddMethod(). Execute the code snippet of
        the method and return the "ret" variable if it was set.
        '''
        # print('mock_method', dbus_method, self, in_signature, args, kwargs, file=sys.stderr)
        # convert types of arguments according to signature, using
        # MethodCallMessage.append(); this will also provide type/length
        # checks, except for the case of an empty signature
        if in_signature == '' and len(args) > 0:
            raise TypeError('Fewer items found in D-Bus signature than in Python arguments')
        # throwaway message used purely for signature-driven type conversion
        m = dbus.connection.MethodCallMessage('a.b', '/a', 'a.b', 'a')
        m.append(signature=in_signature, *args)
        args = m.get_args_list()
        # record the call: log line, call log entry, and MethodCalled signal
        self.log(dbus_method + self.format_args(args))
        self.call_log.append((int(time.time()), str(dbus_method), args))
        self.MethodCalled(dbus_method, args)
        # The code may be a Python 3 string to interpret, or may be a function
        # object (if AddMethod was called from within Python itself, rather than
        # over D-Bus).
        code = self.methods[interface][dbus_method][2]
        if code and isinstance(code, types.FunctionType):
            return code(self, *args)
        elif code:
            # SECURITY: exec() of caller-supplied code is this mock's core
            # feature; it must only ever run test-controlled snippets.
            # Execute against a copy of locals so the snippet's "ret"
            # assignment can be read back afterwards.
            loc = locals().copy()
            exec(code, globals(), loc)
            if 'ret' in loc:
                return loc['ret']
    def format_args(self, args):
        '''Format a D-BUS argument tuple into an appropriate logging string.'''
        def format_arg(a):
            # Check the specific dbus wrapper types first: dbus.Boolean and
            # dbus.Byte are also instances of int, so order matters here.
            if isinstance(a, dbus.Boolean):
                return str(bool(a))
            if isinstance(a, dbus.Byte):
                return str(int(a))
            # NOTE(review): 'long' and 'unicode' are Python 2 builtins;
            # presumably the module defines Python 3 aliases near the top of
            # the file — confirm, otherwise this raises NameError on Python 3.
            if isinstance(a, int) or isinstance(a, long):
                return str(a)
            if isinstance(a, str) or isinstance(a, unicode):
                return '"' + str(a) + '"'
            if isinstance(a, list):
                return '[' + ', '.join([format_arg(x) for x in a]) + ']'
            if isinstance(a, dict):
                fmta = '{'
                first = True
                for k, v in a.items():
                    if first:
                        first = False
                    else:
                        fmta += ', '
                    fmta += format_arg(k) + ': ' + format_arg(v)
                return fmta + '}'
            # fallback
            return repr(a)
        # Join formatted arguments with single spaces; prefix the whole string
        # with one space when non-empty so it can be appended directly to a
        # method name in log lines.
        s = ''
        for a in args:
            if s:
                s += ' '
            s += format_arg(a)
        if s:
            s = ' ' + s
        return s
def log(self, msg):
'''Log a message, prefixed with a timestamp.
If a log file was specified in the constructor, it is written there,
otherwise it goes to stdout.
'''
if self.logfile:
fd = self.logfile
else:
fd = sys.stdout
fd.write('%.3f %s\n' % (time.time(), msg))
fd.flush()
    @dbus.service.method(dbus.INTROSPECTABLE_IFACE,
                         in_signature='',
                         out_signature='s',
                         path_keyword='object_path',
                         connection_keyword='connection')
    def Introspect(self, object_path, connection):
        '''Return XML description of this object's interfaces, methods and signals.

        This wraps dbus-python's Introspect() method to include the dynamic
        methods and properties.
        '''
        # temporarily add our dynamic methods to dbus-python's class table so
        # the stock Introspect() picks them up; restored at the end
        cls = self.__class__.__module__ + '.' + self.__class__.__name__
        orig_interfaces = self._dbus_class_table[cls]
        mock_interfaces = orig_interfaces.copy()
        for interface, methods in self.methods.items():
            for method in methods:
                # slot 3 of the registry tuple is the decorated dbus method
                mock_interfaces.setdefault(interface, {})[method] = self.methods[interface][method][3]
        self._dbus_class_table[cls] = mock_interfaces
        xml = dbus.service.Object.Introspect(self, object_path, connection)
        # splice our dynamic properties into the generated XML
        tree = ElementTree.fromstring(xml)
        for name in self.props:
            # We might have properties for new interfaces we don't know about
            # yet. Try to find an existing <interface> node named after our
            # interface to append to, and create one if we can't.
            interface = tree.find(".//interface[@name='%s']" % name)
            if interface is None:
                interface = ElementTree.Element("interface", {"name": name})
                tree.append(interface)
            for prop, val in self.props[name].items():
                if val is None:
                    # can't guess type from None, skip
                    continue
                elem = ElementTree.Element("property", {
                    "name": prop,
                    # We don't store the signature anywhere, so guess it.
                    "type": dbus.lowlevel.Message.guess_signature(val),
                    "access": "readwrite"})
                interface.append(elem)
        xml = ElementTree.tostring(tree, encoding='utf8', method='xml').decode('utf8')
        # restore original class table
        self._dbus_class_table[cls] = orig_interfaces
        return xml
# Overwrite dbus-python's _method_lookup(), as that offers no way to have the
# same method name on different interfaces
orig_method_lookup = dbus.service._method_lookup
def _dbusmock_method_lookup(obj, method_name, dbus_interface):
    '''Resolve a method by (interface, name) in the mock's method registry.

    Falls back to dbus-python's original lookup for methods that were not
    added dynamically (e.g. the built-in MOCK_IFACE control API).
    '''
    try:
        m = obj.methods[dbus_interface or obj.interface][method_name]
        # dbus-python expects a (parent_method, candidate_method) pair; the
        # stored decorated method (registry tuple slot 3) serves as both.
        return (m[3], m[3])
    except KeyError:
        return orig_method_lookup(obj, method_name, dbus_interface)
dbus.service._method_lookup = _dbusmock_method_lookup
#
# Helper API for templates
#
def get_objects():
    '''Return all existing object paths (a view over the global registry).'''
    return objects.keys()
def get_object(path):
    '''Return the DBusMockObject for a given object path.

    Raises KeyError if the path is not registered.
    '''
    return objects[path]
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_1467_1 |
crossvul-python_data_bad_3768_0 | from django.conf.urls.defaults import patterns
from django.contrib.auth.urls import urlpatterns
from django.contrib.auth.views import password_reset
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.template import Template, RequestContext
from django.views.decorators.cache import never_cache
@never_cache
def remote_user_auth_view(request):
    "Dummy view for remote user tests: renders the current user's name."
    # Render from an inline template with a RequestContext so the
    # auth context processor supplies {{ user }}.
    t = Template("Username is {{ user }}.")
    c = RequestContext(request, {})
    return HttpResponse(t.render(c))
# special urls for auth test cases, appended to the stock contrib.auth
# urlpatterns imported above; string view references and patterns() follow
# the old-style (pre-1.8) URLconf conventions used by this test suite
urlpatterns = urlpatterns + patterns('',
    (r'^logout/custom_query/$', 'django.contrib.auth.views.logout', dict(redirect_field_name='follow')),
    (r'^logout/next_page/$', 'django.contrib.auth.views.logout', dict(next_page='/somewhere/')),
    (r'^remote_user/$', remote_user_auth_view),
    (r'^password_reset_from_email/$', 'django.contrib.auth.views.password_reset', dict(from_email='staffmember@example.com')),
    (r'^login_required/$', login_required(password_reset)),
    (r'^login_required_login_url/$', login_required(password_reset, login_url='/somewhere/')),
)
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3768_0 |
crossvul-python_data_good_2156_1 | # Tests potential DOS of IcnsImagePlugin with 0 length block.
# Run from anywhere that PIL is importable.
from PIL import Image
from io import BytesIO
# Minimal 'icns' container containing a 'hang' block whose declared length is
# 0: a vulnerable parser would loop/hang on the zero-length block, so
# Image.open() returning (or raising cleanly) is the pass condition.
if bytes is str:
    # Python 2: str literals are already byte strings.
    Image.open(BytesIO(bytes('icns\x00\x00\x00\x10hang\x00\x00\x00\x00')))
else:
    # Python 3: encode via latin-1 to obtain the identical raw bytes.
    Image.open(BytesIO(bytes('icns\x00\x00\x00\x10hang\x00\x00\x00\x00', 'latin-1')))
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_2156_1 |
crossvul-python_data_good_2816_1 | # -*- coding: utf-8 -*-
'''
TCP transport classes
Wire protocol: "len(payload) msgpack({'head': SOMEHEADER, 'body': SOMEBODY})"
'''
# Import Python Libs
from __future__ import absolute_import
import logging
import msgpack
import socket
import os
import weakref
import time
import traceback
import errno
# Import Salt Libs
import salt.crypt
import salt.utils
import salt.utils.verify
import salt.utils.event
import salt.utils.async
import salt.payload
import salt.exceptions
import salt.transport.frame
import salt.transport.ipc
import salt.transport.client
import salt.transport.server
import salt.transport.mixins.auth
import salt.ext.six as six
from salt.exceptions import SaltReqTimeoutError, SaltClientError
from salt.transport import iter_transport_opts
# Import Tornado Libs
import tornado
import tornado.tcpserver
import tornado.gen
import tornado.concurrent
import tornado.tcpclient
import tornado.netutil
# pylint: disable=import-error,no-name-in-module
if six.PY2:
import urlparse
else:
import urllib.parse as urlparse
# pylint: enable=import-error,no-name-in-module
# Import third party libs
try:
from Cryptodome.Cipher import PKCS1_OAEP
except ImportError:
from Crypto.Cipher import PKCS1_OAEP
if six.PY3 and salt.utils.is_windows():
USE_LOAD_BALANCER = True
else:
USE_LOAD_BALANCER = False
if USE_LOAD_BALANCER:
import threading
import multiprocessing
import errno
import tornado.util
from salt.utils.process import SignalHandlingMultiprocessingProcess
log = logging.getLogger(__name__)
def _set_tcp_keepalive(sock, opts):
'''
Ensure that TCP keepalives are set for the socket.
'''
if hasattr(socket, 'SO_KEEPALIVE'):
if opts.get('tcp_keepalive', False):
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
if hasattr(socket, 'SOL_TCP'):
if hasattr(socket, 'TCP_KEEPIDLE'):
tcp_keepalive_idle = opts.get('tcp_keepalive_idle', -1)
if tcp_keepalive_idle > 0:
sock.setsockopt(
socket.SOL_TCP, socket.TCP_KEEPIDLE,
int(tcp_keepalive_idle))
if hasattr(socket, 'TCP_KEEPCNT'):
tcp_keepalive_cnt = opts.get('tcp_keepalive_cnt', -1)
if tcp_keepalive_cnt > 0:
sock.setsockopt(
socket.SOL_TCP, socket.TCP_KEEPCNT,
int(tcp_keepalive_cnt))
if hasattr(socket, 'TCP_KEEPINTVL'):
tcp_keepalive_intvl = opts.get('tcp_keepalive_intvl', -1)
if tcp_keepalive_intvl > 0:
sock.setsockopt(
socket.SOL_TCP, socket.TCP_KEEPINTVL,
int(tcp_keepalive_intvl))
if hasattr(socket, 'SIO_KEEPALIVE_VALS'):
# Windows doesn't support TCP_KEEPIDLE, TCP_KEEPCNT, nor
# TCP_KEEPINTVL. Instead, it has its own proprietary
# SIO_KEEPALIVE_VALS.
tcp_keepalive_idle = opts.get('tcp_keepalive_idle', -1)
tcp_keepalive_intvl = opts.get('tcp_keepalive_intvl', -1)
# Windows doesn't support changing something equivalent to
# TCP_KEEPCNT.
if tcp_keepalive_idle > 0 or tcp_keepalive_intvl > 0:
# Windows defaults may be found by using the link below.
# Search for 'KeepAliveTime' and 'KeepAliveInterval'.
# https://technet.microsoft.com/en-us/library/bb726981.aspx#EDAA
# If one value is set and the other isn't, we still need
# to send both values to SIO_KEEPALIVE_VALS and they both
# need to be valid. So in that case, use the Windows
# default.
if tcp_keepalive_idle <= 0:
tcp_keepalive_idle = 7200
if tcp_keepalive_intvl <= 0:
tcp_keepalive_intvl = 1
# The values expected are in milliseconds, so multiply by
# 1000.
sock.ioctl(socket.SIO_KEEPALIVE_VALS, (
1, int(tcp_keepalive_idle * 1000),
int(tcp_keepalive_intvl * 1000)))
else:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 0)
if USE_LOAD_BALANCER:
    class LoadBalancerServer(SignalHandlingMultiprocessingProcess):
        '''
        Raw TCP server which runs in its own process and will listen
        for incoming connections. Each incoming connection will be
        sent via multiprocessing queue to the workers.
        Since the queue is shared amongst workers, only one worker will
        handle a given connection.
        '''
        # TODO: opts!
        # Based on default used in tornado.netutil.bind_sockets()
        backlog = 128
        def __init__(self, opts, socket_queue, log_queue=None):
            super(LoadBalancerServer, self).__init__(log_queue=log_queue)
            self.opts = opts
            # queue shared with the worker processes that accept connections
            self.socket_queue = socket_queue
            self._socket = None
        # __setstate__ and __getstate__ are only used on Windows.
        # We do this so that __init__ will be invoked on Windows in the child
        # process so that a register_after_fork() equivalent will work on
        # Windows.
        def __setstate__(self, state):
            self._is_child = True
            self.__init__(
                state['opts'],
                state['socket_queue'],
                log_queue=state['log_queue']
            )
        def __getstate__(self):
            return {'opts': self.opts,
                    'socket_queue': self.socket_queue,
                    'log_queue': self.log_queue}
        def close(self):
            # shut down both directions before closing so peers see EOF
            if self._socket is not None:
                self._socket.shutdown(socket.SHUT_RDWR)
                self._socket.close()
                self._socket = None
        def __del__(self):
            self.close()
        def run(self):
            '''
            Start the load balancer: bind, listen, and hand every accepted
            connection to the shared worker queue.
            '''
            self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            _set_tcp_keepalive(self._socket, self.opts)
            self._socket.setblocking(1)
            self._socket.bind((self.opts['interface'], int(self.opts['ret_port'])))
            self._socket.listen(self.backlog)
            while True:
                try:
                    # Wait for a connection to occur since the socket is
                    # blocking.
                    connection, address = self._socket.accept()
                    # Wait for a free slot to be available to put
                    # the connection into.
                    # Sockets are picklable on Windows in Python 3.
                    self.socket_queue.put((connection, address), True, None)
                except socket.error as e:
                    # ECONNABORTED indicates that there was a connection
                    # but it was closed while still in the accept queue.
                    # (observed on FreeBSD).
                    if tornado.util.errno_from_exception(e) == errno.ECONNABORTED:
                        continue
                    raise
# TODO: move serial down into message library
class AsyncTCPReqChannel(salt.transport.client.ReqChannel):
    '''
    Encapsulate sending routines to tcp.

    Note: this class returns a singleton per (io_loop, __key()) pair.
    '''
    # This class is only a singleton per minion/master pair
    # mapping of io_loop -> {key -> channel}
    instance_map = weakref.WeakKeyDictionary()
    def __new__(cls, opts, **kwargs):
        '''
        Only create one instance of channel per __key()
        '''
        # do we have any mapping for this io_loop
        io_loop = kwargs.get('io_loop') or tornado.ioloop.IOLoop.current()
        if io_loop not in cls.instance_map:
            cls.instance_map[io_loop] = weakref.WeakValueDictionary()
        loop_instance_map = cls.instance_map[io_loop]
        key = cls.__key(opts, **kwargs)
        obj = loop_instance_map.get(key)
        if obj is None:
            log.debug('Initializing new AsyncTCPReqChannel for {0}'.format(key))
            # we need to make a local variable for this, as we are going to store
            # it in a WeakValueDictionary-- which will remove the item if no one
            # references it-- this forces a reference while we return to the caller
            obj = object.__new__(cls)
            obj.__singleton_init__(opts, **kwargs)
            loop_instance_map[key] = obj
        else:
            log.debug('Re-using AsyncTCPReqChannel for {0}'.format(key))
        return obj
    @classmethod
    def __key(cls, opts, **kwargs):
        # NOTE: mutates opts when master_uri is passed as a kwarg so the
        # instance later reads the same value back from opts
        if 'master_uri' in kwargs:
            opts['master_uri'] = kwargs['master_uri']
        return (opts['pki_dir'], # where the keys are stored
                opts['id'], # minion ID
                opts['master_uri'],
                kwargs.get('crypt', 'aes'), # TODO: use the same channel for crypt
                )
    # has to remain empty for singletons, since __init__ will *always* be called
    def __init__(self, opts, **kwargs):
        pass
    # an init for the singleton instance to call
    def __singleton_init__(self, opts, **kwargs):
        self.opts = dict(opts)
        self.serial = salt.payload.Serial(self.opts)
        # crypt defaults to 'aes'
        self.crypt = kwargs.get('crypt', 'aes')
        self.io_loop = kwargs.get('io_loop') or tornado.ioloop.IOLoop.current()
        if self.crypt != 'clear':
            self.auth = salt.crypt.AsyncAuth(self.opts, io_loop=self.io_loop)
        resolver = kwargs.get('resolver')
        # master_uri is expected to look like tcp://host:port
        parse = urlparse.urlparse(self.opts['master_uri'])
        host, port = parse.netloc.rsplit(':', 1)
        self.master_addr = (host, int(port))
        self._closing = False
        self.message_client = SaltMessageClientPool(self.opts,
                                                    args=(self.opts, host, int(port),),
                                                    kwargs={'io_loop': self.io_loop, 'resolver': resolver})
    def close(self):
        # idempotent: repeated close() calls are no-ops
        if self._closing:
            return
        self._closing = True
        self.message_client.close()
    def __del__(self):
        self.close()
    def _package_load(self, load):
        # envelope understood by the master side: crypt mode + payload
        return {
            'enc': self.crypt,
            'load': load,
        }
    @tornado.gen.coroutine
    def crypted_transfer_decode_dictentry(self, load, dictkey=None, tries=3, timeout=60):
        # Send an AES-encrypted request, then RSA-decrypt the session key
        # from the reply and use it to decrypt the requested dict entry.
        if not self.auth.authenticated:
            yield self.auth.authenticate()
        ret = yield self.message_client.send(self._package_load(self.auth.crypticle.dumps(load)), timeout=timeout)
        key = self.auth.get_keys()
        cipher = PKCS1_OAEP.new(key)
        aes = cipher.decrypt(ret['key'])
        pcrypt = salt.crypt.Crypticle(self.opts, aes)
        data = pcrypt.loads(ret[dictkey])
        if six.PY3:
            data = salt.transport.frame.decode_embedded_strs(data)
        raise tornado.gen.Return(data)
    @tornado.gen.coroutine
    def _crypted_transfer(self, load, tries=3, timeout=60):
        '''
        In case of authentication errors, try to renegotiate authentication
        and retry the method.

        Indeed, we can fail too early in case of a master restart during a
        minion state execution call.
        '''
        @tornado.gen.coroutine
        def _do_transfer():
            data = yield self.message_client.send(self._package_load(self.auth.crypticle.dumps(load)),
                                                  timeout=timeout,
                                                  )
            # we may not have always data
            # as for example for saltcall ret submission, this is a blind
            # communication, we do not subscribe to return events, we just
            # upload the results to the master
            if data:
                data = self.auth.crypticle.loads(data)
                if six.PY3:
                    data = salt.transport.frame.decode_embedded_strs(data)
            raise tornado.gen.Return(data)
        if not self.auth.authenticated:
            yield self.auth.authenticate()
        try:
            ret = yield _do_transfer()
            raise tornado.gen.Return(ret)
        except salt.crypt.AuthenticationError:
            # stale session (e.g. master restarted): re-auth once and retry
            yield self.auth.authenticate()
            ret = yield _do_transfer()
            raise tornado.gen.Return(ret)
    @tornado.gen.coroutine
    def _uncrypted_transfer(self, load, tries=3, timeout=60):
        # 'clear' crypt mode: the payload goes out unencrypted
        ret = yield self.message_client.send(self._package_load(load), timeout=timeout)
        raise tornado.gen.Return(ret)
    @tornado.gen.coroutine
    def send(self, load, tries=3, timeout=60, raw=False):
        '''
        Send a request, return a future which will complete when we send the message
        '''
        try:
            if self.crypt == 'clear':
                ret = yield self._uncrypted_transfer(load, tries=tries, timeout=timeout)
            else:
                ret = yield self._crypted_transfer(load, tries=tries, timeout=timeout)
        except tornado.iostream.StreamClosedError:
            # Convert to 'SaltClientError' so that clients can handle this
            # exception more appropriately.
            raise SaltClientError('Connection to master lost')
        raise tornado.gen.Return(ret)
class AsyncTCPPubChannel(salt.transport.mixins.auth.AESPubClientMixin, salt.transport.client.AsyncPubChannel):
    '''
    Async TCP subscriber channel for master publish messages; handles
    authentication, (re)connection and payload decoding for the minion side.
    '''
    def __init__(self,
                 opts,
                 **kwargs):
        self.opts = opts
        self.serial = salt.payload.Serial(self.opts)
        self.crypt = kwargs.get('crypt', 'aes')
        self.io_loop = kwargs.get('io_loop') or tornado.ioloop.IOLoop.current()
        self.connected = False
        self._closing = False
        self._reconnected = False
        # minion event bus, used to announce master (dis)connections
        self.event = salt.utils.event.get_event(
            'minion',
            opts=self.opts,
            listen=False
        )
    def close(self):
        # idempotent; message_client only exists after a successful connect()
        if self._closing:
            return
        self._closing = True
        if hasattr(self, 'message_client'):
            self.message_client.close()
    def __del__(self):
        self.close()
    def _package_load(self, load):
        # envelope understood by the master side: crypt mode + payload
        return {
            'enc': self.crypt,
            'load': load,
        }
    @tornado.gen.coroutine
    def send_id(self, tok, force_auth):
        '''
        Send the minion id to the master so that the master may better
        track the connection state of the minion.

        In case of authentication errors, try to renegotiate authentication
        and retry the method.
        '''
        load = {'id': self.opts['id'], 'tok': tok}
        @tornado.gen.coroutine
        def _do_transfer():
            msg = self._package_load(self.auth.crypticle.dumps(load))
            package = salt.transport.frame.frame_msg(msg, header=None)
            yield self.message_client.write_to_stream(package)
            raise tornado.gen.Return(True)
        if force_auth or not self.auth.authenticated:
            count = 0
            # a negative tcp_authentication_retries means retry forever
            while count <= self.opts['tcp_authentication_retries'] or self.opts['tcp_authentication_retries'] < 0:
                try:
                    yield self.auth.authenticate()
                    break
                except SaltClientError as exc:
                    log.debug(exc)
                    count += 1
        try:
            ret = yield _do_transfer()
            raise tornado.gen.Return(ret)
        except salt.crypt.AuthenticationError:
            yield self.auth.authenticate()
            ret = yield _do_transfer()
            raise tornado.gen.Return(ret)
    @tornado.gen.coroutine
    def connect_callback(self, result):
        if self._closing:
            return
        # Force re-auth on reconnect since the master
        # may have been restarted
        yield self.send_id(self.tok, self._reconnected)
        self.connected = True
        self.event.fire_event(
            {'master': self.opts['master']},
            '__master_connected'
        )
        if self._reconnected:
            # On reconnects, fire a master event to notify that the minion is
            # available.
            if self.opts.get('__role') == 'syndic':
                data = 'Syndic {0} started at {1}'.format(
                    self.opts['id'],
                    time.asctime()
                )
                tag = salt.utils.event.tagify(
                    [self.opts['id'], 'start'],
                    'syndic'
                )
            else:
                data = 'Minion {0} started at {1}'.format(
                    self.opts['id'],
                    time.asctime()
                )
                tag = salt.utils.event.tagify(
                    [self.opts['id'], 'start'],
                    'minion'
                )
            load = {'id': self.opts['id'],
                    'cmd': '_minion_event',
                    'pretag': None,
                    'tok': self.tok,
                    'data': data,
                    'tag': tag}
            # fire the start event to the master over a synchronous req channel
            req_channel = salt.utils.async.SyncWrapper(
                AsyncTCPReqChannel, (self.opts,)
            )
            try:
                req_channel.send(load, timeout=60)
            except salt.exceptions.SaltReqTimeoutError:
                log.info('fire_master failed: master could not be contacted. Request timed out.')
            except Exception:
                log.info('fire_master failed: {0}'.format(
                    traceback.format_exc())
                )
        else:
            # first successful connect; subsequent calls take the branch above
            self._reconnected = True
    def disconnect_callback(self):
        if self._closing:
            return
        self.connected = False
        self.event.fire_event(
            {'master': self.opts['master']},
            '__master_disconnected'
        )
    @tornado.gen.coroutine
    def connect(self):
        try:
            self.auth = salt.crypt.AsyncAuth(self.opts, io_loop=self.io_loop)
            self.tok = self.auth.gen_token('salt')
            if not self.auth.authenticated:
                yield self.auth.authenticate()
            if self.auth.authenticated:
                self.message_client = SaltMessageClientPool(
                    self.opts,
                    args=(self.opts, self.opts['master_ip'], int(self.auth.creds['publish_port']),),
                    kwargs={'io_loop': self.io_loop,
                            'connect_callback': self.connect_callback,
                            'disconnect_callback': self.disconnect_callback})
                yield self.message_client.connect()  # wait for the client to be connected
                self.connected = True
        # TODO: better exception handling...
        except KeyboardInterrupt:
            raise
        except Exception as exc:
            # '-|RETRY|-' marks retryable auth failures that should not bubble
            if '-|RETRY|-' not in str(exc):
                raise SaltClientError('Unable to sign_in to master: {0}'.format(exc))  # TODO: better error message
    def on_recv(self, callback):
        '''
        Register an on_recv callback
        '''
        if callback is None:
            # passing None through unregisters the current callback
            return self.message_client.on_recv(callback)
        @tornado.gen.coroutine
        def wrap_callback(body):
            if not isinstance(body, dict):
                # TODO: For some reason we need to decode here for things
                # to work. Fix this.
                body = msgpack.loads(body)
                if six.PY3:
                    body = salt.transport.frame.decode_embedded_strs(body)
            ret = yield self._decode_payload(body)
            callback(ret)
        return self.message_client.on_recv(wrap_callback)
class TCPReqServerChannel(salt.transport.mixins.auth.AESReqServerMixin, salt.transport.server.ReqServerChannel):
# TODO: opts!
backlog = 5
    def __init__(self, opts):
        # Delegate common req-server setup to the base class; the listening
        # socket itself is created later in pre_fork()/post_fork().
        salt.transport.server.ReqServerChannel.__init__(self, opts)
        self._socket = None
    @property
    def socket(self):
        # Read-only accessor for the listening socket (None until bound).
        return self._socket
    def close(self):
        # Shut down and close the listening socket, if one was ever bound.
        if self._socket is not None:
            try:
                self._socket.shutdown(socket.SHUT_RDWR)
            except socket.error as exc:
                if exc.errno == errno.ENOTCONN:
                    # We may try to shutdown a socket which is already disconnected.
                    # Ignore this condition and continue.
                    pass
                else:
                    raise exc
            self._socket.close()
            self._socket = None
    def __del__(self):
        # Best-effort cleanup; close() tolerates already-closed sockets.
        self.close()
def pre_fork(self, process_manager):
'''
Pre-fork we need to create the zmq router device
'''
salt.transport.mixins.auth.AESReqServerMixin.pre_fork(self, process_manager)
if USE_LOAD_BALANCER:
self.socket_queue = multiprocessing.Queue()
process_manager.add_process(
LoadBalancerServer, args=(self.opts, self.socket_queue)
)
elif not salt.utils.is_windows():
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
_set_tcp_keepalive(self._socket, self.opts)
self._socket.setblocking(0)
self._socket.bind((self.opts['interface'], int(self.opts['ret_port'])))
def post_fork(self, payload_handler, io_loop):
'''
After forking we need to create all of the local sockets to listen to the
router
payload_handler: function to call with your payloads
'''
self.payload_handler = payload_handler
self.io_loop = io_loop
self.serial = salt.payload.Serial(self.opts)
if USE_LOAD_BALANCER:
self.req_server = LoadBalancerWorker(self.socket_queue,
self.handle_message,
io_loop=self.io_loop,
ssl_options=self.opts.get('ssl'))
else:
if salt.utils.is_windows():
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
_set_tcp_keepalive(self._socket, self.opts)
self._socket.setblocking(0)
self._socket.bind((self.opts['interface'], int(self.opts['ret_port'])))
self.req_server = SaltMessageServer(self.handle_message,
io_loop=self.io_loop,
ssl_options=self.opts.get('ssl'))
self.req_server.add_socket(self._socket)
self._socket.listen(self.backlog)
salt.transport.mixins.auth.AESReqServerMixin.post_fork(self, payload_handler, io_loop)
@tornado.gen.coroutine
def handle_message(self, stream, header, payload):
'''
Handle incoming messages from underylying tcp streams
'''
try:
try:
payload = self._decode_payload(payload)
except Exception:
stream.write(salt.transport.frame.frame_msg('bad load', header=header))
raise tornado.gen.Return()
# TODO helper functions to normalize payload?
if not isinstance(payload, dict) or not isinstance(payload.get('load'), dict):
yield stream.write(salt.transport.frame.frame_msg(
'payload and load must be a dict', header=header))
raise tornado.gen.Return()
try:
id_ = payload['load'].get('id', '')
if '\0' in id_:
log.error('Payload contains an id with a null byte: %s', payload)
stream.send(self.serial.dumps('bad load: id contains a null byte'))
raise tornado.gen.Return()
except TypeError:
log.error('Payload contains non-string id: %s', payload)
stream.send(self.serial.dumps('bad load: id {0} is not a string'.format(id_)))
raise tornado.gen.Return()
# intercept the "_auth" commands, since the main daemon shouldn't know
# anything about our key auth
if payload['enc'] == 'clear' and payload.get('load', {}).get('cmd') == '_auth':
yield stream.write(salt.transport.frame.frame_msg(
self._auth(payload['load']), header=header))
raise tornado.gen.Return()
# TODO: test
try:
ret, req_opts = yield self.payload_handler(payload)
except Exception as e:
# always attempt to return an error to the minion
stream.write('Some exception handling minion payload')
log.error('Some exception handling a payload from minion', exc_info=True)
stream.close()
raise tornado.gen.Return()
req_fun = req_opts.get('fun', 'send')
if req_fun == 'send_clear':
stream.write(salt.transport.frame.frame_msg(ret, header=header))
elif req_fun == 'send':
stream.write(salt.transport.frame.frame_msg(self.crypticle.dumps(ret), header=header))
elif req_fun == 'send_private':
stream.write(salt.transport.frame.frame_msg(self._encrypt_private(ret,
req_opts['key'],
req_opts['tgt'],
), header=header))
else:
log.error('Unknown req_fun {0}'.format(req_fun))
# always attempt to return an error to the minion
stream.write('Server-side exception handling payload')
stream.close()
except tornado.gen.Return:
raise
except tornado.iostream.StreamClosedError:
# Stream was closed. This could happen if the remote side
# closed the connection on its end (eg in a timeout or shutdown
# situation).
log.error('Connection was unexpectedly closed', exc_info=True)
except Exception as exc: # pylint: disable=broad-except
# Absorb any other exceptions
log.error('Unexpected exception occurred: {0}'.format(exc), exc_info=True)
raise tornado.gen.Return()
class SaltMessageServer(tornado.tcpserver.TCPServer, object):
    '''
    Raw TCP server which will receive all of the TCP streams and re-assemble
    messages that are sent through to us
    '''
    def __init__(self, message_handler, *args, **kwargs):
        super(SaltMessageServer, self).__init__(*args, **kwargs)
        # (stream, address) pairs for every currently connected client.
        self.clients = []
        self.message_handler = message_handler

    @tornado.gen.coroutine
    def handle_stream(self, stream, address):
        '''
        Handle incoming streams and add messages to the incoming queue
        '''
        log.trace('Req client {0} connected'.format(address))
        self.clients.append((stream, address))
        unpacker = msgpack.Unpacker()
        try:
            while True:
                wire_bytes = yield stream.read_bytes(4096, partial=True)
                unpacker.feed(wire_bytes)
                # The unpacker yields one dict per complete framed message.
                for framed_msg in unpacker:
                    if six.PY3:
                        framed_msg = salt.transport.frame.decode_embedded_strs(
                            framed_msg
                        )
                    header = framed_msg['head']
                    self.io_loop.spawn_callback(self.message_handler, stream, header, framed_msg['body'])
        except tornado.iostream.StreamClosedError:
            log.trace('req client disconnected {0}'.format(address))
            self.clients.remove((stream, address))
        except Exception as e:
            log.trace('other master-side exception: {0}'.format(e))
            self.clients.remove((stream, address))
            stream.close()

    def shutdown(self):
        '''
        Shutdown the whole server
        '''
        # BUG FIX: iterate over a snapshot of the client list. Removing
        # entries from the list being iterated skips every other client,
        # leaving half of the connections open.
        for item in list(self.clients):
            client, address = item
            client.close()
            self.clients.remove(item)
if USE_LOAD_BALANCER:
    class LoadBalancerWorker(SaltMessageServer):
        '''
        This will receive TCP connections from 'LoadBalancerServer' via
        a multiprocessing queue.
        Since the queue is shared amongst workers, only one worker will handle
        a given connection.
        '''
        def __init__(self, socket_queue, message_handler, *args, **kwargs):
            super(LoadBalancerWorker, self).__init__(
                message_handler, *args, **kwargs)
            self.socket_queue = socket_queue
            # Accepted sockets arrive on a plain thread (queue.get blocks);
            # the thread hands each one to the IO loop for processing.
            t = threading.Thread(target=self.socket_queue_thread)
            t.start()

        def socket_queue_thread(self):
            """Block on the shared queue and feed sockets into the IO loop."""
            try:
                while True:
                    client_socket, address = self.socket_queue.get(True, None)
                    # 'self.io_loop' initialized in super class
                    # 'tornado.tcpserver.TCPServer'.
                    # 'self._handle_connection' defined in same super class.
                    self.io_loop.spawn_callback(
                        self._handle_connection, client_socket, address)
            except (KeyboardInterrupt, SystemExit):
                pass
class TCPClientKeepAlive(tornado.tcpclient.TCPClient):
    '''
    A tornado TCPClient whose outgoing sockets get TCP keepalive options
    applied from the Salt configuration before connecting.
    '''
    def __init__(self, opts, resolver=None, io_loop=None):
        self.opts = opts
        super(TCPClientKeepAlive, self).__init__(
            resolver=resolver, io_loop=io_loop)

    def _create_stream(self, max_buffer_size, af, addr, **kwargs):  # pylint: disable=unused-argument
        '''
        Override _create_stream() in TCPClient.

        Tornado 4.5 added the kwargs 'source_ip' and 'source_port';
        accepting **kwargs swallows those (and any future additions) so
        this override stays compatible across tornado versions.
        '''
        # Always connect in plaintext; the caller upgrades to SSL after the
        # initial connection completes if needed.
        raw_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        _set_tcp_keepalive(raw_sock, self.opts)
        keepalive_stream = tornado.iostream.IOStream(
            raw_sock,
            io_loop=self.io_loop,
            max_buffer_size=max_buffer_size)
        return keepalive_stream.connect(addr)
class SaltMessageClientPool(salt.transport.MessageClientPool):
    '''
    Wrapper class of SaltMessageClient to avoid blocking waiting while writing data to socket.
    '''
    def __init__(self, opts, args=None, kwargs=None):
        super(SaltMessageClientPool, self).__init__(SaltMessageClient, opts, args=args, kwargs=kwargs)

    def __del__(self):
        self.close()

    def close(self):
        """Close every pooled client and drop the references."""
        for message_client in self.message_clients:
            message_client.close()
        self.message_clients = []

    @tornado.gen.coroutine
    def connect(self):
        """Start connecting every pooled client, then wait for all of them."""
        futures = []
        for message_client in self.message_clients:
            futures.append(message_client.connect())
        for future in futures:
            yield future
        raise tornado.gen.Return(None)

    def on_recv(self, *args, **kwargs):
        """Install (or clear) the receive callback on every pooled client."""
        for message_client in self.message_clients:
            message_client.on_recv(*args, **kwargs)

    def _least_busy_client(self):
        # min() is O(n) and returns the first client with the shortest send
        # queue — same choice the previous sorted(...)[0] made, without the
        # O(n log n) sort duplicated across send() and write_to_stream().
        return min(self.message_clients, key=lambda client: len(client.send_queue))

    def send(self, *args, **kwargs):
        """Send on the client with the least backlog; returns its future."""
        return self._least_busy_client().send(*args, **kwargs)

    def write_to_stream(self, *args, **kwargs):
        """Write raw bytes on the least-busy client's underlying stream."""
        return self._least_busy_client()._stream.write(*args, **kwargs)
# TODO consolidate with IPCClient
# TODO: limit in-flight messages.
# TODO: singleton? Something to not re-create the tcp connection so much
class SaltMessageClient(object):
    '''
    Low-level message sending client.

    Multiplexes request/response pairs over a single TCP stream by tagging
    each frame with a message id ('mid') and resolving the matching Future
    when the response frame arrives. Reconnects forever on stream loss.
    '''
    def __init__(self, opts, host, port, io_loop=None, resolver=None,
                 connect_callback=None, disconnect_callback=None):
        self.opts = opts
        self.host = host
        self.port = port
        self.connect_callback = connect_callback
        self.disconnect_callback = disconnect_callback
        self.io_loop = io_loop or tornado.ioloop.IOLoop.current()
        self._tcp_client = TCPClientKeepAlive(
            opts, io_loop=self.io_loop, resolver=resolver)
        self._mid = 1
        self._max_messages = int((1 << 31) - 2)  # number of IDs before we wrap
        # TODO: max queue size
        self.send_queue = []  # queue of messages to be sent
        self.send_future_map = {}  # mapping of request_id -> Future
        self.send_timeout_map = {}  # request_id -> timeout_callback
        self._read_until_future = None
        self._on_recv = None
        self._closing = False
        # Connecting starts immediately; the read loop waits on this future.
        self._connecting_future = self.connect()
        self._stream_return_future = tornado.concurrent.Future()
        self.io_loop.spawn_callback(self._stream_return)

    # TODO: timeout inflight sessions
    def close(self):
        """Tear down the stream, read loop and client; idempotent."""
        if self._closing:
            return
        self._closing = True
        if hasattr(self, '_stream') and not self._stream.closed():
            self._stream.close()
            if self._read_until_future is not None:
                # This will prevent this message from showing up:
                # '[ERROR ] Future exception was never retrieved:
                # StreamClosedError'
                # This happens because the logic is always waiting to read
                # the next message and the associated read future is marked
                # 'StreamClosedError' when the stream is closed.
                self._read_until_future.exc_info()
                if (not self._stream_return_future.done() and
                        self.io_loop != tornado.ioloop.IOLoop.current(
                            instance=False)):
                    # If _stream_return() hasn't completed, it means the IO
                    # Loop is stopped (such as when using
                    # 'salt.utils.async.SyncWrapper'). Ensure that
                    # _stream_return() completes by restarting the IO Loop.
                    # This will prevent potential errors on shutdown.
                    orig_loop = tornado.ioloop.IOLoop.current()
                    self.io_loop.make_current()
                    try:
                        self.io_loop.add_future(
                            self._stream_return_future,
                            lambda future: self.io_loop.stop()
                        )
                        self.io_loop.start()
                    finally:
                        orig_loop.make_current()
        self._tcp_client.close()
        # Clear callback references to allow the object that they belong to
        # to be deleted.
        self.connect_callback = None
        self.disconnect_callback = None

    def __del__(self):
        self.close()

    def connect(self):
        '''
        Ask for this client to reconnect to the origin.

        Returns a Future that resolves once a stream is established. If a
        connect is already in flight, the same Future is returned.
        '''
        if hasattr(self, '_connecting_future') and not self._connecting_future.done():
            future = self._connecting_future
        else:
            future = tornado.concurrent.Future()
            self._connecting_future = future
            self.io_loop.add_callback(self._connect)
            # Add the callback only when a new future is created
            if self.connect_callback is not None:
                def handle_future(future):
                    response = future.result()
                    self.io_loop.add_callback(self.connect_callback, response)
                future.add_done_callback(handle_future)
        return future

    # TODO: tcp backoff opts
    @tornado.gen.coroutine
    def _connect(self):
        '''
        Try to connect for the rest of time!
        '''
        while True:
            if self._closing:
                break
            try:
                self._stream = yield self._tcp_client.connect(self.host,
                                                              self.port,
                                                              ssl_options=self.opts.get('ssl'))
                self._connecting_future.set_result(True)
                break
            except Exception as e:
                # Swallow the failure and retry after a fixed delay.
                yield tornado.gen.sleep(1)  # TODO: backoff
                #self._connecting_future.set_exception(e)

    @tornado.gen.coroutine
    def _stream_return(self):
        """Read loop: unpack frames and dispatch responses / pushes.

        Runs until close(); on stream loss it fails all pending futures,
        notifies the disconnect callback and triggers a reconnect.
        """
        try:
            while not self._closing and (
                    not self._connecting_future.done() or
                    self._connecting_future.result() is not True):
                yield self._connecting_future
            unpacker = msgpack.Unpacker()
            while not self._closing:
                try:
                    self._read_until_future = self._stream.read_bytes(4096, partial=True)
                    wire_bytes = yield self._read_until_future
                    unpacker.feed(wire_bytes)
                    for framed_msg in unpacker:
                        if six.PY3:
                            framed_msg = salt.transport.frame.decode_embedded_strs(
                                framed_msg
                            )
                        header = framed_msg['head']
                        body = framed_msg['body']
                        message_id = header.get('mid')
                        if message_id in self.send_future_map:
                            # Response to a request we sent: resolve it.
                            self.send_future_map.pop(message_id).set_result(body)
                            self.remove_message_timeout(message_id)
                        else:
                            # Unsolicited message (e.g. a publish): hand to
                            # the registered receive callback, if any.
                            if self._on_recv is not None:
                                self.io_loop.spawn_callback(self._on_recv, header, body)
                            else:
                                log.error('Got response for message_id {0} that we are not tracking'.format(message_id))
                except tornado.iostream.StreamClosedError as e:
                    log.debug('tcp stream to {0}:{1} closed, unable to recv'.format(self.host, self.port))
                    for future in six.itervalues(self.send_future_map):
                        future.set_exception(e)
                    self.send_future_map = {}
                    if self._closing:
                        return
                    if self.disconnect_callback:
                        self.disconnect_callback()
                    # if the last connect finished, then we need to make a new one
                    if self._connecting_future.done():
                        self._connecting_future = self.connect()
                    yield self._connecting_future
                except TypeError:
                    # This is an invalid transport
                    if 'detect_mode' in self.opts:
                        log.info('There was an error trying to use TCP transport; '
                                 'attempting to fallback to another transport')
                    else:
                        raise SaltClientError
                except Exception as e:
                    log.error('Exception parsing response', exc_info=True)
                    for future in six.itervalues(self.send_future_map):
                        future.set_exception(e)
                    self.send_future_map = {}
                    if self._closing:
                        return
                    if self.disconnect_callback:
                        self.disconnect_callback()
                    # if the last connect finished, then we need to make a new one
                    if self._connecting_future.done():
                        self._connecting_future = self.connect()
                    yield self._connecting_future
        finally:
            # Always signal close() that the read loop has exited.
            self._stream_return_future.set_result(True)

    @tornado.gen.coroutine
    def _stream_send(self):
        """Write loop: drain send_queue onto the stream, one frame at a time."""
        while not self._connecting_future.done() or self._connecting_future.result() is not True:
            yield self._connecting_future
        while len(self.send_queue) > 0:
            message_id, item = self.send_queue[0]
            try:
                yield self._stream.write(item)
                del self.send_queue[0]
            # if the connection is dead, lets fail this send, and make sure we
            # attempt to reconnect
            except tornado.iostream.StreamClosedError as e:
                if message_id in self.send_future_map:
                    self.send_future_map.pop(message_id).set_exception(e)
                    self.remove_message_timeout(message_id)
                del self.send_queue[0]
                if self._closing:
                    return
                if self.disconnect_callback:
                    self.disconnect_callback()
                # if the last connect finished, then we need to make a new one
                if self._connecting_future.done():
                    self._connecting_future = self.connect()
                yield self._connecting_future

    def _message_id(self):
        """Return the next free message id, wrapping at _max_messages."""
        wrap = False
        while self._mid in self.send_future_map:
            if self._mid >= self._max_messages:
                if wrap:
                    # this shouldn't ever happen, but just in case
                    raise Exception('Unable to find available messageid')
                self._mid = 1
                wrap = True
            else:
                self._mid += 1
        return self._mid

    # TODO: return a message object which takes care of multiplexing?
    def on_recv(self, callback):
        '''
        Register a callback for received messages (that we didn't initiate)
        '''
        if callback is None:
            self._on_recv = callback
        else:
            # Callers only want the body; drop the frame header.
            def wrap_recv(header, body):
                callback(body)
            self._on_recv = wrap_recv

    def remove_message_timeout(self, message_id):
        """Cancel the pending timeout (if any) for message_id."""
        if message_id not in self.send_timeout_map:
            return
        timeout = self.send_timeout_map.pop(message_id)
        self.io_loop.remove_timeout(timeout)

    def timeout_message(self, message_id):
        """Fail the pending future for message_id with SaltReqTimeoutError."""
        if message_id in self.send_timeout_map:
            del self.send_timeout_map[message_id]
        if message_id in self.send_future_map:
            self.send_future_map.pop(message_id).set_exception(
                SaltReqTimeoutError('Message timed out')
            )

    def send(self, msg, timeout=None, callback=None, raw=False):
        '''
        Send given message, and return a future
        '''
        message_id = self._message_id()
        header = {'mid': message_id}
        future = tornado.concurrent.Future()
        if callback is not None:
            def handle_future(future):
                response = future.result()
                self.io_loop.add_callback(callback, response)
            future.add_done_callback(handle_future)
        # Add this future to the mapping
        self.send_future_map[message_id] = future
        if self.opts.get('detect_mode') is True:
            # Detect mode probes transports, so fail fast.
            timeout = 1
        if timeout is not None:
            send_timeout = self.io_loop.call_later(timeout, self.timeout_message, message_id)
            self.send_timeout_map[message_id] = send_timeout
        # if we don't have a send queue, we need to spawn the callback to do the sending
        if len(self.send_queue) == 0:
            self.io_loop.spawn_callback(self._stream_send)
        self.send_queue.append((message_id, salt.transport.frame.frame_msg(msg, header=header)))
        return future
class Subscriber(object):
    '''
    Per-connection state for one minion attached to the TCP publisher.

    Holds the tornado stream, the peer address, and (once validated) the
    minion id used for presence tracking.
    '''
    def __init__(self, stream, address):
        self.id_ = None
        self._closing = False
        self._read_until_future = None
        self.stream = stream
        self.address = address

    def close(self):
        """Close the underlying stream exactly once; safe to call again."""
        if not self._closing:
            self._closing = True
            if not self.stream.closed():
                self.stream.close()
            if self._read_until_future is not None:
                # Retrieve the pending read's exception so tornado does not
                # log 'Future exception was never retrieved:
                # StreamClosedError' for the always-outstanding read.
                self._read_until_future.exc_info()

    def __del__(self):
        self.close()
class PubServer(tornado.tcpserver.TCPServer, object):
    '''
    TCP publisher.

    Accepts minion subscriber connections, tracks which minion ids are
    present, and fans published payloads out to the connected streams.
    '''
    def __init__(self, opts, io_loop=None):
        super(PubServer, self).__init__(io_loop=io_loop, ssl_options=opts.get('ssl'))
        self.opts = opts
        self._closing = False
        # Every connected Subscriber, including those whose id is not yet known.
        self.clients = set()
        self.aes_funcs = salt.master.AESFuncs(self.opts)
        # minion id -> set of Subscriber objects currently connected.
        self.present = {}
        self.presence_events = False
        if self.opts.get('presence_events', False):
            tcp_only = True
            for transport, _ in iter_transport_opts(self.opts):
                if transport != 'tcp':
                    tcp_only = False
            if tcp_only:
                # Only when the transport is TCP only, the presence events will
                # be handled here. Otherwise, it will be handled in the
                # 'Maintenance' process.
                self.presence_events = True
        if self.presence_events:
            self.event = salt.utils.event.get_event(
                'master',
                opts=self.opts,
                listen=False
            )

    def close(self):
        # Just flip the flag; the read loops check it and exit.
        if self._closing:
            return
        self._closing = True

    def __del__(self):
        self.close()

    def _add_client_present(self, client):
        """Record that client's minion id is connected; fire presence events."""
        id_ = client.id_
        if id_ in self.present:
            clients = self.present[id_]
            clients.add(client)
        else:
            self.present[id_] = set([client])
            if self.presence_events:
                data = {'new': [id_],
                        'lost': []}
                self.event.fire_event(
                    data,
                    salt.utils.event.tagify('change', 'presence')
                )
                data = {'present': list(self.present.keys())}
                self.event.fire_event(
                    data,
                    salt.utils.event.tagify('present', 'presence')
                )

    def _remove_client_present(self, client):
        """Drop client from presence tracking; fire events if id goes away."""
        id_ = client.id_
        if id_ is None or id_ not in self.present:
            # This is possible if _remove_client_present() is invoked
            # before the minion's id is validated.
            return

        clients = self.present[id_]
        if client not in clients:
            # Since _remove_client_present() is potentially called from
            # _stream_read() and/or publish_payload(), it is possible for
            # it to be called twice, in which case we will get here.
            # This is not an abnormal case, so no logging is required.
            return

        clients.remove(client)
        if len(clients) == 0:
            del self.present[id_]
            if self.presence_events:
                data = {'new': [],
                        'lost': [id_]}
                self.event.fire_event(
                    data,
                    salt.utils.event.tagify('change', 'presence')
                )
                data = {'present': list(self.present.keys())}
                self.event.fire_event(
                    data,
                    salt.utils.event.tagify('present', 'presence')
                )

    @tornado.gen.coroutine
    def _stream_read(self, client):
        """Read loop for one subscriber: validate its id for presence.

        Only 'aes'-encrypted frames whose token verifies against the
        minion's key are accepted as the client's identity.
        """
        unpacker = msgpack.Unpacker()
        while not self._closing:
            try:
                client._read_until_future = client.stream.read_bytes(4096, partial=True)
                wire_bytes = yield client._read_until_future
                unpacker.feed(wire_bytes)
                for framed_msg in unpacker:
                    if six.PY3:
                        framed_msg = salt.transport.frame.decode_embedded_strs(
                            framed_msg
                        )
                    body = framed_msg['body']
                    if body['enc'] != 'aes':
                        # We only accept 'aes' encoded messages for 'id'
                        continue
                    crypticle = salt.crypt.Crypticle(self.opts, salt.master.SMaster.secrets['aes']['secret'].value)
                    load = crypticle.loads(body['load'])
                    if six.PY3:
                        load = salt.transport.frame.decode_embedded_strs(load)
                    if not self.aes_funcs.verify_minion(load['id'], load['tok']):
                        continue
                    client.id_ = load['id']
                    self._add_client_present(client)
            except tornado.iostream.StreamClosedError as e:
                log.debug('tcp stream to {0} closed, unable to recv'.format(client.address))
                client.close()
                self._remove_client_present(client)
                self.clients.discard(client)
                break
            except Exception as e:
                log.error('Exception parsing response', exc_info=True)
                continue

    def handle_stream(self, stream, address):
        """tornado accept hook: wrap the stream and start its read loop."""
        log.trace('Subscriber at {0} connected'.format(address))
        client = Subscriber(stream, address)
        self.clients.add(client)
        self.io_loop.spawn_callback(self._stream_read, client)

    # TODO: ACK the publish through IPC
    @tornado.gen.coroutine
    def publish_payload(self, package, _):
        """Fan the framed payload out to targeted (or all) subscribers."""
        log.debug('TCP PubServer sending payload: {0}'.format(package))
        payload = salt.transport.frame.frame_msg(package['payload'])
        to_remove = []
        if 'topic_lst' in package:
            topic_lst = package['topic_lst']
            for topic in topic_lst:
                if topic in self.present:
                    # This will rarely be a list of more than 1 item. It will
                    # be more than 1 item if the minion disconnects from the
                    # master in an unclean manner (eg cable yank), then
                    # restarts and the master is yet to detect the disconnect
                    # via TCP keep-alive.
                    for client in self.present[topic]:
                        try:
                            # Write the packed str
                            f = client.stream.write(payload)
                            self.io_loop.add_future(f, lambda f: True)
                        except tornado.iostream.StreamClosedError:
                            to_remove.append(client)
                else:
                    log.debug('Publish target {0} not connected'.format(topic))
        else:
            for client in self.clients:
                try:
                    # Write the packed str
                    f = client.stream.write(payload)
                    self.io_loop.add_future(f, lambda f: True)
                except tornado.iostream.StreamClosedError:
                    to_remove.append(client)
        for client in to_remove:
            log.debug('Subscriber at {0} has disconnected from publisher'.format(client.address))
            client.close()
            self._remove_client_present(client)
            self.clients.discard(client)
        log.trace('TCP PubServer finished publishing payload')
class TCPPubServerChannel(salt.transport.server.PubServerChannel):
    # TODO: opts!
    # Based on default used in tornado.netutil.bind_sockets()
    backlog = 128

    def __init__(self, opts):
        self.opts = opts
        self.serial = salt.payload.Serial(self.opts)  # TODO: in init?
        self.io_loop = None

    def __setstate__(self, state):
        # Restore the master's shared AES secrets before re-initializing so
        # the unpickled channel (in a forked worker) can encrypt publishes.
        salt.master.SMaster.secrets = state['secrets']
        self.__init__(state['opts'])

    def __getstate__(self):
        return {'opts': self.opts,
                'secrets': salt.master.SMaster.secrets}

    def _publish_daemon(self, log_queue=None):
        '''
        Bind to the interface specified in the configuration file
        '''
        salt.utils.appendproctitle(self.__class__.__name__)
        if log_queue is not None:
            salt.log.setup.set_multiprocessing_logging_queue(log_queue)
            salt.log.setup.setup_multiprocessing_logging(log_queue)
        # Check if io_loop was set outside
        if self.io_loop is None:
            self.io_loop = tornado.ioloop.IOLoop.current()
        # Spin up the publisher
        pub_server = PubServer(self.opts, io_loop=self.io_loop)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        _set_tcp_keepalive(sock, self.opts)
        sock.setblocking(0)
        sock.bind((self.opts['interface'], int(self.opts['publish_port'])))
        sock.listen(self.backlog)
        # pub_server will take ownership of the socket
        pub_server.add_socket(sock)
        # Set up Salt IPC server
        if self.opts.get('ipc_mode', '') == 'tcp':
            pull_uri = int(self.opts.get('tcp_master_publish_pull', 4514))
        else:
            pull_uri = os.path.join(self.opts['sock_dir'], 'publish_pull.ipc')
        pull_sock = salt.transport.ipc.IPCMessageServer(
            pull_uri,
            io_loop=self.io_loop,
            payload_handler=pub_server.publish_payload,
        )
        # Securely create socket: restrictive umask so the IPC socket file
        # is not readable/writable by other users.
        log.info('Starting the Salt Puller on {0}'.format(pull_uri))
        old_umask = os.umask(0o177)
        try:
            pull_sock.start()
        finally:
            os.umask(old_umask)
        # run forever
        try:
            self.io_loop.start()
        except (KeyboardInterrupt, SystemExit):
            salt.log.setup.shutdown_multiprocessing_logging()

    def pre_fork(self, process_manager):
        '''
        Do anything necessary pre-fork. Since this is on the master side this will
        primarily be used to create IPC channels and create our daemon process to
        do the actual publishing
        '''
        kwargs = {}
        if salt.utils.is_windows():
            # Windows child processes don't inherit the logging queue.
            kwargs['log_queue'] = (
                salt.log.setup.get_multiprocessing_logging_queue()
            )
        process_manager.add_process(self._publish_daemon, kwargs=kwargs)

    def publish(self, load):
        '''
        Publish "load" to minions
        '''
        payload = {'enc': 'aes'}
        crypticle = salt.crypt.Crypticle(self.opts, salt.master.SMaster.secrets['aes']['secret'].value)
        payload['load'] = crypticle.dumps(load)
        if self.opts['sign_pub_messages']:
            master_pem_path = os.path.join(self.opts['pki_dir'], 'master.pem')
            log.debug("Signing data packet")
            payload['sig'] = salt.crypt.sign_message(master_pem_path, payload['load'])
        # Use the Salt IPC server
        if self.opts.get('ipc_mode', '') == 'tcp':
            pull_uri = int(self.opts.get('tcp_master_publish_pull', 4514))
        else:
            pull_uri = os.path.join(self.opts['sock_dir'], 'publish_pull.ipc')
        # TODO: switch to the actual async interface
        #pub_sock = salt.transport.ipc.IPCMessageClient(self.opts, io_loop=self.io_loop)
        pub_sock = salt.utils.async.SyncWrapper(
            salt.transport.ipc.IPCMessageClient,
            (pull_uri,)
        )
        pub_sock.connect()
        int_payload = {'payload': self.serial.dumps(payload)}
        # add some targeting stuff for lists only (for now)
        if load['tgt_type'] == 'list':
            int_payload['topic_lst'] = load['tgt']
        # Send it over IPC!
        pub_sock.send(int_payload)
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_2816_1 |
crossvul-python_data_bad_872_0 | # -*- test-case-name: twisted.web.test.test_newclient -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
An U{HTTP 1.1<http://www.w3.org/Protocols/rfc2616/rfc2616.html>} client.
The way to use the functionality provided by this module is to:
- Connect a L{HTTP11ClientProtocol} to an HTTP server
- Create a L{Request} with the appropriate data
- Pass the request to L{HTTP11ClientProtocol.request}
- The returned Deferred will fire with a L{Response} object
- Create a L{IProtocol} provider which can handle the response body
- Connect it to the response with L{Response.deliverBody}
- When the protocol's C{connectionLost} method is called, the response is
complete. See L{Response.deliverBody} for details.
Various other classes in this module support this usage:
- HTTPParser is the basic HTTP parser. It can handle the parts of HTTP which
are symmetric between requests and responses.
- HTTPClientParser extends HTTPParser to handle response-specific parts of
HTTP. One instance is created for each request to parse the corresponding
response.
"""
from __future__ import division, absolute_import
__metaclass__ = type
from zope.interface import implementer
from twisted.python.compat import networkString
from twisted.python.components import proxyForInterface
from twisted.python.reflect import fullyQualifiedName
from twisted.python.failure import Failure
from twisted.internet.interfaces import IConsumer, IPushProducer
from twisted.internet.error import ConnectionDone
from twisted.internet.defer import Deferred, succeed, fail, maybeDeferred
from twisted.internet.defer import CancelledError
from twisted.internet.protocol import Protocol
from twisted.protocols.basic import LineReceiver
from twisted.web.iweb import UNKNOWN_LENGTH, IResponse, IClientRequest
from twisted.web.http_headers import Headers
from twisted.web.http import NO_CONTENT, NOT_MODIFIED
from twisted.web.http import _DataLoss, PotentialDataLoss
from twisted.web.http import _IdentityTransferDecoder, _ChunkedTransferDecoder
from twisted.logger import Logger
# States HTTPParser can be in
STATUS = u'STATUS'  # waiting for the request/status line
HEADER = u'HEADER'  # accumulating (possibly multi-line) headers
BODY = u'BODY'      # raw mode: bytes go to the body decoder
DONE = u'DONE'      # message fully parsed

# Module-scope logger used by helpers that have no per-instance logger.
_moduleLog = Logger()
class BadHeaders(Exception):
    """
    Raised when the headers handed to L{Request} are invalid in some way.
    """
class ExcessWrite(Exception):
    """
    Raised when a request's body L{IBodyProducer} keeps writing data after
    signalling that it had finished producing.
    """
class ParseError(Exception):
    """
    Raised when some received data could not be parsed.

    @ivar data: The string which could not be parsed.
    """
    def __init__(self, reason, data):
        super(ParseError, self).__init__(reason, data)
        self.data = data
class BadResponseVersion(ParseError):
    """
    Raised when the HTTP version in a status line cannot be parsed.
    """
class _WrapperException(Exception):
"""
L{_WrapperException} is the base exception type for exceptions which
include one or more other exceptions as the low-level causes.
@ivar reasons: A L{list} of one or more L{Failure} instances encountered
during an HTTP request. See subclass documentation for more details.
"""
def __init__(self, reasons):
Exception.__init__(self, reasons)
self.reasons = reasons
class RequestGenerationFailed(_WrapperException):
    """
    Generating the bytes that make up a request failed.

    @ivar reasons: A C{list} of one or more L{Failure} instances giving the
        reasons the request generation was considered to have failed.
    """
class RequestTransmissionFailed(_WrapperException):
    """
    Sending the bytes that make up a request failed.

    @ivar reasons: A C{list} of one or more L{Failure} instances giving the
        reasons the request transmission was considered to have failed.
    """
class ConnectionAborted(Exception):
    """
    Raised when application code explicitly aborted the connection.
    """
class WrongBodyLength(Exception):
    """
    Raised when an L{IBodyProducer} promised a byte count (via its
    C{length} attribute) and then produced a different number of bytes.
    """
class ResponseDone(Exception):
    """
    Passed to L{IProtocol.connectionLost} on the protocol handed to
    L{Response.deliverBody}; indicates the entire response was delivered.
    """
class ResponseFailed(_WrapperException):
    """
    Indicates that not all of the response to a request was received.

    @ivar reasons: A C{list} of one or more L{Failure} instances giving the
        reasons the response was considered to have failed.

    @ivar response: If specified, the L{Response} received from the server
        (and in particular the status code and the headers).
    """
    def __init__(self, reasons, response=None):
        super(ResponseFailed, self).__init__(reasons)
        self.response = response
class ResponseNeverReceived(ResponseFailed):
    """
    A L{ResponseFailed} for the case where no response bytes at all were
    received.
    """
class RequestNotSent(Exception):
    """
    Raised when a request could not be sent for reasons unrelated to the
    request itself — for example, issuing it on a protocol that is no
    longer connected to a server.
    """
def _callAppFunction(function):
    """
    Call C{function}. If it raises an exception, log it with a minimal
    description of the source.

    @return: L{None}
    """
    try:
        function()
    except:
        # Deliberately broad: application callbacks must never be allowed
        # to break protocol processing, so anything raised here is logged
        # and swallowed.
        _moduleLog.failure(
            u"Unexpected exception from {name}",
            name=fullyQualifiedName(function)
        )
class HTTPParser(LineReceiver):
    """
    L{HTTPParser} handles the parsing side of HTTP processing. With a suitable
    subclass, it can parse either the client side or the server side of the
    connection.
    @ivar headers: All of the non-connection control message headers yet
        received.
    @ivar connHeaders: All of the connection control message headers yet
        received (those named in C{CONNECTION_CONTROL_HEADERS}).
    @ivar state: State indicator for the response parsing state machine.  One
        of C{STATUS}, C{HEADER}, C{BODY}, C{DONE}.
    @ivar _partialHeader: L{None} or a C{list} of the lines of a multiline
        header while that header is being received.
    """
    # NOTE: According to HTTP spec, we're supposed to eat the
    # 'Proxy-Authenticate' and 'Proxy-Authorization' headers also, but that
    # doesn't sound like a good idea to me, because it makes it impossible to
    # have a non-authenticating transparent proxy in front of an authenticating
    # proxy. An authenticating proxy can eat them itself. -jknight
    #
    # Further, quoting
    # http://homepages.tesco.net/J.deBoynePollard/FGA/web-proxy-connection-header.html
    # regarding the 'Proxy-Connection' header:
    #
    #     The Proxy-Connection: header is a mistake in how some web browsers
    #     use HTTP. Its name is the result of a false analogy. It is not a
    #     standard part of the protocol. There is a different standard
    #     protocol mechanism for doing what it does. And its existence
    #     imposes a requirement upon HTTP servers such that no proxy HTTP
    #     server can be standards-conforming in practice.
    #
    # -exarkun
    # Some servers (like http://news.ycombinator.com/) return status lines and
    # HTTP headers delimited by \n instead of \r\n.  Accept bare LF as the
    # line delimiter and strip a trailing CR in lineReceived instead.
    delimiter = b'\n'
    # Lower-cased names of headers which control the connection itself rather
    # than describe the transferred entity; these are routed to connHeaders.
    CONNECTION_CONTROL_HEADERS = set([
        b'content-length', b'connection', b'keep-alive', b'te',
        b'trailers', b'transfer-encoding', b'upgrade',
        b'proxy-connection'])
    def connectionMade(self):
        """
        Reset all per-message parser state: fresh header stores, the C{STATUS}
        start state, and no partially received header.
        """
        self.headers = Headers()
        self.connHeaders = Headers()
        self.state = STATUS
        self._partialHeader = None
    def switchToBodyMode(self, decoder):
        """
        Switch to body parsing mode - interpret any more bytes delivered as
        part of the message body and deliver them to the given decoder.
        @param decoder: An object with a C{dataReceived} method, or L{None}.
        @raise RuntimeError: If the parser is already in body mode.
        """
        if self.state == BODY:
            raise RuntimeError(u"already in body mode")
        self.bodyDecoder = decoder
        self.state = BODY
        self.setRawMode()
    def lineReceived(self, line):
        """
        Handle one line from a response.
        Drives the STATUS -> HEADER state machine; multi-line (folded)
        headers are accumulated in C{_partialHeader} and flushed when a
        non-continuation line arrives.
        """
        # Handle the normal CR LF case (delimiter is bare LF, so a trailing
        # CR from a CRLF-terminated line must be stripped here).
        if line[-1:] == b'\r':
            line = line[:-1]
        if self.state == STATUS:
            self.statusReceived(line)
            self.state = HEADER
        elif self.state == HEADER:
            if not line or line[0] not in b' \t':
                # Not a continuation line: flush any header accumulated so
                # far before handling this line.
                if self._partialHeader is not None:
                    header = b''.join(self._partialHeader)
                    name, value = header.split(b':', 1)
                    value = value.strip()
                    self.headerReceived(name, value)
                if not line:
                    # Empty line means the header section is over.
                    self.allHeadersReceived()
                else:
                    # Line not beginning with LWS is another header.
                    self._partialHeader = [line]
            else:
                # A line beginning with LWS is a continuation of a header
                # begun on a previous line.
                self._partialHeader.append(line)
    def rawDataReceived(self, data):
        """
        Pass data from the message body to the body decoder object.
        """
        self.bodyDecoder.dataReceived(data)
    def isConnectionControlHeader(self, name):
        """
        Return C{True} if the given lower-cased name is the name of a
        connection control header (rather than an entity header).
        According to RFC 2616, section 14.10, the tokens in the Connection
        header are probably relevant here. However, I am not sure what the
        practical consequences of either implementing or ignoring that are.
        So I leave it unimplemented for the time being.
        """
        return name in self.CONNECTION_CONTROL_HEADERS
    def statusReceived(self, status):
        """
        Callback invoked whenever the first line of a new message is received.
        Override this.
        @param status: The first line of an HTTP request or response message
            without trailing I{CR LF}.
        @type status: C{bytes}
        """
    def headerReceived(self, name, value):
        """
        Store the given header in C{self.headers} or, for connection control
        headers, in C{self.connHeaders}.
        """
        # Header names are case-insensitive; normalize for lookup and storage.
        name = name.lower()
        if self.isConnectionControlHeader(name):
            headers = self.connHeaders
        else:
            headers = self.headers
        headers.addRawHeader(name, value)
    def allHeadersReceived(self):
        """
        Callback invoked after the last header is passed to C{headerReceived}.
        Override this to change to the C{BODY} or C{DONE} state.
        """
        self.switchToBodyMode(None)
class HTTPClientParser(HTTPParser):
    """
    An HTTP parser which only handles HTTP responses.
    @ivar request: The request with which the expected response is associated.
    @type request: L{Request}
    @ivar NO_BODY_CODES: A C{set} of response codes which B{MUST NOT} have a
        body.
    @ivar finisher: A callable to invoke when this response is fully parsed.
    @ivar _responseDeferred: A L{Deferred} which will be called back with the
        response when all headers in the response have been received.
        Thereafter, L{None}.
    @ivar _everReceivedData: C{True} if any bytes have been received.
    """
    NO_BODY_CODES = set([NO_CONTENT, NOT_MODIFIED])
    # Mapping of supported Transfer-Encoding values to decoder factories.
    _transferDecoders = {
        b'chunked': _ChunkedTransferDecoder,
    }
    bodyDecoder = None
    _log = Logger()
    def __init__(self, request, finisher):
        """
        @param request: See L{HTTPClientParser.request}.
        @param finisher: See L{HTTPClientParser.finisher}.
        """
        self.request = request
        self.finisher = finisher
        self._responseDeferred = Deferred()
        self._everReceivedData = False
    def dataReceived(self, data):
        """
        Override so that we know if any response has been received.
        """
        self._everReceivedData = True
        HTTPParser.dataReceived(self, data)
    def parseVersion(self, strversion):
        """
        Parse version strings of the form Protocol '/' Major '.' Minor. E.g.
        b'HTTP/1.1'.  Returns (protocol, major, minor).  Will raise
        L{BadResponseVersion} on bad syntax.
        """
        try:
            proto, strnumber = strversion.split(b'/')
            major, minor = strnumber.split(b'.')
            major, minor = int(major), int(minor)
        except ValueError as e:
            raise BadResponseVersion(str(e), strversion)
        if major < 0 or minor < 0:
            raise BadResponseVersion(u"version may not be negative",
                                     strversion)
        return (proto, major, minor)
    def statusReceived(self, status):
        """
        Parse the status line into its components and create a response object
        to keep track of this response's state.
        @raise ParseError: If the status line is malformed.
        """
        parts = status.split(b' ', 2)
        if len(parts) == 2:
            # Some broken servers omit the required `phrase` portion of
            # `status-line`.  One such server identified as
            # "cloudflare-nginx".  Others fail to identify themselves
            # entirely.  Fill in an empty phrase for such cases.
            version, codeBytes = parts
            phrase = b""
        elif len(parts) == 3:
            version, codeBytes, phrase = parts
        else:
            raise ParseError(u"wrong number of parts", status)
        try:
            statusCode = int(codeBytes)
        except ValueError:
            raise ParseError(u"non-integer status code", status)
        self.response = Response._construct(
            self.parseVersion(version),
            statusCode,
            phrase,
            self.headers,
            self.transport,
            self.request,
        )
    def _finished(self, rest):
        """
        Called to indicate that an entire response has been received.  No more
        bytes will be interpreted by this L{HTTPClientParser}.  Extra bytes are
        passed up and the state of this L{HTTPClientParser} is set to I{DONE}.
        @param rest: A C{bytes} giving any extra bytes delivered to this
            L{HTTPClientParser} which are not part of the response being
            parsed.
        """
        self.state = DONE
        self.finisher(rest)
    def isConnectionControlHeader(self, name):
        """
        Content-Length in the response to a HEAD request is an entity header,
        not a connection control header.
        """
        if self.request.method == b'HEAD' and name == b'content-length':
            return False
        return HTTPParser.isConnectionControlHeader(self, name)
    def allHeadersReceived(self):
        """
        Figure out how long the response body is going to be by examining
        headers and stuff.
        Determines body framing in this order: 1XX responses are discarded
        and the parser reset; no-body responses (per status code or HEAD
        request) finish immediately; otherwise Transfer-Encoding takes
        precedence over Content-Length.
        @raise ValueError: If multiple Content-Length headers are present.
        """
        if 100 <= self.response.code < 200:
            # RFC 7231 Section 6.2 says that if we receive a 1XX status code
            # and aren't expecting it, we MAY ignore it.  That's what we're
            # going to do.  We reset the parser here, but we leave
            # _everReceivedData in its True state because we have, in fact,
            # received data.
            self._log.info(
                "Ignoring unexpected {code} response",
                code=self.response.code
            )
            self.connectionMade()
            del self.response
            return
        if (self.response.code in self.NO_BODY_CODES
                or self.request.method == b'HEAD'):
            self.response.length = 0
            # The order of the next two lines might be of interest when adding
            # support for pipelining.
            self._finished(self.clearLineBuffer())
            self.response._bodyDataFinished()
        else:
            transferEncodingHeaders = self.connHeaders.getRawHeaders(
                b'transfer-encoding')
            if transferEncodingHeaders:
                # This could be a KeyError.  However, that would mean we do not
                # know how to decode the response body, so failing the request
                # is as good a behavior as any.  Perhaps someday we will want
                # to normalize/document/test this specifically, but failing
                # seems fine to me for now.
                transferDecoder = self._transferDecoders[transferEncodingHeaders[0].lower()]
                # If anyone ever invents a transfer encoding other than
                # chunked (yea right), and that transfer encoding can predict
                # the length of the response body, it might be sensible to
                # allow the transfer decoder to set the response object's
                # length attribute.
            else:
                contentLengthHeaders = self.connHeaders.getRawHeaders(
                    b'content-length')
                if contentLengthHeaders is None:
                    contentLength = None
                elif len(contentLengthHeaders) == 1:
                    contentLength = int(contentLengthHeaders[0])
                    self.response.length = contentLength
                else:
                    # "HTTP Message Splitting" or "HTTP Response Smuggling"
                    # potentially happening.  Or it's just a buggy server.
                    raise ValueError(u"Too many Content-Length headers; "
                                     u"response is invalid")
                if contentLength == 0:
                    self._finished(self.clearLineBuffer())
                    transferDecoder = None
                else:
                    # contentLength may be None here, meaning "read until the
                    # connection closes"; _IdentityTransferDecoder handles it.
                    transferDecoder = lambda x, y: _IdentityTransferDecoder(
                        contentLength, x, y)
            if transferDecoder is None:
                self.response._bodyDataFinished()
            else:
                # Make sure as little data as possible from the response body
                # gets delivered to the response object until the response
                # object actually indicates it is ready to handle bytes
                # (probably because an application gave it a way to interpret
                # them).
                self.transport.pauseProducing()
                self.switchToBodyMode(transferDecoder(
                    self.response._bodyDataReceived,
                    self._finished))
        # This must be last.  If it were first, then application code might
        # change some state (for example, registering a protocol to receive the
        # response body).  Then the pauseProducing above would be wrong since
        # the response is ready for bytes and nothing else would ever resume
        # the transport.
        self._responseDeferred.callback(self.response)
        del self._responseDeferred
    def connectionLost(self, reason):
        """
        Notify the body decoder (if any) that no more data is coming, or fail
        the response Deferred if the response was never fully received.
        """
        if self.bodyDecoder is not None:
            try:
                try:
                    self.bodyDecoder.noMoreData()
                except PotentialDataLoss:
                    self.response._bodyDataFinished(Failure())
                except _DataLoss:
                    self.response._bodyDataFinished(
                        Failure(ResponseFailed([reason, Failure()],
                                               self.response)))
                else:
                    self.response._bodyDataFinished()
            except:
                # Handle exceptions from both the except suites and the else
                # suite.  Those functions really shouldn't raise exceptions,
                # but maybe there's some buggy application code somewhere
                # making things difficult.
                self._log.failure('')
        elif self.state != DONE:
            # The connection was lost mid-response; distinguish "no bytes at
            # all" from "partial response" for the caller.
            if self._everReceivedData:
                exceptionClass = ResponseFailed
            else:
                exceptionClass = ResponseNeverReceived
            self._responseDeferred.errback(Failure(exceptionClass([reason])))
            del self._responseDeferred
@implementer(IClientRequest)
class Request:
    """
    A L{Request} instance describes an HTTP request to be sent to an HTTP
    server.
    @ivar method: See L{__init__}.
    @ivar uri: See L{__init__}.
    @ivar headers: See L{__init__}.
    @ivar bodyProducer: See L{__init__}.
    @ivar persistent: See L{__init__}.
    @ivar _parsedURI: Parsed I{URI} for the request, or L{None}.
    @type _parsedURI: L{twisted.web.client.URI} or L{None}
    """
    _log = Logger()
    def __init__(self, method, uri, headers, bodyProducer, persistent=False):
        """
        @param method: The HTTP method for this request, ex: b'GET', b'HEAD',
            b'POST', etc.
        @type method: L{bytes}
        @param uri: The relative URI of the resource to request.  For example,
            C{b'/foo/bar?baz=quux'}.
        @type uri: L{bytes}
        @param headers: Headers to be sent to the server.  It is important to
            note that this object does not create any implicit headers.  So it
            is up to the HTTP Client to add required headers such as 'Host'.
        @type headers: L{twisted.web.http_headers.Headers}
        @param bodyProducer: L{None} or an L{IBodyProducer} provider which
            produces the content body to send to the remote HTTP server.
        @param persistent: Set to C{True} when you use HTTP persistent
            connection, defaults to C{False}.
        @type persistent: L{bool}
        """
        self.method = method
        self.uri = uri
        self.headers = headers
        self.bodyProducer = bodyProducer
        self.persistent = persistent
        self._parsedURI = None
    @classmethod
    def _construct(cls, method, uri, headers, bodyProducer, persistent=False,
                   parsedURI=None):
        """
        Private constructor.
        @param method: See L{__init__}.
        @param uri: See L{__init__}.
        @param headers: See L{__init__}.
        @param bodyProducer: See L{__init__}.
        @param persistent: See L{__init__}.
        @param parsedURI: See L{Request._parsedURI}.
        @return: L{Request} instance.
        """
        request = cls(method, uri, headers, bodyProducer, persistent)
        request._parsedURI = parsedURI
        return request
    @property
    def absoluteURI(self):
        """
        The absolute URI of the request as C{bytes}, or L{None} if the
        absolute URI cannot be determined (i.e. no parsed URI is available).
        """
        return getattr(self._parsedURI, 'toBytes', lambda: None)()
    def _writeHeaders(self, transport, TEorCL):
        """
        Write the request line and all headers to C{transport}.
        @param TEorCL: A C{bytes} Transfer-Encoding or Content-Length header
            line (including trailing CRLF) to frame the body, or L{None} for
            no body-framing header.
        @raise BadHeaders: If there is not exactly one I{Host} header.
        """
        hosts = self.headers.getRawHeaders(b'host', ())
        if len(hosts) != 1:
            raise BadHeaders(u"Exactly one Host header required")
        # In the future, having the protocol version be a parameter to this
        # method would probably be good.  It would be nice if this method
        # weren't limited to issuing HTTP/1.1 requests.
        requestLines = []
        requestLines.append(b' '.join([self.method, self.uri,
            b'HTTP/1.1\r\n']))
        if not self.persistent:
            requestLines.append(b'Connection: close\r\n')
        if TEorCL is not None:
            requestLines.append(TEorCL)
        for name, values in self.headers.getAllRawHeaders():
            requestLines.extend([name + b': ' + v + b'\r\n' for v in values])
        requestLines.append(b'\r\n')
        transport.writeSequence(requestLines)
    def _writeToBodyProducerChunked(self, transport):
        """
        Write this request to the given transport using chunked
        transfer-encoding to frame the body.
        @param transport: See L{writeTo}.
        @return: See L{writeTo}.
        """
        self._writeHeaders(transport, b'Transfer-Encoding: chunked\r\n')
        encoder = ChunkedEncoder(transport)
        encoder.registerProducer(self.bodyProducer, True)
        d = self.bodyProducer.startProducing(encoder)
        def cbProduced(ignored):
            # Normal completion: emit the terminating zero-length chunk.
            encoder.unregisterProducer()
        def ebProduced(err):
            encoder._allowNoMoreWrites()
            # Don't call the encoder's unregisterProducer because it will write
            # a zero-length chunk.  This would indicate to the server that the
            # request body is complete.  There was an error, though, so we
            # don't want to do that.
            transport.unregisterProducer()
            return err
        d.addCallbacks(cbProduced, ebProduced)
        return d
    def _writeToBodyProducerContentLength(self, transport):
        """
        Write this request to the given transport using content-length to frame
        the body.
        @param transport: See L{writeTo}.
        @return: See L{writeTo}.
        """
        self._writeHeaders(
            transport,
            networkString(
                'Content-Length: %d\r\n' % (self.bodyProducer.length,)))
        # This Deferred is used to signal an error in the data written to the
        # encoder below.  It can only errback and it will only do so before too
        # many bytes have been written to the encoder and before the producer
        # Deferred fires.
        finishedConsuming = Deferred()
        # This makes sure the producer writes the correct number of bytes for
        # the request body.
        encoder = LengthEnforcingConsumer(
            self.bodyProducer, transport, finishedConsuming)
        transport.registerProducer(self.bodyProducer, True)
        finishedProducing = self.bodyProducer.startProducing(encoder)
        def combine(consuming, producing):
            # This Deferred is returned and will be fired when the first of
            # consuming or producing fires.  If it's cancelled, forward that
            # cancellation to the producer.
            def cancelConsuming(ign):
                finishedProducing.cancel()
            ultimate = Deferred(cancelConsuming)
            # Keep track of what has happened so far.  This initially
            # contains None, then an integer uniquely identifying what
            # sequence of events happened.  See the callbacks and errbacks
            # defined below for the meaning of each value.
            state = [None]
            def ebConsuming(err):
                if state == [None]:
                    # The consuming Deferred failed first.  This means the
                    # overall writeTo Deferred is going to errback now.  The
                    # producing Deferred should not fire later (because the
                    # consumer should have called stopProducing on the
                    # producer), but if it does, a callback will be ignored
                    # and an errback will be logged.
                    state[0] = 1
                    ultimate.errback(err)
                else:
                    # The consuming Deferred errbacked after the producing
                    # Deferred fired.  This really shouldn't ever happen.
                    # If it does, I goofed.  Log the error anyway, just so
                    # there's a chance someone might notice and complain.
                    self._log.failure(
                        u"Buggy state machine in {request}/[{state}]: "
                        u"ebConsuming called",
                        failure=err,
                        request=repr(self),
                        state=state[0]
                    )
            def cbProducing(result):
                if state == [None]:
                    # The producing Deferred succeeded first.  Nothing will
                    # ever happen to the consuming Deferred.  Tell the
                    # encoder we're done so it can check what the producer
                    # wrote and make sure it was right.
                    state[0] = 2
                    try:
                        encoder._noMoreWritesExpected()
                    except:
                        # Fail the overall writeTo Deferred - something the
                        # producer did was wrong.
                        ultimate.errback()
                    else:
                        # Success - succeed the overall writeTo Deferred.
                        ultimate.callback(None)
                # Otherwise, the consuming Deferred already errbacked.  The
                # producing Deferred wasn't supposed to fire, but it did
                # anyway.  It's buggy, but there's not really anything to be
                # done about it.  Just ignore this result.
            def ebProducing(err):
                if state == [None]:
                    # The producing Deferred failed first.  This means the
                    # overall writeTo Deferred is going to errback now.
                    # Tell the encoder that we're done so it knows to reject
                    # further writes from the producer (which should not
                    # happen, but the producer may be buggy).
                    state[0] = 3
                    encoder._allowNoMoreWrites()
                    ultimate.errback(err)
                else:
                    # The producing Deferred failed after the consuming
                    # Deferred failed.  It shouldn't have, so it's buggy.
                    # Log the exception in case anyone who can fix the code
                    # is watching.
                    self._log.failure(u"Producer is buggy", failure=err)
            consuming.addErrback(ebConsuming)
            producing.addCallbacks(cbProducing, ebProducing)
            return ultimate
        d = combine(finishedConsuming, finishedProducing)
        def f(passthrough):
            # Regardless of what happens with the overall Deferred, once it
            # fires, the producer registered way up above the definition of
            # combine should be unregistered.
            transport.unregisterProducer()
            return passthrough
        d.addBoth(f)
        return d
    def _writeToEmptyBodyContentLength(self, transport):
        """
        Write this request to the given transport using content-length to frame
        the (empty) body.
        @param transport: See L{writeTo}.
        @return: See L{writeTo}.
        """
        self._writeHeaders(transport, b"Content-Length: 0\r\n")
        return succeed(None)
    def writeTo(self, transport):
        """
        Format this L{Request} as an HTTP/1.1 request and write it to the given
        transport.  If bodyProducer is not None, it will be associated with an
        L{IConsumer}.
        @param transport: The transport to which to write.
        @type transport: L{twisted.internet.interfaces.ITransport} provider
        @return: A L{Deferred} which fires with L{None} when the request has
            been completely written to the transport or with a L{Failure} if
            there is any problem generating the request bytes.
        """
        # NOTE(review): the two bodyProducer-is-None branches below return
        # None rather than the Deferred the docstring promises; callers
        # presumably wrap this call with maybeDeferred -- confirm before
        # relying on the return value directly.
        if self.bodyProducer is None:
            # If the method semantics anticipate a body, include a
            # Content-Length even if it is 0.
            # https://tools.ietf.org/html/rfc7230#section-3.3.2
            if self.method in (b"PUT", b"POST"):
                self._writeToEmptyBodyContentLength(transport)
            else:
                self._writeHeaders(transport, None)
        elif self.bodyProducer.length is UNKNOWN_LENGTH:
            return self._writeToBodyProducerChunked(transport)
        else:
            return self._writeToBodyProducerContentLength(transport)
    def stopWriting(self):
        """
        Stop writing this request to the transport.  This can only be called
        after C{writeTo} and before the L{Deferred} returned by C{writeTo}
        fires.  It should cancel any asynchronous task started by C{writeTo}.
        The L{Deferred} returned by C{writeTo} need not be fired if this method
        is called.
        """
        # If bodyProducer is None, then the Deferred returned by writeTo has
        # fired already and this method cannot be called.
        _callAppFunction(self.bodyProducer.stopProducing)
class LengthEnforcingConsumer:
    """
    An L{IConsumer} proxy which enforces an exact length requirement on the
    total data written to it.
    @ivar _length: The number of bytes remaining to be written.
    @ivar _producer: The L{IBodyProducer} which is writing to this
        consumer.
    @ivar _consumer: The consumer to which at most C{_length} bytes will be
        forwarded.
    @ivar _finished: A L{Deferred} which will be fired with a L{Failure} if too
        many bytes are written to this consumer.
    """
    def __init__(self, producer, consumer, finished):
        self._producer = producer
        self._consumer = consumer
        self._finished = finished
        # The producer declared how many bytes it will write; count down
        # from that as bytes arrive.
        self._length = producer.length
    def _allowNoMoreWrites(self):
        """
        Indicate that no additional writes are allowed.  Attempts to write
        after calling this method will be met with an exception.
        """
        self._finished = None
    def write(self, bytes):
        """
        Forward C{bytes} to the wrapped consumer, unless writes have been
        disallowed or the producer's declared length would be exceeded.
        """
        if self._finished is None:
            # No writes are supposed to happen any more.  Try to convince the
            # calling code to stop calling this method by calling its
            # stopProducing method and then throwing an exception at it.  This
            # exception isn't documented as part of the API because you're
            # never supposed to expect it: only buggy code will ever receive
            # it.
            self._producer.stopProducing()
            raise ExcessWrite()
        count = len(bytes)
        if count > self._length:
            # No synchronous exception is raised in *this* error path because
            # we still have _finished which we can use to report the error to a
            # better place than the direct caller of this method (some
            # arbitrary application code).
            _callAppFunction(self._producer.stopProducing)
            self._finished.errback(WrongBodyLength(u"too many bytes written"))
            self._allowNoMoreWrites()
            return
        self._length -= count
        self._consumer.write(bytes)
    def _noMoreWritesExpected(self):
        """
        Called to indicate no more bytes will be written to this consumer.
        Check to see that the correct number have been written.
        @raise WrongBodyLength: If not enough bytes have been written.
        """
        if self._finished is None:
            return
        self._allowNoMoreWrites()
        if self._length:
            raise WrongBodyLength(u"too few bytes written")
def makeStatefulDispatcher(name, template):
    """
    Create a method which dispatches to a state-specific implementation.
    The returned function, when used as a method, looks up
    C{'_%s_%s' % (name, self._state)} on the instance each time it is
    called and delegates to that method, so behavior follows the
    instance's C{_state} attribute.
    @param name: A string which is used to construct the name of the subsidiary
        method to invoke.
    @param template: A function object whose docstring is copied onto the
        returned dispatcher.
    @return: The dispatcher function.
    """
    def dispatcher(self, *args, **kwargs):
        handler = getattr(self, '_%s_%s' % (name, self._state), None)
        if handler is not None:
            return handler(*args, **kwargs)
        raise RuntimeError(
            u"%r has no %s method in state %s" % (self, name, self._state))
    dispatcher.__doc__ = template.__doc__
    return dispatcher
# This proxy class is used only in the private constructor of the Response
# class below (Response._construct), in order to prevent users relying on
# any property of the concrete request object: through the proxy they can
# only use what is provided by IClientRequest.
_ClientRequestProxy = proxyForInterface(IClientRequest)
@implementer(IResponse)
class Response:
    """
    A L{Response} instance describes an HTTP response received from an HTTP
    server.
    L{Response} should not be subclassed or instantiated.
    @ivar _transport: See L{__init__}.
    @ivar _bodyProtocol: The L{IProtocol} provider to which the body is
        delivered.  L{None} before one has been registered with
        C{deliverBody}.
    @ivar _bodyBuffer: A C{list} of the strings passed to C{bodyDataReceived}
        before C{deliverBody} is called.  L{None} afterwards.
    @ivar _state: Indicates what state this L{Response} instance is in,
        particularly with respect to delivering bytes from the response body
        to an application-supplied protocol object.  This may be one of
        C{'INITIAL'}, C{'CONNECTED'}, C{'DEFERRED_CLOSE'}, or C{'FINISHED'},
        with the following meanings:
          - INITIAL: This is the state L{Response} objects start in.  No
            protocol has yet been provided and the underlying transport may
            still have bytes to deliver to it.
          - DEFERRED_CLOSE: If the underlying transport indicates all bytes
            have been delivered but no application-provided protocol is yet
            available, the L{Response} moves to this state.  Data is
            buffered and waiting for a protocol to be delivered to.
          - CONNECTED: If a protocol is provided when the state is INITIAL,
            the L{Response} moves to this state.  Any buffered data is
            delivered and any data which arrives from the transport
            subsequently is given directly to the protocol.
          - FINISHED: If a protocol is provided in the DEFERRED_CLOSE state,
            the L{Response} moves to this state after delivering all
            buffered data to the protocol.  Otherwise, if the L{Response} is
            in the CONNECTED state, if the transport indicates there is no
            more data, the L{Response} moves to this state.  Nothing else
            can happen once the L{Response} is in this state.
    @type _state: C{str}
    """
    length = UNKNOWN_LENGTH
    _bodyProtocol = None
    _bodyFinished = False
    def __init__(self, version, code, phrase, headers, _transport):
        """
        @param version: HTTP version components protocol, major, minor.  E.g.
            C{(b'HTTP', 1, 1)} to mean C{b'HTTP/1.1'}.
        @param code: HTTP status code.
        @type code: L{int}
        @param phrase: HTTP reason phrase, intended to give a short description
            of the HTTP status code.
        @param headers: HTTP response headers.
        @type headers: L{twisted.web.http_headers.Headers}
        @param _transport: The transport which is delivering this response.
        """
        self.version = version
        self.code = code
        self.phrase = phrase
        self.headers = headers
        self._transport = _transport
        self._bodyBuffer = []
        self._state = 'INITIAL'
        self.request = None
        self.previousResponse = None
    @classmethod
    def _construct(cls, version, code, phrase, headers, _transport, request):
        """
        Private constructor.
        @param version: See L{__init__}.
        @param code: See L{__init__}.
        @param phrase: See L{__init__}.
        @param headers: See L{__init__}.
        @param _transport: See L{__init__}.
        @param request: See L{IResponse.request}.
        @return: L{Response} instance.
        """
        response = Response(version, code, phrase, headers, _transport)
        # Expose only the IClientRequest surface of the request to users.
        response.request = _ClientRequestProxy(request)
        return response
    def setPreviousResponse(self, previousResponse):
        """
        Record the response from which this one descends (e.g. a redirect).
        """
        self.previousResponse = previousResponse
    def deliverBody(self, protocol):
        """
        Dispatch the given L{IProtocol} depending of the current state of the
        response.
        """
    # FIX: these dispatcher assignments must be made at class scope so that
    # the stateful dispatcher replaces the documentation stub above.
    # Previously they were indented inside the method bodies, which left the
    # empty stubs installed (and referenced an unbound local at call time).
    deliverBody = makeStatefulDispatcher('deliverBody', deliverBody)
    def _deliverBody_INITIAL(self, protocol):
        """
        Deliver any buffered data to C{protocol} and prepare to deliver any
        future data to it.  Move to the C{'CONNECTED'} state.
        """
        protocol.makeConnection(self._transport)
        self._bodyProtocol = protocol
        for data in self._bodyBuffer:
            self._bodyProtocol.dataReceived(data)
        self._bodyBuffer = None
        self._state = 'CONNECTED'
        # Now that there's a protocol to consume the body, resume the
        # transport.  It was previously paused by HTTPClientParser to avoid
        # reading too much data before it could be handled.  We need to do this
        # after we transition our state as it may recursively lead to more data
        # being delivered, or even the body completing.
        self._transport.resumeProducing()
    def _deliverBody_CONNECTED(self, protocol):
        """
        It is invalid to attempt to deliver data to a protocol when it is
        already being delivered to another protocol.
        """
        raise RuntimeError(
            u"Response already has protocol %r, cannot deliverBody "
            u"again" % (self._bodyProtocol,))
    def _deliverBody_DEFERRED_CLOSE(self, protocol):
        """
        Deliver any buffered data to C{protocol} and then disconnect the
        protocol.  Move to the C{'FINISHED'} state.
        """
        # Unlike _deliverBody_INITIAL, there is no need to resume the
        # transport here because all of the response data has been received
        # already.  Some higher level code may want to resume the transport if
        # that code expects further data to be received over it.
        protocol.makeConnection(self._transport)
        for data in self._bodyBuffer:
            protocol.dataReceived(data)
        self._bodyBuffer = None
        protocol.connectionLost(self._reason)
        self._state = 'FINISHED'
    def _deliverBody_FINISHED(self, protocol):
        """
        It is invalid to attempt to deliver data to a protocol after the
        response body has been delivered to another protocol.
        """
        raise RuntimeError(
            u"Response already finished, cannot deliverBody now.")
    def _bodyDataReceived(self, data):
        """
        Called by HTTPClientParser with chunks of data from the response body.
        They will be buffered or delivered to the protocol passed to
        deliverBody.
        """
    _bodyDataReceived = makeStatefulDispatcher('bodyDataReceived',
                                               _bodyDataReceived)
    def _bodyDataReceived_INITIAL(self, data):
        """
        Buffer any data received for later delivery to a protocol passed to
        C{deliverBody}.
        Little or no data should be buffered by this method, since the
        transport has been paused and will not be resumed until a protocol
        is supplied.
        """
        self._bodyBuffer.append(data)
    def _bodyDataReceived_CONNECTED(self, data):
        """
        Deliver any data received to the protocol to which this L{Response}
        is connected.
        """
        self._bodyProtocol.dataReceived(data)
    def _bodyDataReceived_DEFERRED_CLOSE(self, data):
        """
        It is invalid for data to be delivered after it has been indicated
        that the response body has been completely delivered.
        """
        raise RuntimeError(u"Cannot receive body data after _bodyDataFinished")
    def _bodyDataReceived_FINISHED(self, data):
        """
        It is invalid for data to be delivered after the response body has
        been delivered to a protocol.
        """
        raise RuntimeError(u"Cannot receive body data after "
                           u"protocol disconnected")
    def _bodyDataFinished(self, reason=None):
        """
        Called by HTTPClientParser when no more body data is available.  If the
        optional reason is supplied, this indicates a problem or potential
        problem receiving all of the response body.
        """
    _bodyDataFinished = makeStatefulDispatcher('bodyDataFinished',
                                               _bodyDataFinished)
    def _bodyDataFinished_INITIAL(self, reason=None):
        """
        Move to the C{'DEFERRED_CLOSE'} state to wait for a protocol to
        which to deliver the response body.
        """
        self._state = 'DEFERRED_CLOSE'
        if reason is None:
            reason = Failure(ResponseDone(u"Response body fully received"))
        self._reason = reason
    def _bodyDataFinished_CONNECTED(self, reason=None):
        """
        Disconnect the protocol and move to the C{'FINISHED'} state.
        """
        if reason is None:
            reason = Failure(ResponseDone(u"Response body fully received"))
        self._bodyProtocol.connectionLost(reason)
        self._bodyProtocol = None
        self._state = 'FINISHED'
    def _bodyDataFinished_DEFERRED_CLOSE(self, reason=None):
        """
        It is invalid to attempt to notify the L{Response} of the end of the
        response body data more than once.
        Accepts the optional C{reason} the dispatcher may forward so the
        failure is the intended L{RuntimeError} rather than a L{TypeError}.
        """
        raise RuntimeError(u"Cannot finish body data more than once")
    def _bodyDataFinished_FINISHED(self, reason=None):
        """
        It is invalid to attempt to notify the L{Response} of the end of the
        response body data more than once.
        Accepts the optional C{reason} the dispatcher may forward so the
        failure is the intended L{RuntimeError} rather than a L{TypeError}.
        """
        raise RuntimeError(u"Cannot finish body data after "
                           u"protocol disconnected")
@implementer(IConsumer)
class ChunkedEncoder:
    """
    Helper object which exposes L{IConsumer} on top of L{HTTP11ClientProtocol}
    for streaming request bodies to the server using chunked
    transfer-encoding.
    """
    def __init__(self, transport):
        self.transport = transport
    def _allowNoMoreWrites(self):
        """
        Disallow further writes; any subsequent L{write} raises.
        """
        self.transport = None
    def registerProducer(self, producer, streaming):
        """
        Register the given producer with C{self.transport}.
        """
        self.transport.registerProducer(producer, streaming)
    def write(self, data):
        """
        Write the given request body bytes to the transport using chunked
        encoding.
        @type data: C{bytes}
        """
        transport = self.transport
        if transport is None:
            raise ExcessWrite()
        # Frame the payload as: hex length, CRLF, payload, CRLF.
        framed = (networkString("%x\r\n" % len(data)), data, b"\r\n")
        transport.writeSequence(framed)
    def unregisterProducer(self):
        """
        Indicate that the request body is complete and finish the request.
        """
        # A zero-length chunk terminates the chunked body.
        self.write(b'')
        self.transport.unregisterProducer()
        self._allowNoMoreWrites()
@implementer(IPushProducer)
class TransportProxyProducer:
    """
    A pass-through L{twisted.internet.interfaces.IPushProducer}: it forwards
    producer method calls to a wrapped producer until told to stop proxying,
    after which every call becomes a no-op.
    @ivar _producer: The wrapped L{twisted.internet.interfaces.IPushProducer}
        provider or L{None} after this proxy has been stopped.
    """
    # LineReceiver uses this undocumented attribute of transports to decide
    # when to stop calling lineReceived or rawDataReceived (if it finds it to
    # be true, it doesn't bother to deliver any more data).  Set disconnecting
    # to False here and never change it to true so that all data is always
    # delivered to us and so that LineReceiver doesn't fail with an
    # AttributeError.
    disconnecting = False
    def __init__(self, producer):
        self._producer = producer
    def stopProxying(self):
        """
        Detach from the underlying producer; from now on all of the
        L{twisted.internet.interfaces.IPushProducer} methods are no-ops.
        """
        self._producer = None
    def stopProducing(self):
        """
        Proxy the stoppage to the underlying producer, unless this proxy has
        been stopped.
        """
        producer = self._producer
        if producer is not None:
            producer.stopProducing()
    def resumeProducing(self):
        """
        Proxy the resumption to the underlying producer, unless this proxy has
        been stopped.
        """
        producer = self._producer
        if producer is not None:
            producer.resumeProducing()
    def pauseProducing(self):
        """
        Proxy the pause to the underlying producer, unless this proxy has been
        stopped.
        """
        producer = self._producer
        if producer is not None:
            producer.pauseProducing()
    def loseConnection(self):
        """
        Proxy the request to lose the connection to the underlying producer,
        unless this proxy has been stopped.
        """
        producer = self._producer
        if producer is not None:
            producer.loseConnection()
class HTTP11ClientProtocol(Protocol):
    """
    A minimal HTTP 1.1 client protocol: one request at a time, as few
    features as possible.

    @ivar _parser: After a request is issued, the L{HTTPClientParser} which
        receives data making up the response.
    @ivar _finishedRequest: After a request is issued, the L{Deferred} which
        will fire with a L{Response} (or fail if there is a connection or
        parsing problem).
    @ivar _currentRequest: After a request is issued, the L{Request} being
        sent, kept so generation can be stopped (e.g. on connection loss).
    @ivar _transportProxy: After a request is issued, the
        L{TransportProxyProducer} the parser is connected to, giving this
        protocol some control over the parser's pause/resume of the
        transport.
    @ivar _responseDeferred: The parser's response L{Deferred}; chained to
        C{_finishedRequest} only in certain states to avoid double-firing.
    @ivar _state: The request/response life-cycle state, one of:
        - QUIESCENT: the initial state; nothing in flight.
        - TRANSMITTING: L{Request.writeTo} has started sending a request
          which has not yet finished.
        - TRANSMITTING_AFTER_RECEIVING_RESPONSE: the complete response
          arrived before the request finished being sent.
        - GENERATION_FAILED: an error occurred while generating the
          request; it was not fully sent.
        - WAITING: the request was fully sent; awaiting the full response.
        - ABORTING: application code asked for the connection to be
          aborted.
        - CONNECTION_LOST: the connection has been lost.
    @type _state: C{str}
    @ivar _abortDeferreds: C{Deferred}s fired once the connection is lost
        after an abort request.
    """
    _state = 'QUIESCENT'
    _parser = None
    _finishedRequest = None
    _currentRequest = None
    _transportProxy = None
    _responseDeferred = None

    _log = Logger()

    def __init__(self, quiescentCallback=lambda c: None):
        self._quiescentCallback = quiescentCallback
        self._abortDeferreds = []

    @property
    def state(self):
        return self._state

    def request(self, request):
        """
        Issue C{request} over C{self.transport} and return a L{Deferred}
        which will fire with a L{Response} instance or an error.

        @param request: The object defining the parameters of the request to
            issue.
        @type request: L{Request}

        @rtype: L{Deferred}
        @return: A deferred which may errback with
            L{RequestGenerationFailed} (local error before the request was
            fully written), L{RequestTransmissionFailed} (network error
            while writing), L{ResponseFailed} (request sent but some or all
            of the response lost), or L{RequestNotSent} (this protocol can
            send no more requests).
        """
        if self._state != 'QUIESCENT':
            return fail(RequestNotSent())

        self._state = 'TRANSMITTING'
        _requestDeferred = maybeDeferred(request.writeTo, self.transport)

        def cancelRequest(ign):
            # If the request is still being written, cancel that write;
            # otherwise abort the connection and fail the parser outright.
            if self._state in (
                    'TRANSMITTING', 'TRANSMITTING_AFTER_RECEIVING_RESPONSE'):
                _requestDeferred.cancel()
            else:
                self.transport.abortConnection()
                self._disconnectParser(Failure(CancelledError()))

        self._finishedRequest = Deferred(cancelRequest)

        # Remember the Request so stopWriting can be invoked on it later.
        self._currentRequest = request

        self._transportProxy = TransportProxyProducer(self.transport)
        self._parser = HTTPClientParser(request, self._finishResponse)
        self._parser.makeConnection(self._transportProxy)
        self._responseDeferred = self._parser._responseDeferred

        def cbRequestWritten(ignored):
            if self._state == 'TRANSMITTING':
                self._state = 'WAITING'
                self._responseDeferred.chainDeferred(self._finishedRequest)

        def ebRequestWriting(err):
            if self._state == 'TRANSMITTING':
                self._state = 'GENERATION_FAILED'
                self.transport.abortConnection()
                self._finishedRequest.errback(
                    Failure(RequestGenerationFailed([err])))
            else:
                self._log.failure(
                    u'Error writing request, but not in valid state '
                    u'to finalize request: {state}',
                    failure=err,
                    state=self._state
                )

        _requestDeferred.addCallbacks(cbRequestWritten, ebRequestWriting)
        return self._finishedRequest

    def _finishResponse(self, rest):
        """
        Called by an L{HTTPClientParser} to indicate that it has parsed a
        complete response.

        @param rest: A C{bytes} giving any trailing bytes which were given
            to the L{HTTPClientParser} which were not part of the response
            it was parsing.
        """
    _finishResponse = makeStatefulDispatcher('finishResponse', _finishResponse)

    def _finishResponse_WAITING(self, rest):
        # rest is currently ignored; it would matter if pipelining support
        # is ever added, and trailers may need checking as well.
        if self._state == 'WAITING':
            self._state = 'QUIESCENT'
        else:
            # The server sent the entire response before the request was
            # fully sent.  Fire the request() Deferred with the response,
            # and make sure that a later completion of the write cannot
            # fire it a second time.
            self._state = 'TRANSMITTING_AFTER_RECEIVING_RESPONSE'
            self._responseDeferred.chainDeferred(self._finishedRequest)

        if self._parser is None:
            # We were invoked because the connection was lost; the parser
            # is already disconnected and the quiescent callback must not
            # run.
            return

        reason = ConnectionDone(u"synthetic!")
        connHeaders = self._parser.connHeaders.getRawHeaders(b'connection', ())
        if ((b'close' in connHeaders) or self._state != "QUIESCENT" or
                not self._currentRequest.persistent):
            self._giveUp(Failure(reason))
        else:
            # The transport may have been paused; resume it before treating
            # the connection as quiescent again.
            self.transport.resumeProducing()

            # Run the quiescent callback first so the connection re-enters
            # the connection pool before the request is finished.
            try:
                self._quiescentCallback(self)
            except:
                # Persistent connections are only an optimisation; on a
                # callback failure, just log it and drop the connection.
                self._log.failure('')
                self.transport.loseConnection()
            self._disconnectParser(reason)

    _finishResponse_TRANSMITTING = _finishResponse_WAITING

    def _disconnectParser(self, reason):
        """
        If a parser is still attached, detach it and deliver C{reason} to
        its C{connectionLost}; otherwise do nothing.

        @type reason: L{Failure}
        """
        if self._parser is not None:
            parser = self._parser
            self._parser = None
            self._currentRequest = None
            self._finishedRequest = None
            self._responseDeferred = None

            # Cut the parser off from the real transport before notifying
            # it, so that it can no longer do anything to the connection.
            self._transportProxy.stopProxying()
            self._transportProxy = None
            parser.connectionLost(reason)

    def _giveUp(self, reason):
        """
        Drop the underlying connection and disconnect the parser with the
        given L{Failure}.

        Use this method instead of calling the transport's loseConnection
        method directly otherwise random things will break.
        """
        self.transport.loseConnection()
        self._disconnectParser(reason)

    def dataReceived(self, bytes):
        """
        Feed received bytes to the response parser; any failure during
        parsing tears the connection down.
        """
        try:
            self._parser.dataReceived(bytes)
        except:
            self._giveUp(Failure())

    def connectionLost(self, reason):
        """
        The underlying transport went away.  If appropriate, notify the
        parser object.
        """
    connectionLost = makeStatefulDispatcher('connectionLost', connectionLost)

    def _connectionLost_QUIESCENT(self, reason):
        """
        Nothing was in progress; just record the C{'CONNECTION_LOST'}
        state.
        """
        self._state = 'CONNECTION_LOST'

    def _connectionLost_GENERATION_FAILED(self, reason):
        """
        The connection was already known to be inconsistent; just record
        the C{'CONNECTION_LOST'} state.
        """
        self._state = 'CONNECTION_LOST'

    def _connectionLost_TRANSMITTING(self, reason):
        """
        Fail the L{Deferred} for the current request, tell the request
        object it need not continue transmitting itself, and move to the
        C{'CONNECTION_LOST'} state.
        """
        self._state = 'CONNECTION_LOST'
        self._finishedRequest.errback(
            Failure(RequestTransmissionFailed([reason])))
        del self._finishedRequest

        # Tell the request that it should stop bothering now.
        self._currentRequest.stopWriting()

    def _connectionLost_TRANSMITTING_AFTER_RECEIVING_RESPONSE(self, reason):
        """
        Move to the C{'CONNECTION_LOST'} state.
        """
        self._state = 'CONNECTION_LOST'

    def _connectionLost_WAITING(self, reason):
        """
        Disconnect the response parser so it can propagate the event as
        necessary (for example, to call an application protocol's
        C{connectionLost} method, or to fail a request L{Deferred}) and
        move to the C{'CONNECTION_LOST'} state.
        """
        self._disconnectParser(reason)
        self._state = 'CONNECTION_LOST'

    def _connectionLost_ABORTING(self, reason):
        """
        Disconnect the response parser with a L{ConnectionAborted} failure,
        move to the C{'CONNECTION_LOST'} state, and fire all pending abort
        Deferreds.
        """
        self._disconnectParser(Failure(ConnectionAborted()))
        self._state = 'CONNECTION_LOST'
        for d in self._abortDeferreds:
            d.callback(None)
        self._abortDeferreds = []

    def abort(self):
        """
        Close the connection and cause all outstanding L{request}
        L{Deferred}s to fire with an error.
        """
        if self._state == "CONNECTION_LOST":
            return succeed(None)
        self.transport.loseConnection()
        self._state = 'ABORTING'
        d = Deferred()
        self._abortDeferreds.append(d)
        return d
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_872_0 |
crossvul-python_data_good_3268_3 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import contextlib
import datetime
import os
import pwd
import re
import time
from io import StringIO
from numbers import Number
try:
from hashlib import sha1
except ImportError:
from sha import sha as sha1
from jinja2 import Environment
from jinja2.loaders import FileSystemLoader
from jinja2.exceptions import TemplateSyntaxError, UndefinedError
from jinja2.utils import concat as j2_concat
from jinja2.runtime import Context, StrictUndefined
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleUndefinedVariable
from ansible.module_utils.six import string_types, text_type
from ansible.module_utils._text import to_native, to_text, to_bytes
from ansible.plugins import filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
from ansible.utils.unsafe_proxy import UnsafeProxy, wrap_var
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['Templar', 'generate_ansible_template_vars']
# A regex for checking to see if a variable we're trying to
# expand is just a single variable name.
# Primitive Types which we don't want Jinja to convert to strings.
NON_TEMPLATED_TYPES = ( bool, Number )
JINJA2_OVERRIDE = '#jinja2:'
def generate_ansible_template_vars(path):
    """
    Build the dictionary of ``template_*`` magic variables (plus
    ``ansible_managed``) that is made available when rendering the template
    file at *path*.

    :arg path: path to the template file being rendered.
    :returns: dict of template metadata variables.
    """
    b_path = to_bytes(path)
    # Stat once instead of three separate os.stat()/getmtime() calls.
    st = os.stat(b_path)
    try:
        # Prefer the symbolic owner name; fall back to the numeric uid when
        # there is no passwd entry (e.g. files owned by a deleted user).
        # Narrowed from a bare except: KeyError for a missing passwd entry.
        template_uid = pwd.getpwuid(st.st_uid).pw_name
    except (KeyError, TypeError):
        template_uid = st.st_uid

    temp_vars = {}
    temp_vars['template_host'] = os.uname()[1]
    temp_vars['template_path'] = b_path
    temp_vars['template_mtime'] = datetime.datetime.fromtimestamp(st.st_mtime)
    temp_vars['template_uid'] = template_uid
    temp_vars['template_fullpath'] = os.path.abspath(path)
    temp_vars['template_run_date'] = datetime.datetime.now()

    managed_default = C.DEFAULT_MANAGED_STR
    managed_str = managed_default.format(
        host=temp_vars['template_host'],
        uid=temp_vars['template_uid'],
        file=temp_vars['template_path'],
    )
    temp_vars['ansible_managed'] = time.strftime(managed_str,
                                                 time.localtime(st.st_mtime))

    return temp_vars
def _escape_backslashes(data, jinja_env):
    """Double backslashes within jinja2 expressions

    A playbook author may write::

        debug:
          msg: "Test Case 1\\3; {{ test1_name | regex_replace('^(.*)_name$', '\\1')}}"

    Text inside ``{{ }}`` is processed by yaml, then python, then jinja2, so
    backslash escapes are unescaped one extra time compared to text outside
    the delimiters -- normally requiring four backslashes there instead of
    two.  To spare authors from remembering two different escaping rules,
    this helper automatically doubles backslashes that appear inside string
    literals within jinja2 expressions.
    """
    if '\\' in data and '{{' in data:
        preprocessed = jinja_env.preprocess(data)
        rebuilt = []
        inside_expression = False
        for _lineno, token_type, token_value in jinja_env.lex(preprocessed):
            if token_type == 'variable_begin':
                inside_expression = True
            elif token_type == 'variable_end':
                inside_expression = False
            elif inside_expression and token_type == 'string':
                # Only string literals inside a jinja2 variable expression
                # get their backslashes doubled.
                token_value = token_value.replace('\\', '\\\\')
            rebuilt.append(token_value)
        data = ''.join(rebuilt)

    return data
def _count_newlines_from_end(in_str):
'''
Counts the number of newlines at the end of a string. This is used during
the jinja2 templating to ensure the count matches the input, since some newlines
may be thrown away during the templating.
'''
try:
i = len(in_str)
j = i -1
while in_str[j] == '\n':
j -= 1
return i - 1 - j
except IndexError:
# Uncommon cases: zero length string and string containing only newlines
return i
class AnsibleContext(Context):
    '''
    A jinja2 Context subclass that watches every variable resolution and
    records, via the ``unsafe`` flag, whether any resolved value was an
    AnsibleUnsafe.  The flag is checked after templating; when set, the
    final rendered result is wrapped with UnsafeProxy.
    '''
    def __init__(self, *args, **kwargs):
        super(AnsibleContext, self).__init__(*args, **kwargs)
        self.unsafe = False

    def _is_unsafe(self, val):
        '''
        Recursively decide whether *val*, or anything nested inside it,
        carries the __UNSAFE__ marker.  Containers are walked because their
        repr may embed jinja2 syntax that would otherwise lose the
        AnsibleUnsafe tagging.
        '''
        if isinstance(val, dict):
            return any(self._is_unsafe(val[key]) for key in val.keys())
        if isinstance(val, list):
            return any(self._is_unsafe(item) for item in val)
        return bool(isinstance(val, string_types) and hasattr(val, '__UNSAFE__'))

    def _update_unsafe(self, val):
        # Once the flag is set there is no need to inspect further values.
        if val is not None and not self.unsafe and self._is_unsafe(val):
            self.unsafe = True

    def resolve(self, key):
        '''
        Intercepted resolve() which updates the unsafe flag for every value
        looked up during templating.
        '''
        val = super(AnsibleContext, self).resolve(key)
        self._update_unsafe(val)
        return val

    def resolve_or_missing(self, key):
        val = super(AnsibleContext, self).resolve_or_missing(key)
        self._update_unsafe(val)
        return val
class AnsibleEnvironment(Environment):
    '''
    A jinja2 Environment that substitutes Ansible's own Context and
    Template classes for the stock jinja2 ones.
    '''
    context_class = AnsibleContext
    template_class = AnsibleJ2Template
class Templar:
    '''
    The main class for templating, with the main entry-point of template().
    '''

    def __init__(self, loader, shared_loader_obj=None, variables=None):
        """
        :arg loader: DataLoader used to resolve the template search basedir.
        :arg shared_loader_obj: optional object providing filter_loader,
            test_loader and lookup_loader attributes; the module-level
            loaders are used when not given.
        :arg variables: optional dict of template variables.  NOTE: this
            parameter previously defaulted to a mutable ``dict()`` literal
            shared by every instance created without explicit variables; a
            None sentinel with a per-instance fresh dict fixes that.
        """
        self._loader = loader
        self._filters = None
        self._tests = None
        self._available_variables = {} if variables is None else variables
        self._cached_result = {}

        if loader:
            self._basedir = loader.get_basedir()
        else:
            self._basedir = './'

        if shared_loader_obj:
            self._filter_loader = getattr(shared_loader_obj, 'filter_loader')
            self._test_loader = getattr(shared_loader_obj, 'test_loader')
            self._lookup_loader = getattr(shared_loader_obj, 'lookup_loader')
        else:
            self._filter_loader = filter_loader
            self._test_loader = test_loader
            self._lookup_loader = lookup_loader

        # flags to determine whether certain failures during templating
        # should result in fatal errors being raised
        self._fail_on_lookup_errors = True
        self._fail_on_filter_errors = True
        self._fail_on_undefined_errors = C.DEFAULT_UNDEFINED_VAR_BEHAVIOR

        self.environment = AnsibleEnvironment(
            trim_blocks=True,
            undefined=StrictUndefined,
            extensions=self._get_extensions(),
            finalize=self._finalize,
            loader=FileSystemLoader(self._basedir),
        )

        # the current rendering context under which the templar class is working
        self.cur_context = None

        # Matches a string that consists of nothing but a single bare
        # variable reference, e.g. "{{ foo }}".
        self.SINGLE_VAR = re.compile(r"^%s\s*(\w*)\s*%s$" % (self.environment.variable_start_string, self.environment.variable_end_string))

        self._clean_regex = re.compile(r'(?:%s|%s|%s|%s)' % (
            self.environment.variable_start_string,
            self.environment.block_start_string,
            self.environment.block_end_string,
            self.environment.variable_end_string
        ))
        self._no_type_regex = re.compile(r'.*\|\s*(?:%s)\s*(?:%s)?$' % ('|'.join(C.STRING_TYPE_FILTERS), self.environment.variable_end_string))
def _get_filters(self):
    '''
    Load (once) and return a copy of all filter plugins; Ansible's test
    plugins are merged in as well so they are usable in filter position.
    '''
    if self._filters is None:
        self._filters = dict()
        for plugin in self._filter_loader.all():
            self._filters.update(plugin.filters())
        self._filters.update(self._get_tests())
    return self._filters.copy()
def _get_tests(self):
    '''
    Load (once) and return a copy of all test plugins.
    '''
    if self._tests is None:
        self._tests = dict()
        for plugin in self._test_loader.all():
            self._tests.update(plugin.tests())
    return self._tests.copy()
def _get_extensions(self):
    '''
    Return the jinja2 extension names to enable, taken from the
    jinja_extensions setting in ansible.cfg (comma separated; embedded
    spaces are ignored).
    '''
    if not C.DEFAULT_JINJA2_EXTENSIONS:
        return []
    return C.DEFAULT_JINJA2_EXTENSIONS.replace(" ", "").split(',')
def _clean_data(self, orig_data):
    '''remove jinja2 template tags from data, recursing into containers;
    vault-encrypted values pass through untouched'''
    if hasattr(orig_data, '__ENCRYPTED__'):
        return orig_data

    if isinstance(orig_data, list):
        return [self._clean_data(list_item) for list_item in orig_data]

    if isinstance(orig_data, dict):
        return dict((self._clean_data(k), self._clean_data(orig_data[k]))
                    for k in orig_data)

    if isinstance(orig_data, string_types):
        # This will error with str data (needs unicode), but all strings
        # should already be converted already.  If you get an exception,
        # the problem is at the data origin; do not add to_text here.
        with contextlib.closing(StringIO(orig_data)) as data:
            # Track unmatched opening print/block delimiters: only matched
            # open/close pairs get rewritten (as comment delimiters).
            print_openings = []
            block_openings = []
            for mo in self._clean_regex.finditer(orig_data):
                token = mo.group(0)
                token_start = mo.start(0)

                if token[0] == self.environment.variable_start_string[0]:
                    if token == self.environment.block_start_string:
                        block_openings.append(token_start)
                    elif token == self.environment.variable_start_string:
                        print_openings.append(token_start)

                elif token[1] == self.environment.variable_end_string[1]:
                    prev_idx = None
                    if token == self.environment.block_end_string and block_openings:
                        prev_idx = block_openings.pop()
                    elif token == self.environment.variable_end_string and print_openings:
                        prev_idx = print_openings.pop()

                    if prev_idx is not None:
                        # overwrite the opening delimiter in place
                        data.seek(prev_idx, os.SEEK_SET)
                        data.write(to_text(self.environment.comment_start_string))
                        # overwrite the closing delimiter in place
                        data.seek(token_start, os.SEEK_SET)
                        data.write(to_text(self.environment.comment_end_string))

                else:
                    raise AnsibleError("Error while cleaning data for safety: unhandled regex match")

            return data.getvalue()

    return orig_data
def set_available_variables(self, variables):
    '''
    Sets the dictionary of template variables this Templar instance will
    use to template things, so we don't have to pass them around between
    internal methods.  The template cache is also cleared here, since the
    variables are being changed.

    :arg variables: dict of template variables.
    :raises AssertionError: if *variables* is not a dict.
    '''
    # Validate explicitly instead of with ``assert`` so the check survives
    # ``python -O``; AssertionError is kept for caller compatibility.
    if not isinstance(variables, dict):
        raise AssertionError(
            "the type of 'variables' should be a dict but was a %s" % type(variables))
    self._available_variables = variables
    self._cached_result = {}
def template(self, variable, convert_bare=False, preserve_trailing_newlines=True, escape_backslashes=True, fail_on_undefined=None, overrides=None,
             convert_data=True, static_vars=None, cache=True, bare_deprecated=True, disable_lookups=False):
    '''
    Templates (possibly recursively) any given data as input. If convert_bare is
    set to True, the given data will be wrapped as a jinja2 variable ('{{foo}}')
    before being sent through the template engine.

    :arg static_vars: dict keys which must NOT be templated; defaults to
        [''].  NOTE: the default used to be a mutable list literal shared
        across all calls; a None sentinel now produces a fresh list.
    '''
    # Don't template unsafe variables, just return them.
    if hasattr(variable, '__UNSAFE__'):
        return variable

    if static_vars is None:
        static_vars = ['']

    if fail_on_undefined is None:
        fail_on_undefined = self._fail_on_undefined_errors

    try:
        if convert_bare:
            variable = self._convert_bare_variable(variable, bare_deprecated=bare_deprecated)

        if isinstance(variable, string_types):
            result = variable

            if self._contains_vars(variable):
                # Check to see if the string we are trying to render is just referencing a single
                # var.  In this case we don't want to accidentally change the type of the variable
                # to a string by using the jinja template renderer. We just want to pass it.
                only_one = self.SINGLE_VAR.match(variable)
                if only_one:
                    var_name = only_one.group(1)
                    if var_name in self._available_variables:
                        resolved_val = self._available_variables[var_name]
                        if isinstance(resolved_val, NON_TEMPLATED_TYPES):
                            return resolved_val
                        elif resolved_val is None:
                            return C.DEFAULT_NULL_REPRESENTATION

                # Using a cache in order to prevent template calls with already templated variables
                sha1_hash = None
                if cache:
                    variable_hash = sha1(text_type(variable).encode('utf-8'))
                    options_hash = sha1(
                        (
                            text_type(preserve_trailing_newlines) +
                            text_type(escape_backslashes) +
                            text_type(fail_on_undefined) +
                            text_type(overrides)
                        ).encode('utf-8')
                    )
                    sha1_hash = variable_hash.hexdigest() + options_hash.hexdigest()
                if cache and sha1_hash in self._cached_result:
                    result = self._cached_result[sha1_hash]
                else:
                    result = self.do_template(
                        variable,
                        preserve_trailing_newlines=preserve_trailing_newlines,
                        escape_backslashes=escape_backslashes,
                        fail_on_undefined=fail_on_undefined,
                        overrides=overrides,
                        disable_lookups=disable_lookups,
                    )

                    unsafe = hasattr(result, '__UNSAFE__')
                    if convert_data and not self._no_type_regex.match(variable):
                        # if this looks like a dictionary or list, convert it to such using the safe_eval method
                        if (result.startswith("{") and not result.startswith(self.environment.variable_start_string)) or \
                                result.startswith("[") or result in ("True", "False"):
                            eval_results = safe_eval(result, locals=self._available_variables, include_exceptions=True)
                            if eval_results[1] is None:
                                result = eval_results[0]
                                if unsafe:
                                    result = wrap_var(result)
                            else:
                                # FIXME: if the safe_eval raised an error, should we do something with it?
                                pass

                    # we only cache in the case where we have a single variable
                    # name, to make sure we're not putting things which may otherwise
                    # be dynamic in the cache (filters, lookups, etc.)
                    if cache:
                        self._cached_result[sha1_hash] = result

            return result

        elif isinstance(variable, (list, tuple)):
            return [self.template(
                v,
                preserve_trailing_newlines=preserve_trailing_newlines,
                fail_on_undefined=fail_on_undefined,
                overrides=overrides,
                disable_lookups=disable_lookups,
            ) for v in variable]

        elif isinstance(variable, dict):
            d = {}
            # we don't use iteritems() here to avoid problems if the underlying dict
            # changes sizes due to the templating, which can happen with hostvars
            for k in variable.keys():
                if k not in static_vars:
                    d[k] = self.template(
                        variable[k],
                        preserve_trailing_newlines=preserve_trailing_newlines,
                        fail_on_undefined=fail_on_undefined,
                        overrides=overrides,
                        disable_lookups=disable_lookups,
                    )
                else:
                    d[k] = variable[k]
            return d

        else:
            return variable

    except AnsibleFilterError:
        if self._fail_on_filter_errors:
            raise
        else:
            return variable
def is_template(self, data):
    '''Return True when *data* (or, for containers, anything inside it)
    actually contains jinja2 templating.'''
    if isinstance(data, string_types):
        try:
            rendered = self.do_template(data, fail_on_undefined=True)
        except (AnsibleUndefinedVariable, UndefinedError):
            # An undefined variable implies template syntax was present.
            return True
        except:
            # Any other failure means we cannot call it a template.
            return False
        return rendered != data
    elif isinstance(data, (list, tuple)):
        return any(self.is_template(item) for item in data)
    elif isinstance(data, dict):
        return any(self.is_template(k) or self.is_template(data[k]) for k in data)
    return False
def templatable(self, data):
    '''
    Return True when *data* can be templated without raising any error.
    '''
    try:
        self.template(data)
    except:
        return False
    return True
def _contains_vars(self, data):
    '''
    Return True when *data* contains any jinja2 start marker (block,
    variable or comment).  Non-strings never contain vars.
    '''
    if not isinstance(data, string_types):
        return False
    markers = (self.environment.block_start_string,
               self.environment.variable_start_string,
               self.environment.comment_start_string)
    return any(marker in data for marker in markers)
def _convert_bare_variable(self, variable, bare_deprecated):
    '''
    Wrap a bare variable reference, which may carry an attribute/index
    portion (e.g. ``foo.bar`` or ``foo[0]``) or filters, in jinja2 variable
    delimiters so that it is evaluated properly.
    '''
    if isinstance(variable, string_types):
        contains_filters = "|" in variable
        first_part = variable.split("|")[0].split(".")[0].split("[")[0]
        already_delimited = self.environment.variable_start_string in variable
        if (contains_filters or first_part in self._available_variables) and not already_delimited:
            if bare_deprecated:
                display.deprecated("Using bare variables is deprecated."
                                   " Update your playbooks so that the environment value uses the full variable syntax ('%s%s%s')" %
                                   (self.environment.variable_start_string, variable, self.environment.variable_end_string), version='2.7')
            return "%s%s%s" % (self.environment.variable_start_string, variable, self.environment.variable_end_string)

    # the variable didn't meet the conditions to be converted,
    # so just return it as-is
    return variable
def _finalize(self, thing):
    '''
    jinja2 finalize hook: map None to the empty string so "None" never
    leaks into rendered output.
    '''
    if thing is None:
        return ''
    return thing
def _fail_lookup(self, name, *args, **kwargs):
    # Stand-in installed in place of _lookup when lookups are disabled for
    # a particular render; always raises.
    msg = "The lookup `%s` was found, however lookups were disabled from templating" % name
    raise AnsibleError(msg)
def _lookup(self, name, *args, **kwargs):
    '''
    Locate and run the lookup plugin *name*; unless explicitly allowed,
    results are marked unsafe so they are not re-templated.
    '''
    instance = self._lookup_loader.get(name.lower(), loader=self._loader, templar=self)
    if instance is None:
        raise AnsibleError("lookup plugin (%s) not found" % name)

    wantlist = kwargs.pop('wantlist', False)
    allow_unsafe = kwargs.pop('allow_unsafe', C.DEFAULT_ALLOW_UNSAFE_LOOKUPS)

    from ansible.utils.listify import listify_lookup_plugin_terms
    loop_terms = listify_lookup_plugin_terms(terms=args, templar=self, loader=self._loader, fail_on_undefined=True, convert_bare=False)

    # safely catch run failures per #5059
    try:
        ran = instance.run(loop_terms, variables=self._available_variables, **kwargs)
    except (AnsibleUndefinedVariable, UndefinedError) as e:
        raise AnsibleUndefinedVariable(e)
    except Exception as e:
        if self._fail_on_lookup_errors:
            raise AnsibleError("An unhandled exception occurred while running the lookup plugin '%s'. Error was a %s, "
                               "original message: %s" % (name, type(e), e))
        ran = None

    if ran and not allow_unsafe:
        from ansible.vars.unsafe_proxy import UnsafeProxy, wrap_var
        if wantlist:
            ran = wrap_var(ran)
        else:
            try:
                ran = UnsafeProxy(",".join(ran))
            except TypeError:
                # Not a joinable sequence of strings; wrap instead.
                if isinstance(ran, list) and len(ran) == 1:
                    ran = wrap_var(ran[0])
                else:
                    ran = wrap_var(ran)

        if self.cur_context:
            self.cur_context.unsafe = True

    return ran
def do_template(self, data, preserve_trailing_newlines=True, escape_backslashes=True, fail_on_undefined=None, overrides=None, disable_lookups=False):
    """Render the template string *data* through jinja2 and return the result.

    :param preserve_trailing_newlines: re-append trailing newlines jinja2 drops.
    :param escape_backslashes: let playbooks write "\\" instead of "\\\\".
    :param fail_on_undefined: raise AnsibleUndefinedVariable on undefined vars
        (defaults to ``self._fail_on_undefined_errors``); otherwise the raw
        *data* is returned unrendered.
    :param overrides: dict of jinja2 environment option overrides.
    :param disable_lookups: replace the ``lookup`` global with one that raises.
    """
    # For preserving the number of input newlines in the output (used
    # later in this method)
    data_newlines = _count_newlines_from_end(data)

    if fail_on_undefined is None:
        fail_on_undefined = self._fail_on_undefined_errors

    try:
        # allows template header overrides to change jinja2 options.
        if overrides is None:
            myenv = self.environment.overlay()
        else:
            myenv = self.environment.overlay(overrides)

        # Get jinja env overrides from the template's first line, e.g.
        # "#jinja2: trim_blocks: False" (JINJA2_OVERRIDE is the marker prefix).
        if data.startswith(JINJA2_OVERRIDE):
            eol = data.find('\n')
            line = data[len(JINJA2_OVERRIDE):eol]
            data = data[eol+1:]
            for pair in line.split(','):
                (key,val) = pair.split(':')
                key = key.strip()
                setattr(myenv, key, ast.literal_eval(val.strip()))

        # Adds Ansible custom filters and tests
        myenv.filters.update(self._get_filters())
        myenv.tests.update(self._get_tests())

        if escape_backslashes:
            # Allow users to specify backslashes in playbooks as "\\" instead of as "\\\\".
            data = _escape_backslashes(data, myenv)

        try:
            t = myenv.from_string(data)
        except TemplateSyntaxError as e:
            raise AnsibleError("template error while templating string: %s. String: %s" % (to_native(e), to_native(data)))
        except Exception as e:
            if 'recursion' in to_native(e):
                raise AnsibleError("recursive loop detected in template string: %s" % to_native(data))
            else:
                # Any other compile failure: hand back the input unrendered.
                return data

        if disable_lookups:
            t.globals['lookup'] = self._fail_lookup
        else:
            t.globals['lookup'] = self._lookup

        t.globals['finalize'] = self._finalize

        jvars = AnsibleJ2Vars(self, t.globals)

        # Keep a handle on the context so _lookup can mark it unsafe.
        self.cur_context = new_context = t.new_context(jvars, shared=True)
        rf = t.root_render_func(new_context)

        try:
            res = j2_concat(rf)
            if new_context.unsafe:
                # A lookup produced unsafe data: wrap the whole result.
                res = wrap_var(res)
        except TypeError as te:
            if 'StrictUndefined' in to_native(te):
                errmsg = "Unable to look up a name or access an attribute in template string (%s).\n" % to_native(data)
                errmsg += "Make sure your variable name does not contain invalid characters like '-': %s" % to_native(te)
                raise AnsibleUndefinedVariable(errmsg)
            else:
                display.debug("failing because of a type error, template data is: %s" % to_native(data))
                raise AnsibleError("Unexpected templating type error occurred on (%s): %s" % (to_native(data),to_native(te)))

        if preserve_trailing_newlines:
            # The low level calls above do not preserve the newline
            # characters at the end of the input data, so we calculate
            # the difference in newlines and append them to the
            # resulting output for parity
            #
            # jinja2 added a keep_trailing_newline option in 2.7 when
            # creating an Environment.  That would let us make this code
            # better (remove a single newline if
            # preserve_trailing_newlines is False).  Once we can depend on
            # that version being present, modify our code to set that when
            # initializing self.environment and remove a single trailing
            # newline here if preserve_newlines is False.
            res_newlines = _count_newlines_from_end(res)
            if data_newlines > res_newlines:
                res += self.environment.newline_sequence * (data_newlines - res_newlines)
        return res
    except (UndefinedError, AnsibleUndefinedVariable) as e:
        if fail_on_undefined:
            raise AnsibleUndefinedVariable(e)
        else:
            #TODO: return warning about undefined var
            return data
# for backwards compatibility in case anyone is using the old private method
# name directly; do not remove without a deprecation cycle.
_do_template = do_template
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3268_3 |
crossvul-python_data_bad_1739_0 | """Serve files directly from the ContentsManager."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import mimetypes
import json
import base64
from tornado import web
from IPython.html.base.handlers import IPythonHandler
class FilesHandler(IPythonHandler):
    """Serve raw files via the ContentsManager at /files/<path>.

    GET returns the file contents.  Adding ``?download=1`` additionally sets
    a Content-Disposition header so browsers save rather than display it.
    """

    @web.authenticated
    def get(self, path):
        cm = self.contents_manager
        # Hidden files are reported as missing (404) rather than forbidden,
        # so their existence is not leaked.
        if cm.is_hidden(path):
            self.log.info("Refusing to serve hidden file, via 404 Error")
            raise web.HTTPError(404)

        path = path.strip('/')
        if '/' in path:
            _, name = path.rsplit('/', 1)
        else:
            name = path
        model = cm.get(path, type='file')

        if self.get_argument("download", False):
            # FIX: `name` is user-controlled (it comes from the request URL).
            # Strip CR/LF so it cannot inject additional response headers, and
            # escape double quotes so it cannot break out of the quoted
            # filename value of the Content-Disposition header.
            safe_name = name.replace('\r', '').replace('\n', '').replace('"', '\\"')
            self.set_header('Content-Disposition',
                            'attachment; filename="%s"' % safe_name)

        # get mimetype from filename; notebooks are always served as JSON
        if name.endswith('.ipynb'):
            self.set_header('Content-Type', 'application/json')
        else:
            cur_mime = mimetypes.guess_type(name)[0]
            if cur_mime is not None:
                self.set_header('Content-Type', cur_mime)

        # The ContentsManager model says how the payload is encoded.
        if model['format'] == 'base64':
            b64_bytes = model['content'].encode('ascii')
            self.write(base64.decodestring(b64_bytes))
        elif model['format'] == 'json':
            self.write(json.dumps(model['content']))
        else:
            self.write(model['content'])
        self.flush()
# Route everything under /files/ to the handler above; the captured group is
# the ContentsManager-relative path.
default_handlers = [
    (r"/files/(.*)", FilesHandler),
]
crossvul-python_data_good_3767_1 | import os
import re
from django.conf import global_settings, settings
from django.contrib.sites.models import Site, RequestSite
from django.contrib.auth.models import User
from django.core import mail
from django.core.exceptions import SuspiciousOperation
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import QueryDict
from django.utils.encoding import force_text
from django.utils.html import escape
from django.utils.http import urlquote
from django.test import TestCase
from django.test.utils import override_settings
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import (AuthenticationForm, PasswordChangeForm,
SetPasswordForm, PasswordResetForm)
from django.contrib.auth.tests.utils import skipIfCustomUser
@override_settings(
    LANGUAGES=(
        ('en', 'English'),
    ),
    LANGUAGE_CODE='en',
    TEMPLATE_LOADERS=global_settings.TEMPLATE_LOADERS,
    TEMPLATE_DIRS=(
        os.path.join(os.path.dirname(__file__), 'templates'),
    ),
    USE_TZ=False,
    PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
)
class AuthViewsTestCase(TestCase):
    """
    Helper base class for all the follow test cases.
    """
    fixtures = ['authtestdata.json']
    urls = 'django.contrib.auth.tests.urls'

    def login(self, password='password'):
        # Log the fixture user in and assert the login actually succeeded
        # (redirect to LOGIN_REDIRECT_URL plus a populated session).
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': password,
        })
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith(settings.LOGIN_REDIRECT_URL))
        self.assertTrue(SESSION_KEY in self.client.session)

    def assertContainsEscaped(self, response, text, **kwargs):
        # Compare against the HTML-escaped form, since templates render
        # messages escaped.
        return self.assertContains(response, escape(force_text(text)), **kwargs)
@skipIfCustomUser
class AuthViewNamedURLTests(AuthViewsTestCase):
    # Uses the stock django.contrib.auth URLconf rather than the test one.
    urls = 'django.contrib.auth.urls'

    def test_named_urls(self):
        "Named URLs should be reversible"
        expected_named_urls = [
            ('login', [], {}),
            ('logout', [], {}),
            ('password_change', [], {}),
            ('password_change_done', [], {}),
            ('password_reset', [], {}),
            ('password_reset_done', [], {}),
            ('password_reset_confirm', [], {
                'uidb36': 'aaaaaaa',
                'token': '1111-aaaaa',
            }),
            ('password_reset_complete', [], {}),
        ]
        for name, args, kwargs in expected_named_urls:
            try:
                reverse(name, args=args, kwargs=kwargs)
            except NoReverseMatch:
                self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name)
@skipIfCustomUser
class PasswordResetTest(AuthViewsTestCase):
    """Tests for the password-reset flow: request email, follow link, confirm."""

    def test_email_not_found(self):
        "Error is raised if the provided email address isn't currently registered"
        response = self.client.get('/password_reset/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
        self.assertContainsEscaped(response, PasswordResetForm.error_messages['unknown'])
        self.assertEqual(len(mail.outbox), 0)

    def test_email_found(self):
        "Email is sent if a valid email address is provided for password reset"
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertTrue("http://" in mail.outbox[0].body)
        self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)

    def test_email_found_custom_from(self):
        "Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
        response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)

    def test_admin_reset(self):
        "If the reset view is marked as being for admin, the HTTP_HOST header is used for a domain override."
        response = self.client.post('/admin_password_reset/',
            {'email': 'staffmember@example.com'},
            HTTP_HOST='adminsite.com'
        )
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertTrue("http://adminsite.com" in mail.outbox[0].body)
        self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)

    def test_poisoned_http_host(self):
        "Poisoned HTTP_HOST headers can't be used for reset emails"
        # This attack is based on the way browsers handle URLs. The colon
        # should be used to separate the port, but if the URL contains an @,
        # the colon is interpreted as part of a username for login purposes,
        # making 'evil.com' the request domain. Since HTTP_HOST is used to
        # produce a meaningful reset URL, we need to be certain that the
        # HTTP_HOST header isn't poisoned. This is done as a check when get_host()
        # is invoked, but we check here as a practical consequence.
        with self.assertRaises(SuspiciousOperation):
            self.client.post('/password_reset/',
                {'email': 'staffmember@example.com'},
                HTTP_HOST='www.example:dr.frankenstein@evil.tld'
            )
        self.assertEqual(len(mail.outbox), 0)

    def test_poisoned_http_host_admin_site(self):
        "Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
        with self.assertRaises(SuspiciousOperation):
            self.client.post('/admin_password_reset/',
                {'email': 'staffmember@example.com'},
                HTTP_HOST='www.example:dr.frankenstein@evil.tld'
            )
        self.assertEqual(len(mail.outbox), 0)

    def _test_confirm_start(self):
        # Start by creating the email
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])

    def _read_signup_email(self, email):
        # Extract (full URL, path) of the reset link from the message body.
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertTrue(urlmatch is not None, "No URL found in sent email")
        return urlmatch.group(), urlmatch.groups()[0]

    def test_confirm_valid(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")

    def test_confirm_invalid(self):
        url, path = self._test_confirm_start()
        # Let's munge the token in the path, but keep the same length,
        # in case the URLconf will reject a different length.
        path = path[:-5] + ("0" * 4) + path[-1]
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_invalid_user(self):
        # Ensure that we get a 200 response for a non-existant user, not a 404
        response = self.client.get('/reset/123456-1-1/')
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_overflow_user(self):
        # Ensure that we get a 200 response for a base36 user id that overflows int
        response = self.client.get('/reset/zzzzzzzzzzzzz-1-1/')
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_invalid_post(self):
        # Same as test_confirm_invalid, but trying
        # to do a POST instead.
        url, path = self._test_confirm_start()
        path = path[:-5] + ("0" * 4) + path[-1]
        self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': ' anewpassword',
        })
        # Check the password has not been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(not u.check_password("anewpassword"))

    def test_confirm_complete(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'anewpassword'})
        # It redirects us to a 'complete' page:
        self.assertEqual(response.status_code, 302)
        # Check the password has been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(u.check_password("anewpassword"))
        # Check we can't use the link again
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_different_passwords(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'x'})
        self.assertContainsEscaped(response, SetPasswordForm.error_messages['password_mismatch'])
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CustomUserPasswordResetTest(AuthViewsTestCase):
    """Password reset should also work with a swapped-in custom user model."""
    fixtures = ['custom_user.json']

    def _test_confirm_start(self):
        # Start by creating the email
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])

    def _read_signup_email(self, email):
        # Extract (full URL, path) of the reset link from the message body.
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertTrue(urlmatch is not None, "No URL found in sent email")
        return urlmatch.group(), urlmatch.groups()[0]

    def test_confirm_valid_custom_user(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")
@skipIfCustomUser
class ChangePasswordTest(AuthViewsTestCase):
    """Tests for the logged-in password-change view."""

    def fail_login(self, password='password'):
        # Attempt a login that should be rejected and assert the error shows.
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': password,
        })
        self.assertContainsEscaped(response, AuthenticationForm.error_messages['invalid_login'])

    def logout(self):
        response = self.client.get('/logout/')

    def test_password_change_fails_with_invalid_old_password(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'donuts',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertContainsEscaped(response, PasswordChangeForm.error_messages['password_incorrect'])

    def test_password_change_fails_with_mismatched_passwords(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'donuts',
        })
        self.assertContainsEscaped(response, SetPasswordForm.error_messages['password_mismatch'])

    def test_password_change_succeeds(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/password_change/done/'))
        # Old password no longer works, new one does.
        self.fail_login()
        self.login(password='password1')

    def test_password_change_done_succeeds(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/password_change/done/'))

    def test_password_change_done_fails(self):
        # The "done" page requires an authenticated user.
        with self.settings(LOGIN_URL='/login/'):
            response = self.client.get('/password_change/done/')
            self.assertEqual(response.status_code, 302)
            self.assertTrue(response['Location'].endswith('/login/?next=/password_change/done/'))
@skipIfCustomUser
class LoginTest(AuthViewsTestCase):
    """Tests for the login view, including open-redirect protection."""

    def test_current_site_in_context_after_login(self):
        response = self.client.get(reverse('django.contrib.auth.views.login'))
        self.assertEqual(response.status_code, 200)
        if Site._meta.installed:
            site = Site.objects.get_current()
            self.assertEqual(response.context['site'], site)
            self.assertEqual(response.context['site_name'], site.name)
        else:
            self.assertIsInstance(response.context['site'], RequestSite)
        self.assertTrue(isinstance(response.context['form'], AuthenticationForm),
                        'Login form is not an AuthenticationForm')

    def test_security_check(self, password='password'):
        login_url = reverse('django.contrib.auth.views.login')

        # Those URLs should not pass the security check
        for bad_url in ('http://example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        '//example.com'):
            nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                'url': login_url,
                'next': REDIRECT_FIELD_NAME,
                'bad_url': urlquote(bad_url),
            }
            response = self.client.post(nasty_url, {
                'username': 'testclient',
                'password': password,
            })
            self.assertEqual(response.status_code, 302)
            self.assertFalse(bad_url in response['Location'],
                             "%s should be blocked" % bad_url)

        # These URLs *should* still pass the security check
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https:///',
                         '//testserver/',
                         '/url%20with%20spaces/'):  # see ticket #12534
            safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                'url': login_url,
                'next': REDIRECT_FIELD_NAME,
                'good_url': urlquote(good_url),
            }
            response = self.client.post(safe_url, {
                'username': 'testclient',
                'password': password,
            })
            self.assertEqual(response.status_code, 302)
            self.assertTrue(good_url in response['Location'],
                            "%s should be allowed" % good_url)
@skipIfCustomUser
class LoginURLSettings(AuthViewsTestCase):
    """Tests for how LOGIN_URL variants affect the login-required redirect."""

    def setUp(self):
        # Manually save/restore LOGIN_URL because each test assigns it.
        super(LoginURLSettings, self).setUp()
        self.old_LOGIN_URL = settings.LOGIN_URL

    def tearDown(self):
        super(LoginURLSettings, self).tearDown()
        settings.LOGIN_URL = self.old_LOGIN_URL

    def get_login_required_url(self, login_url):
        # Hit a login_required view and return the redirect target.
        settings.LOGIN_URL = login_url
        response = self.client.get('/login_required/')
        self.assertEqual(response.status_code, 302)
        return response['Location']

    def test_standard_login_url(self):
        login_url = '/login/'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = '/login_required/'
        self.assertEqual(login_required_url, 'http://testserver%s?%s' %
                         (login_url, querystring.urlencode('/')))

    def test_remote_login_url(self):
        login_url = 'http://remote.example.com/login'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = 'http://testserver/login_required/'
        self.assertEqual(login_required_url,
                         '%s?%s' % (login_url, querystring.urlencode('/')))

    def test_https_login_url(self):
        login_url = 'https:///login/'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = 'http://testserver/login_required/'
        self.assertEqual(login_required_url,
                         '%s?%s' % (login_url, querystring.urlencode('/')))

    def test_login_url_with_querystring(self):
        login_url = '/login/?pretty=1'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('pretty=1', mutable=True)
        querystring['next'] = '/login_required/'
        self.assertEqual(login_required_url, 'http://testserver/login/?%s' %
                         querystring.urlencode('/'))

    def test_remote_login_url_with_next_querystring(self):
        login_url = 'http://remote.example.com/login/'
        login_required_url = self.get_login_required_url('%s?next=/default/' %
                                                         login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = 'http://testserver/login_required/'
        self.assertEqual(login_required_url, '%s?%s' % (login_url,
                                                        querystring.urlencode('/')))
@skipIfCustomUser
class LogoutTest(AuthViewsTestCase):
    """Tests for the logout view, including redirect handling and its
    open-redirect protection."""

    def confirm_logged_out(self):
        self.assertTrue(SESSION_KEY not in self.client.session)

    def test_logout_default(self):
        "Logout without next_page option renders the default template"
        self.login()
        response = self.client.get('/logout/')
        self.assertContains(response, 'Logged out')
        self.confirm_logged_out()

    def test_14377(self):
        # Bug 14377
        self.login()
        response = self.client.get('/logout/')
        self.assertTrue('site' in response.context)

    def test_logout_with_overridden_redirect_url(self):
        # Bug 11223
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/somewhere/'))

        # ?next= takes precedence over the view's next_page argument.
        response = self.client.get('/logout/next_page/?next=/login/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/login/'))
        self.confirm_logged_out()

    def test_logout_with_next_page_specified(self):
        "Logout with next_page option given redirects to specified resource"
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/somewhere/'))
        self.confirm_logged_out()

    def test_logout_with_redirect_argument(self):
        "Logout with query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/?next=/login/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/login/'))
        self.confirm_logged_out()

    def test_logout_with_custom_redirect_argument(self):
        "Logout with custom query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/custom_query/?follow=/somewhere/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/somewhere/'))
        self.confirm_logged_out()

    def test_security_check(self, password='password'):
        logout_url = reverse('django.contrib.auth.views.logout')

        # Those URLs should not pass the security check
        for bad_url in ('http://example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        '//example.com'):
            nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                'url': logout_url,
                'next': REDIRECT_FIELD_NAME,
                'bad_url': urlquote(bad_url),
            }
            self.login()
            response = self.client.get(nasty_url)
            self.assertEqual(response.status_code, 302)
            self.assertFalse(bad_url in response['Location'],
                             "%s should be blocked" % bad_url)
            self.confirm_logged_out()

        # These URLs *should* still pass the security check
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https:///',
                         '//testserver/',
                         '/url%20with%20spaces/'):  # see ticket #12534
            safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                'url': logout_url,
                'next': REDIRECT_FIELD_NAME,
                'good_url': urlquote(good_url),
            }
            self.login()
            response = self.client.get(safe_url)
            self.assertEqual(response.status_code, 302)
            self.assertTrue(good_url in response['Location'],
                            "%s should be allowed" % good_url)
            self.confirm_logged_out()
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3767_1 |
crossvul-python_data_good_3683_0 | # Copyright 2012, Piston Cloud Computing, Inc.
# Copyright 2012, OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import netaddr
from nova.compute import api as compute
from nova.scheduler import filters
class AffinityFilter(filters.BaseHostFilter):
    """Base class for affinity scheduler filters.

    Provides a mapping of instance uuid -> host for every instance visible
    to the request context.
    """

    def __init__(self):
        self.compute_api = compute.API()

    def _all_hosts(self, context):
        # NOTE(review): iterates every instance on every filter invocation;
        # presumably acceptable at this code's scale -- confirm before reuse.
        all_hosts = {}
        for instance in self.compute_api.get_all(context):
            all_hosts[instance['uuid']] = instance['host']
        return all_hosts
class DifferentHostFilter(AffinityFilter):
    """Pass only hosts that do not run any of the hinted instances.

    Honors the 'different_host' scheduler hint (a uuid or list of uuids).
    """

    def host_passes(self, host_state, filter_properties):
        ctxt = filter_properties['context']
        hints = filter_properties.get('scheduler_hints') or {}
        this_host = host_state.host

        hinted = hints.get('different_host', [])
        if isinstance(hinted, basestring):
            hinted = [hinted]

        if not hinted:
            # With no different_host key, every host is acceptable.
            return True

        instance_hosts = self._all_hosts(ctxt)
        # Reject this host as soon as any hinted instance lives on it.
        for uuid in hinted:
            if instance_hosts.get(uuid) == this_host:
                return False
        return True
class SameHostFilter(AffinityFilter):
    """Pass only hosts that already run at least one of the hinted instances.

    Honors the 'same_host' scheduler hint (a uuid or list of uuids).
    """

    def host_passes(self, host_state, filter_properties):
        ctxt = filter_properties['context']
        hints = filter_properties.get('scheduler_hints') or {}
        this_host = host_state.host

        hinted = hints.get('same_host', [])
        if isinstance(hinted, basestring):
            hinted = [hinted]

        if not hinted:
            # With no same_host key, every host is acceptable.
            return True

        instance_hosts = self._all_hosts(ctxt)
        # Accept this host as soon as any hinted instance lives on it.
        for uuid in hinted:
            if instance_hosts.get(uuid) == this_host:
                return True
        return False
class SimpleCIDRAffinityFilter(AffinityFilter):
    """Pass hosts whose IP falls within the network built from the
    'build_near_host_ip' and 'cidr' scheduler hints."""

    def host_passes(self, host_state, filter_properties):
        scheduler_hints = filter_properties.get('scheduler_hints') or {}
        affinity_cidr = scheduler_hints.get('cidr', '/24')
        affinity_host_addr = scheduler_hints.get('build_near_host_ip')
        host_ip = host_state.capabilities.get('host_ip')
        if affinity_host_addr:
            # e.g. "10.0.0.5" + "/24" -> the 10.0.0.0/24 network.
            affinity_net = netaddr.IPNetwork(str.join('', (affinity_host_addr,
                                                           affinity_cidr)))

            return netaddr.IPAddress(host_ip) in affinity_net

        # We don't have an affinity host address.
        return True
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3683_0 |
crossvul-python_data_bad_3500_2 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3500_2 |
crossvul-python_data_good_2141_4 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import codecs
import jinja2
from jinja2.runtime import StrictUndefined
from jinja2.exceptions import TemplateSyntaxError
import yaml
import json
from ansible import errors
import ansible.constants as C
import time
import subprocess
import datetime
import pwd
import ast
import traceback
from ansible.utils.string_functions import count_newlines_from_end
class Globals(object):
    # Process-wide cache of filter plugin instances; populated lazily by
    # _get_filters() below.
    FILTERS = None

    def __init__(self):
        pass
def _get_filters():
    ''' return filter plugin instances '''

    # Lazily build the filter map once and cache it on Globals.
    if Globals.FILTERS is not None:
        return Globals.FILTERS

    from ansible import utils
    plugins = [ x for x in utils.plugins.filter_loader.all()]
    filters = {}
    for fp in plugins:
        filters.update(fp.filters())
    Globals.FILTERS = filters

    return Globals.FILTERS
def _get_extensions():
    """Return the list of jinja2 extension names to load.

    Extensions come from the ``jinja_extensions`` directive in ansible.cfg
    (C.DEFAULT_JINJA2_EXTENSIONS); an empty list is returned when unset.
    """
    raw = C.DEFAULT_JINJA2_EXTENSIONS
    if not raw:
        return []
    # Strip spaces so "ext1, ext2" and "ext1,ext2" are equivalent, then split
    # the directive into a list of extension names.
    return raw.replace(" ", "").split(',')
class Flags:
    # NOTE(review): appears to be a one-shot module-level flag recording that
    # a deprecated legacy template was encountered -- confirm at call sites.
    LEGACY_TEMPLATE_WARNING = False
# TODO: refactor this file
FILTER_PLUGINS = None
# Matches subscript access of the form "name[123]" (identifier plus index).
_LISTRE = re.compile(r"(\w+)\[(\d+)\]")
def lookup(name, *args, **kwargs):
    """Locate and run lookup plugin *name*.

    Returns a comma-joined string of the plugin's results, or None when the
    plugin ran but produced nothing.  Raises AnsibleError if the plugin is
    not found.
    """
    from ansible import utils
    instance = utils.plugins.lookup_loader.get(name.lower(), basedir=kwargs.get('basedir',None))
    vars = kwargs.get('vars', None)

    if instance is not None:
        # safely catch run failures per #5059
        try:
            ran = instance.run(*args, inject=vars, **kwargs)
        except errors.AnsibleError:
            # Plugin raised this on purpose
            raise
        except Exception, e:
            # NOTE(review): all other plugin failures are silently mapped to
            # an empty result -- confirm this best-effort behavior is wanted.
            ran = None
        if ran:
            ran = ",".join(ran)
        return ran
    else:
        raise errors.AnsibleError("lookup plugin (%s) not found" % name)
def template(basedir, varname, vars, lookup_fatal=True, depth=0, expand_lists=True, convert_bare=False, fail_on_undefined=False, filter_fatal=True):
    ''' templates a data structure by traversing it and substituting for other data structures '''
    from ansible import utils

    try:
        if convert_bare and isinstance(varname, basestring):
            # A bare variable name ("foo") is promoted to "{{foo}}" when it
            # refers to a known variable and has no template markers already.
            first_part = varname.split(".")[0].split("[")[0]
            if first_part in vars and '{{' not in varname and '$' not in varname:
                varname = "{{%s}}" % varname

        if isinstance(varname, basestring):
            if '{{' in varname or '{%' in varname:
                varname = template_from_string(basedir, varname, vars, fail_on_undefined)

                # A rendered result that looks like a dict/list literal (but
                # not a remaining template) is evaluated back into data.
                if (varname.startswith("{") and not varname.startswith("{{")) or varname.startswith("["):
                    eval_results = utils.safe_eval(varname, locals=vars, include_exceptions=True)
                    if eval_results[1] is None:
                        varname = eval_results[0]

            return varname

        elif isinstance(varname, (list, tuple)):
            # Template each element recursively.
            return [template(basedir, v, vars, lookup_fatal, depth, expand_lists, fail_on_undefined=fail_on_undefined) for v in varname]
        elif isinstance(varname, dict):
            # Template each value recursively (keys are left untouched).
            d = {}
            for (k, v) in varname.iteritems():
                d[k] = template(basedir, v, vars, lookup_fatal, depth, expand_lists, fail_on_undefined=fail_on_undefined)
            return d
        else:
            return varname
    except errors.AnsibleFilterError:
        if filter_fatal:
            raise
        else:
            return varname
class _jinja2_vars(object):
    '''
    Helper class to template all variable content before jinja2 sees it.
    This is done by hijacking the variable storage that jinja2 uses, and
    overriding __contains__ and __getitem__ to look like a dict.  Added bonus
    is avoiding duplicating the large hashes that inject tends to be.
    To facilitate using builtin jinja2 things like range, globals are handled
    here.
    extras is a list of locals to also search for variables.
    '''

    def __init__(self, basedir, vars, globals, fail_on_undefined, *extras):
        self.basedir = basedir
        self.vars = vars
        self.globals = globals
        self.fail_on_undefined = fail_on_undefined
        self.extras = extras

    def __contains__(self, k):
        # Lookup order mirrors __getitem__: vars, then extras, then globals.
        if k in self.vars:
            return True
        for i in self.extras:
            if k in i:
                return True
        if k in self.globals:
            return True
        return False

    def __getitem__(self, varname):
        if varname not in self.vars:
            for i in self.extras:
                if varname in i:
                    return i[varname]
            if varname in self.globals:
                return self.globals[varname]
            else:
                raise KeyError("undefined variable: %s" % varname)
        var = self.vars[varname]
        # HostVars is special, return it as-is (a dict subclass whose exact
        # type is not plain dict).
        if isinstance(var, dict) and type(var) != dict:
            return var
        else:
            # Everything else is templated before jinja2 gets to see it.
            return template(self.basedir, var, self.vars, fail_on_undefined=self.fail_on_undefined)

    def add_locals(self, locals):
        '''
        If locals are provided, create a copy of self containing those
        locals in addition to what is already in this variable proxy.
        '''
        if locals is None:
            return self
        return _jinja2_vars(self.basedir, self.vars, self.globals, self.fail_on_undefined, locals, *self.extras)
class J2Template(jinja2.environment.Template):
    '''
    This class prevents Jinja2 from running _jinja2_vars through dict()
    Without this, {% include %} and similar will create new contexts unlike
    the special one created in template_from_file. This ensures they are all
    alike, with the exception of potential locals.
    '''
    def new_context(self, vars=None, shared=False, locals=None):
        # Keep the _jinja2_vars proxy intact instead of letting jinja2 copy
        # it into a plain dict.
        return jinja2.runtime.Context(self.environment, vars.add_locals(locals), self.name, self.blocks)
def template_from_file(basedir, path, vars, vault_password=None):
    ''' run a file through the templating engine '''
    fail_on_undefined = C.DEFAULT_UNDEFINED_VAR_BEHAVIOR
    # Local import to avoid a circular dependency at module load time.
    from ansible import utils
    realpath = utils.path_dwim(basedir, path)
    # Allow {% include %} to resolve relative to both the play basedir and
    # the template's own directory.
    loader=jinja2.FileSystemLoader([basedir,os.path.dirname(realpath)])
    def my_lookup(*args, **kwargs):
        # Bind the current vars/basedir into every lookup() call made from
        # inside the template.
        kwargs['vars'] = vars
        return lookup(*args, basedir=basedir, **kwargs)
    def my_finalize(thing):
        # Render None as the empty string rather than the text 'None'.
        return thing if thing is not None else ''
    environment = jinja2.Environment(loader=loader, trim_blocks=True, extensions=_get_extensions())
    environment.filters.update(_get_filters())
    environment.globals['lookup'] = my_lookup
    environment.globals['finalize'] = my_finalize
    if fail_on_undefined:
        environment.undefined = StrictUndefined
    try:
        data = codecs.open(realpath, encoding="utf8").read()
    except UnicodeDecodeError:
        raise errors.AnsibleError("unable to process as utf-8: %s" % realpath)
    except:
        raise errors.AnsibleError("unable to read %s" % realpath)
    # J2Template prevents Jinja2 from flattening the _jinja2_vars proxy
    # (see the class docstring above).
    environment.template_class = J2Template
    try:
        t = environment.from_string(data)
    except TemplateSyntaxError, e:
        # Throw an exception which includes a more user friendly error message
        values = {'name': realpath, 'lineno': e.lineno, 'error': str(e)}
        msg = 'file: %(name)s, line number: %(lineno)s, error: %(error)s' % \
            values
        error = errors.AnsibleError(msg)
        raise error
    # Copy before mutating so the caller's vars dict is not polluted with
    # the template_* bookkeeping entries below.
    vars = vars.copy()
    try:
        template_uid = pwd.getpwuid(os.stat(realpath).st_uid).pw_name
    except:
        # Fall back to the numeric uid when it has no passwd entry.
        template_uid = os.stat(realpath).st_uid
    vars['template_host'] = os.uname()[1]
    vars['template_path'] = realpath
    vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(realpath))
    vars['template_uid'] = template_uid
    vars['template_fullpath'] = os.path.abspath(realpath)
    vars['template_run_date'] = datetime.datetime.now()
    # Build the 'ansible_managed' banner; strftime runs over the formatted
    # string, so DEFAULT_MANAGED_STR may contain % date directives too.
    managed_default = C.DEFAULT_MANAGED_STR
    managed_str = managed_default.format(
        host = vars['template_host'],
        uid = vars['template_uid'],
        file = vars['template_path']
    )
    vars['ansible_managed'] = time.strftime(
        managed_str,
        time.localtime(os.path.getmtime(realpath))
    )
    # This line performs deep Jinja2 magic that uses the _jinja2_vars object for vars
    # Ideally, this could use some API where setting shared=True and the object won't get
    # passed through dict(o), but I have not found that yet.
    try:
        res = jinja2.utils.concat(t.root_render_func(t.new_context(_jinja2_vars(basedir, vars, t.globals, fail_on_undefined), shared=True)))
    except jinja2.exceptions.UndefinedError, e:
        raise errors.AnsibleUndefinedVariable("One or more undefined variables: %s" % str(e))
    except jinja2.exceptions.TemplateNotFound, e:
        # Throw an exception which includes a more user friendly error message
        # This likely will happen for included sub-template. Not that besides
        # pure "file not found" it may happen due to Jinja2's "security"
        # checks on path.
        values = {'name': realpath, 'subname': str(e)}
        msg = 'file: %(name)s, error: Cannot find/not allowed to load (include) template %(subname)s' % \
            values
        error = errors.AnsibleError(msg)
        raise error
    # The low level calls above do not preserve the newline
    # characters at the end of the input data, so we use the
    # calculate the difference in newlines and append them
    # to the resulting output for parity
    res_newlines = count_newlines_from_end(res)
    data_newlines = count_newlines_from_end(data)
    if data_newlines > res_newlines:
        res += '\n' * (data_newlines - res_newlines)
    if isinstance(res, unicode):
        # do not try to re-template a unicode string
        result = res
    else:
        result = template(basedir, res, vars)
    return result
def template_from_string(basedir, data, vars, fail_on_undefined=False):
    ''' run a string through the (Jinja2) templating engine '''
    try:
        if type(data) == str:
            data = unicode(data, 'utf-8')
        def my_finalize(thing):
            # Render None as the empty string rather than the text 'None'.
            return thing if thing is not None else ''
        environment = jinja2.Environment(trim_blocks=True, undefined=StrictUndefined, extensions=_get_extensions(), finalize=my_finalize)
        environment.filters.update(_get_filters())
        environment.template_class = J2Template
        if '_original_file' in vars:
            # Resolve lookups relative to the originating file; prefer its
            # sibling 'files' directory when one exists.
            basedir = os.path.dirname(vars['_original_file'])
            filesdir = os.path.abspath(os.path.join(basedir, '..', 'files'))
            if os.path.exists(filesdir):
                basedir = filesdir
        # 6227
        # NOTE(review): calling .decode() on a unicode object first encodes
        # it with the default codec; the except clause swallows the
        # resulting UnicodeEncodeError (Python 2 quirk referenced by issue
        # 6227 above).  Left untouched.
        if isinstance(data, unicode):
            try:
                data = data.decode('utf-8')
            except UnicodeEncodeError, e:
                pass
        try:
            t = environment.from_string(data)
        except Exception, e:
            if 'recursion' in str(e):
                raise errors.AnsibleError("recursive loop detected in template string: %s" % data)
            else:
                # Not templatable as-is; hand the raw string back.
                return data
        def my_lookup(*args, **kwargs):
            # Bind the current vars/basedir into every lookup() call.
            kwargs['vars'] = vars
            return lookup(*args, basedir=basedir, **kwargs)
        t.globals['lookup'] = my_lookup
        t.globals['finalize'] = my_finalize
        # Render through the _jinja2_vars proxy so variable resolution is
        # lazy and shared with any included contexts (see J2Template).
        jvars =_jinja2_vars(basedir, vars, t.globals, fail_on_undefined)
        new_context = t.new_context(jvars, shared=True)
        rf = t.root_render_func(new_context)
        try:
            res = jinja2.utils.concat(rf)
        except TypeError, te:
            if 'StrictUndefined' in str(te):
                raise errors.AnsibleUndefinedVariable(
                    "Unable to look up a name or access an attribute in template string. " + \
                    "Make sure your variable name does not contain invalid characters like '-'."
                )
            else:
                raise errors.AnsibleError("an unexpected type error occured. Error was %s" % te)
        return res
    except (jinja2.exceptions.UndefinedError, errors.AnsibleUndefinedVariable):
        if fail_on_undefined:
            raise
        else:
            # Undefined variables are tolerated by default: return the
            # original string untemplated.
            return data
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_2141_4 |
crossvul-python_data_bad_3766_2 | import urlparse
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, QueryDict
from django.template.response import TemplateResponse
from django.utils.http import base36_to_int
from django.utils.translation import ugettext as _
from django.views.decorators.debug import sensitive_post_parameters
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
# Avoid shadowing the login() and logout() views below.
from django.contrib.auth import REDIRECT_FIELD_NAME, login as auth_login, logout as auth_logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import AuthenticationForm, PasswordResetForm, SetPasswordForm, PasswordChangeForm
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.models import get_current_site
@sensitive_post_parameters()
@csrf_protect
@never_cache
def login(request, template_name='registration/login.html',
          redirect_field_name=REDIRECT_FIELD_NAME,
          authentication_form=AuthenticationForm,
          current_app=None, extra_context=None):
    """
    Displays the login form and handles the login action.

    The redirect target taken from the request is validated before use:
    it must point at this host (or be host-relative) and use an http(s)
    scheme.  Anything else -- e.g. ``next=javascript:...`` (which has no
    netloc and previously slipped through the host check) or
    ``next=//evil.example`` -- is replaced with
    ``settings.LOGIN_REDIRECT_URL`` to prevent open redirects.
    """
    redirect_to = request.REQUEST.get(redirect_field_name, '')

    if request.method == "POST":
        form = authentication_form(data=request.POST)
        if form.is_valid():
            # Use default setting if redirect_to is empty
            if not redirect_to:
                redirect_to = settings.LOGIN_REDIRECT_URL
            else:
                parts = urlparse.urlparse(redirect_to)
                scheme, netloc = parts[0], parts[1]
                # Heavier security check -- don't allow redirection to a
                # different host.
                if netloc and netloc != request.get_host():
                    redirect_to = settings.LOGIN_REDIRECT_URL
                # Don't allow non-http(s) schemes such as 'javascript:'
                # or 'data:'; these carry no netloc, so the host check
                # above cannot catch them.
                elif scheme and scheme not in ('http', 'https'):
                    redirect_to = settings.LOGIN_REDIRECT_URL

            # Okay, security checks complete. Log the user in.
            auth_login(request, form.get_user())

            if request.session.test_cookie_worked():
                request.session.delete_test_cookie()

            return HttpResponseRedirect(redirect_to)
    else:
        form = authentication_form(request)

    request.session.set_test_cookie()

    current_site = get_current_site(request)

    context = {
        'form': form,
        redirect_field_name: redirect_to,
        'site': current_site,
        'site_name': current_site.name,
    }
    if extra_context is not None:
        context.update(extra_context)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
def logout(request, next_page=None,
           template_name='registration/logged_out.html',
           redirect_field_name=REDIRECT_FIELD_NAME,
           current_app=None, extra_context=None):
    """
    Logs out the user and displays 'You are logged out' message.

    A redirect target supplied in the request is only followed when it
    stays on this host and uses an http(s) (or relative) scheme; this
    closes the open-redirect hole where scheme-only URLs such as
    ``javascript:...`` have no netloc and passed the host check.
    """
    auth_logout(request)
    redirect_to = request.REQUEST.get(redirect_field_name, '')
    if redirect_to:
        parts = urlparse.urlparse(redirect_to)
        scheme, netloc = parts[0], parts[1]
        # Security check -- don't allow redirection to a different host,
        # and reject non-http(s) schemes outright.
        if (not scheme or scheme in ('http', 'https')) and \
                not (netloc and netloc != request.get_host()):
            return HttpResponseRedirect(redirect_to)
    if next_page is None:
        current_site = get_current_site(request)
        context = {
            'site': current_site,
            'site_name': current_site.name,
            'title': _('Logged out')
        }
        if extra_context is not None:
            context.update(extra_context)
        return TemplateResponse(request, template_name, context,
                                current_app=current_app)
    else:
        # Redirect to this page until the session has been cleared.
        return HttpResponseRedirect(next_page or request.path)
def logout_then_login(request, login_url=None, current_app=None, extra_context=None):
    """
    Logs out the user if he is logged in, then redirects to the log-in page.
    """
    return logout(request, login_url or settings.LOGIN_URL,
                  current_app=current_app, extra_context=extra_context)
def redirect_to_login(next, login_url=None,
                      redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Redirects the user to the login page, passing the given 'next' page
    in the query string (unless redirect_field_name is falsy).
    """
    parts = list(urlparse.urlparse(login_url or settings.LOGIN_URL))
    if redirect_field_name:
        query = QueryDict(parts[4], mutable=True)
        query[redirect_field_name] = next
        parts[4] = query.urlencode(safe='/')
    return HttpResponseRedirect(urlparse.urlunparse(parts))
# 4 views for password reset:
# - password_reset sends the mail
# - password_reset_done shows a success message for the above
# - password_reset_confirm checks the link the user clicked and
# prompts for a new password
# - password_reset_complete shows a success message for the above
@csrf_protect
def password_reset(request, is_admin_site=False,
                   template_name='registration/password_reset_form.html',
                   email_template_name='registration/password_reset_email.html',
                   subject_template_name='registration/password_reset_subject.txt',
                   password_reset_form=PasswordResetForm,
                   token_generator=default_token_generator,
                   post_reset_redirect=None,
                   from_email=None,
                   current_app=None,
                   extra_context=None):
    """Display the password-reset form and mail a reset link on success."""
    if post_reset_redirect is None:
        post_reset_redirect = reverse('django.contrib.auth.views.password_reset_done')
    if request.method == "POST":
        form = password_reset_form(request.POST)
        if form.is_valid():
            opts = {
                'use_https': request.is_secure(),
                'token_generator': token_generator,
                'from_email': from_email,
                'email_template_name': email_template_name,
                'subject_template_name': subject_template_name,
                'request': request,
            }
            if is_admin_site:
                # The admin site sends the mail for the requesting host.
                opts['domain_override'] = request.META['HTTP_HOST']
            form.save(**opts)
            return HttpResponseRedirect(post_reset_redirect)
    else:
        form = password_reset_form()
    context = {'form': form}
    if extra_context is not None:
        context.update(extra_context)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
def password_reset_done(request,
                        template_name='registration/password_reset_done.html',
                        current_app=None, extra_context=None):
    """Show the 'reset mail sent' confirmation page."""
    context = dict(extra_context) if extra_context is not None else {}
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
# No csrf_protect here: the token embedded in the URL is unguessable.
@sensitive_post_parameters()
@never_cache
def password_reset_confirm(request, uidb36=None, token=None,
                           template_name='registration/password_reset_confirm.html',
                           token_generator=default_token_generator,
                           set_password_form=SetPasswordForm,
                           post_reset_redirect=None,
                           current_app=None, extra_context=None):
    """
    View that checks the hash in a password reset link and presents a
    form for entering a new password.
    """
    assert uidb36 is not None and token is not None  # checked by URLconf
    if post_reset_redirect is None:
        post_reset_redirect = reverse('django.contrib.auth.views.password_reset_complete')
    user = None
    try:
        user = User.objects.get(id=base36_to_int(uidb36))
    except (ValueError, User.DoesNotExist):
        pass
    validlink = user is not None and token_generator.check_token(user, token)
    if validlink:
        if request.method == 'POST':
            form = set_password_form(user, request.POST)
            if form.is_valid():
                form.save()
                return HttpResponseRedirect(post_reset_redirect)
        else:
            form = set_password_form(None)
    else:
        form = None
    context = {'form': form, 'validlink': validlink}
    if extra_context is not None:
        context.update(extra_context)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
def password_reset_complete(request,
                            template_name='registration/password_reset_complete.html',
                            current_app=None, extra_context=None):
    """Show the 'password reset complete' page with a link to log in."""
    context = dict(extra_context) if extra_context is not None else {}
    # extra_context may override the login URL, matching historical behavior.
    context.setdefault('login_url', settings.LOGIN_URL)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
@sensitive_post_parameters()
@csrf_protect
@login_required
def password_change(request,
                    template_name='registration/password_change_form.html',
                    post_change_redirect=None,
                    password_change_form=PasswordChangeForm,
                    current_app=None, extra_context=None):
    """Let the logged-in user change their password."""
    if post_change_redirect is None:
        post_change_redirect = reverse('django.contrib.auth.views.password_change_done')
    if request.method == "POST":
        form = password_change_form(user=request.user, data=request.POST)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect(post_change_redirect)
    else:
        form = password_change_form(user=request.user)
    context = {'form': form}
    if extra_context is not None:
        context.update(extra_context)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
@login_required
def password_change_done(request,
                         template_name='registration/password_change_done.html',
                         current_app=None, extra_context=None):
    """Show the 'password changed' confirmation page."""
    context = {} if extra_context is None else dict(extra_context)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3766_2 |
crossvul-python_data_bad_802_0 | # -*- coding: utf-8 -*-
# Copyright 2014-2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import socket
import random
import smtplib
import email.utils
import string
import twisted.python.log
import cgi
import urllib
import email.utils
from sydent.util import time_msec
logger = logging.getLogger(__name__)
def sendEmail(sydent, templateName, mailTo, substitutions):
    """Render the template named by *templateName* in the config and send
    it to *mailTo* via the configured SMTP server.

    :param sydent: the Sydent instance (provides .cfg)
    :param templateName: config key under [email] naming the template file
    :param mailTo: destination address; untrusted input, validated below
    :param substitutions: dict of template substitution values (str)

    :raises EmailAddressException: if either address fails to parse, or if
        *mailTo* does not round-trip through the parser unchanged
        (a sign of attempted header injection).
    :raises EmailSendException: if the SMTP conversation fails; the
        original exception is attached as ``.cause``.
    """
    mailFrom = sydent.cfg.get('email', 'email.from')
    mailTemplateFile = sydent.cfg.get('email', templateName)

    myHostname = sydent.cfg.get('email', 'email.hostname')
    if myHostname == '':
        myHostname = socket.getfqdn()
    midRandom = "".join([random.choice(string.ascii_letters) for _ in range(16)])
    messageid = "<%d%s@%s>" % (time_msec(), midRandom, myHostname)

    # SECURITY: validate the addresses *before* they are substituted into
    # the raw message template.  The 'to'/'from' substitutions end up in
    # message headers, so an address containing CR/LF or extra header text
    # (e.g. "a@b\r\nBcc: victim@c") would otherwise inject SMTP headers.
    # Requiring the address to round-trip through parseaddr (modulo case)
    # rejects any such payload.
    rawFrom = email.utils.parseaddr(mailFrom)[1]
    rawTo = email.utils.parseaddr(mailTo)[1]
    if rawFrom == '' or rawTo == '':
        logger.info("Couldn't parse from / to address %s / %s", mailFrom, mailTo)
        raise EmailAddressException()
    if rawTo.lower() != mailTo.lower():
        logger.info("Parsed to address does not match the given address: %s / %s", rawTo, mailTo)
        raise EmailAddressException()

    allSubstitutions = {}
    allSubstitutions.update(substitutions)
    allSubstitutions.update({
        'messageid': messageid,
        'date': email.utils.formatdate(localtime=False),
        'to': mailTo,
        'from': mailFrom,
    })

    # Provide raw, HTML-escaped and URL-quoted variants of every value.
    for k,v in allSubstitutions.items():
        allSubstitutions[k] = v.decode('utf8')
        allSubstitutions[k+"_forhtml"] = cgi.escape(v.decode('utf8'))
        allSubstitutions[k+"_forurl"] = urllib.quote(v)

    mailString = open(mailTemplateFile).read() % allSubstitutions

    mailServer = sydent.cfg.get('email', 'email.smtphost')
    mailPort = sydent.cfg.get('email', 'email.smtpport')
    mailUsername = sydent.cfg.get('email', 'email.smtpusername')
    mailPassword = sydent.cfg.get('email', 'email.smtppassword')
    mailTLSMode = sydent.cfg.get('email', 'email.tlsmode')
    logger.info("Sending mail to %s with mail server: %s" % (mailTo, mailServer,))
    try:
        if mailTLSMode == 'SSL' or mailTLSMode == 'TLS':
            smtp = smtplib.SMTP_SSL(mailServer, mailPort, myHostname)
        elif mailTLSMode == 'STARTTLS':
            smtp = smtplib.SMTP(mailServer, mailPort, myHostname)
            smtp.starttls()
        else:
            smtp = smtplib.SMTP(mailServer, mailPort, myHostname)
        if mailUsername != '':
            smtp.login(mailUsername, mailPassword)
        smtp.sendmail(rawFrom, rawTo, mailString.encode('utf-8'))
        smtp.quit()
    except Exception as origException:
        twisted.python.log.err()
        ese = EmailSendException()
        # Keep the underlying failure for callers that want the detail.
        ese.cause = origException
        raise ese
class EmailAddressException(Exception):
    """Raised when an email address cannot be parsed or fails validation."""
class EmailSendException(Exception):
    """Raised when the SMTP conversation fails.

    The sender attaches the triggering exception as ``.cause``.
    """
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_802_0 |
crossvul-python_data_bad_2078_3 | #!/usr/bin/env python
"""
f2py2e - Fortran to Python C/API generator. 2nd Edition.
See __usage__ below.
Copyright 1999--2011 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@cens.ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/05/06 08:31:19 $
Pearu Peterson
"""
from __future__ import division, absolute_import, print_function
import sys
import os
import pprint
import re
from . import crackfortran
from . import rules
from . import cb_rules
from . import auxfuncs
from . import cfuncs
from . import f90mod_rules
from . import __version__
# Version string reported by -v and interpolated into the usage text.
f2py_version = __version__.version
# Error text goes straight to stderr; informational output goes through
# auxfuncs.outmess (which consults the verbosity options).
errmess = sys.stderr.write
#outmess=sys.stdout.write
show = pprint.pprint
outmess = auxfuncs.outmess
try:
    from numpy import __version__ as numpy_version
except ImportError:
    # Still allow f2py to report its own version when numpy is absent.
    numpy_version = 'N/A'
__usage__ = """\
Usage:
1) To construct extension module sources:
f2py [<options>] <fortran files> [[[only:]||[skip:]] \\
<fortran functions> ] \\
[: <fortran files> ...]
2) To compile fortran files and build extension modules:
f2py -c [<options>, <build_flib options>, <extra options>] <fortran files>
3) To generate signature files:
f2py -h <filename.pyf> ...< same options as in (1) >
Description: This program generates a Python C/API file (<modulename>module.c)
that contains wrappers for given fortran functions so that they
can be called from Python. With the -c option the corresponding
extension modules are built.
Options:
--2d-numpy Use numpy.f2py tool with NumPy support. [DEFAULT]
--2d-numeric Use f2py2e tool with Numeric support.
--2d-numarray Use f2py2e tool with Numarray support.
--g3-numpy Use 3rd generation f2py from the separate f2py package.
[NOT AVAILABLE YET]
-h <filename> Write signatures of the fortran routines to file <filename>
and exit. You can then edit <filename> and use it instead
of <fortran files>. If <filename>==stdout then the
signatures are printed to stdout.
<fortran functions> Names of fortran routines for which Python C/API
functions will be generated. Default is all that are found
in <fortran files>.
<fortran files> Paths to fortran/signature files that will be scanned for
<fortran functions> in order to determine their signatures.
skip: Ignore fortran functions that follow until `:'.
only: Use only fortran functions that follow until `:'.
: Get back to <fortran files> mode.
-m <modulename> Name of the module; f2py generates a Python/C API
file <modulename>module.c or extension module <modulename>.
Default is 'untitled'.
--[no-]lower Do [not] lower the cases in <fortran files>. By default,
--lower is assumed with -h key, and --no-lower without -h key.
--build-dir <dirname> All f2py generated files are created in <dirname>.
Default is tempfile.mktemp().
--overwrite-signature Overwrite existing signature file.
--[no-]latex-doc Create (or not) <modulename>module.tex.
Default is --no-latex-doc.
--short-latex Create 'incomplete' LaTeX document (without commands
\\documentclass, \\tableofcontents, and \\begin{document},
\\end{document}).
--[no-]rest-doc Create (or not) <modulename>module.rst.
Default is --no-rest-doc.
--debug-capi Create C/API code that reports the state of the wrappers
during runtime. Useful for debugging.
--[no-]wrap-functions Create Fortran subroutine wrappers to Fortran 77
functions. --wrap-functions is default because it ensures
maximum portability/compiler independence.
--include-paths <path1>:<path2>:... Search include files from the given
directories.
--help-link [..] List system resources found by system_info.py. See also
--link-<resource> switch below. [..] is optional list
of resources names. E.g. try 'f2py --help-link lapack_opt'.
--quiet Run quietly.
--verbose Run with extra verbosity.
-v Print f2py version ID and exit.
numpy.distutils options (only effective with -c):
--fcompiler= Specify Fortran compiler type by vendor
--compiler= Specify C compiler type (as defined by distutils)
--help-fcompiler List available Fortran compilers and exit
--f77exec= Specify the path to F77 compiler
--f90exec= Specify the path to F90 compiler
--f77flags= Specify F77 compiler flags
--f90flags= Specify F90 compiler flags
--opt= Specify optimization flags
--arch= Specify architecture specific optimization flags
--noopt Compile without optimization
--noarch Compile without arch-dependent optimization
--debug Compile with debugging information
Extra options (only effective with -c):
--link-<resource> Link extension module with <resource> as defined
by numpy.distutils/system_info.py. E.g. to link
with optimized LAPACK libraries (vecLib on MacOSX,
ATLAS elsewhere), use --link-lapack_opt.
See also --help-link switch.
-L/path/to/lib/ -l<libname>
-D<define> -U<name>
-I/path/to/include/
<filename>.o <filename>.so <filename>.a
Using the following macros may be required with non-gcc Fortran
compilers:
-DPREPEND_FORTRAN -DNO_APPEND_FORTRAN -DUPPERCASE_FORTRAN
-DUNDERSCORE_G77
When using -DF2PY_REPORT_ATEXIT, a performance report of F2PY
interface is printed out at exit (platforms: Linux).
When using -DF2PY_REPORT_ON_ARRAY_COPY=<int>, a message is
sent to stderr whenever F2PY interface makes a copy of an
array. Integer <int> sets the threshold for array sizes when
a message should be shown.
Version: %s
numpy Version: %s
Requires: Python 2.3 or higher.
License: NumPy license (see LICENSE.txt in the NumPy source code)
Copyright 1999 - 2011 Pearu Peterson all rights reserved.
http://cens.ioc.ee/projects/f2py2e/"""%(f2py_version, numpy_version)
def scaninputline(inputline):
    """Parse an f2py command line that has already been split into tokens.

    Returns ``(files, options)`` where *files* is the list of readable
    input files and *options* is the dict consumed by callcrackfortran()
    and the rules machinery.  Exits the process for -v, unknown options,
    missing input, or an existing signature file without
    --overwrite-signature.
    """
    files, funcs, skipfuncs, onlyfuncs, debug=[], [], [], [], []
    # f is the file-collection state (1=collect files, 0=only:, -1=skip:);
    # f2..f9 are one-shot flags meaning "the next token is the value of X".
    f, f2, f3, f4, f5, f6, f7, f8, f9=1, 0, 0, 0, 0, 0, 0, 0, 0
    verbose = 1
    dolc=-1
    dolatexdoc = 0
    dorestdoc = 0
    wrapfuncs = 1
    buildpath = '.'
    include_paths = []
    signsfile, modulename=None, None
    options = {'buildpath':buildpath,
               'coutput': None,
               'f2py_wrapper_output': None}
    for l in inputline:
        if l=='': pass
        elif l=='only:': f=0
        elif l=='skip:': f=-1
        elif l==':': f=1;f4=0
        elif l[:8]=='--debug-': debug.append(l[8:])
        elif l=='--lower': dolc=1
        elif l=='--build-dir': f6=1
        elif l=='--no-lower': dolc=0
        elif l=='--quiet': verbose = 0
        elif l=='--verbose': verbose += 1
        elif l=='--latex-doc': dolatexdoc=1
        elif l=='--no-latex-doc': dolatexdoc=0
        elif l=='--rest-doc': dorestdoc=1
        elif l=='--no-rest-doc': dorestdoc=0
        elif l=='--wrap-functions': wrapfuncs=1
        elif l=='--no-wrap-functions': wrapfuncs=0
        elif l=='--short-latex': options['shortlatex']=1
        elif l=='--coutput': f8=1
        elif l=='--f2py-wrapper-output': f9=1
        elif l=='--overwrite-signature': options['h-overwrite']=1
        elif l=='-h': f2=1
        elif l=='-m': f3=1
        elif l[:2]=='-v':
            print(f2py_version)
            sys.exit()
        elif l=='--show-compilers':
            f5=1
        elif l[:8]=='-include':
            cfuncs.outneeds['userincludes'].append(l[9:-1])
            cfuncs.userincludes[l[9:-1]]='#include '+l[8:]
        # BUG FIX: the original used `l[:15] in '--include_paths'`, i.e. a
        # *substring* test, so any short token that happens to be a
        # substring of the flag text (a file named 'in' or 'path', or a
        # bare '-') was silently consumed as the option.  Compare for
        # equality instead; the option's value is taken from the next
        # token (f7), so the flag is always a standalone word.
        elif l=='--include_paths':
            outmess('f2py option --include_paths is deprecated, use --include-paths instead.\n')
            f7=1
        elif l=='--include-paths':
            f7=1
        elif l[0]=='-':
            errmess('Unknown option %s\n'%repr(l))
            sys.exit()
        elif f2: f2=0;signsfile=l
        elif f3: f3=0;modulename=l
        elif f6: f6=0;buildpath=l
        elif f7: f7=0;include_paths.extend(l.split(os.pathsep))
        elif f8: f8=0;options["coutput"]=l
        elif f9: f9=0;options["f2py_wrapper_output"]=l
        elif f==1:
            try:
                # Probe the file for readability; unreadable inputs are
                # skipped with a warning rather than aborting the run.
                open(l).close()
                files.append(l)
            except IOError as detail:
                errmess('IOError: %s. Skipping file "%s".\n'%(str(detail), l))
        elif f==-1: skipfuncs.append(l)
        elif f==0: onlyfuncs.append(l)
    if not f5 and not files and not modulename:
        print(__usage__)
        sys.exit()
    if not os.path.isdir(buildpath):
        # NOTE(review): 'if not verbose' looks inverted, but outmess itself
        # consults the verbosity options, so this is left untouched.
        if not verbose:
            outmess('Creating build directory %s'%(buildpath))
        os.mkdir(buildpath)
    if signsfile:
        signsfile = os.path.join(buildpath, signsfile)
    if signsfile and os.path.isfile(signsfile) and 'h-overwrite' not in options:
        errmess('Signature file "%s" exists!!! Use --overwrite-signature to overwrite.\n'%(signsfile))
        sys.exit()
    options['debug']=debug
    options['verbose']=verbose
    if dolc==-1 and not signsfile: options['do-lower']=0
    else: options['do-lower']=dolc
    if modulename: options['module']=modulename
    if signsfile: options['signsfile']=signsfile
    if onlyfuncs: options['onlyfuncs']=onlyfuncs
    if skipfuncs: options['skipfuncs']=skipfuncs
    options['dolatexdoc'] = dolatexdoc
    options['dorestdoc'] = dorestdoc
    options['wrapfuncs'] = wrapfuncs
    options['buildpath']=buildpath
    options['include_paths']=include_paths
    return files, options
def callcrackfortran(files, options):
    """Run crackfortran over *files*, honoring *options*.

    Optionally writes a .pyf signature file, then fills in per-module
    'coutput' / 'f2py_wrapper_output' names and returns the block list.
    """
    rules.options = options
    crackfortran.debug = options['debug']
    crackfortran.verbose = options['verbose']
    if 'module' in options:
        crackfortran.f77modulename = options['module']
    if 'skipfuncs' in options:
        crackfortran.skipfuncs = options['skipfuncs']
    if 'onlyfuncs' in options:
        crackfortran.onlyfuncs = options['onlyfuncs']
    crackfortran.include_paths[:] = options['include_paths']
    crackfortran.dolowercase = options['do-lower']
    postlist = crackfortran.crackfortran(files)
    if 'signsfile' in options:
        outmess('Saving signatures to file "%s"\n' % (options['signsfile']))
        pyf = crackfortran.crack2fortran(postlist)
        if options['signsfile'][-6:] == 'stdout':
            sys.stdout.write(pyf)
        else:
            with open(options['signsfile'], 'w') as sigfile:
                sigfile.write(pyf)
    # Resolve the output file names: an explicit option overrides the
    # per-module defaults.
    for mod in postlist:
        if options["coutput"] is None:
            mod["coutput"] = "%smodule.c" % mod["name"]
        else:
            mod["coutput"] = options["coutput"]
        if options["f2py_wrapper_output"] is None:
            mod["f2py_wrapper_output"] = "%s-f2pywrappers.f" % mod["name"]
        else:
            mod["f2py_wrapper_output"] = options["f2py_wrapper_output"]
    return postlist
def buildmodules(lst):
    """Build wrappers for every block in *lst*.

    __user__ blocks become callback wrappers; the rest become modules,
    except those that are 'use'd by another module (those are skipped and
    folded into their users instead).  Returns {module_name: build_info}.
    """
    cfuncs.buildcfuncs()
    outmess('Building modules...\n')
    modules = []
    mnames = []
    isusedby = {}
    for block in lst:
        if '__user__' in block['name']:
            cb_rules.buildcallbacks(block)
            continue
        for u in block.get('use', {}).keys():
            isusedby.setdefault(u, []).append(block['name'])
        modules.append(block)
        mnames.append(block['name'])
    ret = {}
    for name, module in zip(mnames, modules):
        if name in isusedby:
            outmess('\tSkipping module "%s" which is used by %s.\n' % (
                name, ','.join(['"%s"' % s for s in isusedby[name]])))
            continue
        # Collect the modules this one uses so they can be built together.
        um = []
        for u in module.get('use', {}).keys():
            if u in isusedby and u in mnames:
                um.append(modules[mnames.index(u)])
            else:
                outmess('\tModule "%s" uses nonexisting "%s" which will be ignored.\n' % (name, u))
        ret[name] = {}
        dict_append(ret[name], rules.buildmodule(module, um))
    return ret
def dict_append(d_out, d_in):
    """Merge *d_in* into *d_out*, accumulating every value into a list.

    List values are concatenated onto the existing entry; scalar values
    are appended to it.  Missing keys start as empty lists.
    """
    for key, value in d_in.items():
        bucket = d_out.setdefault(key, [])
        if isinstance(value, list):
            d_out[key] = bucket + value
        else:
            bucket.append(value)
def run_main(comline_list):
    """Run f2py as if ' '.join(comline_list) were used as a command line.

    Returns the dict produced by buildmodules() (augmented with the
    fortranobject sources), or None when only a signature file was
    requested via the -h flag.
    """
    crackfortran.reset_global_f2py_vars()
    f2pydir = os.path.dirname(os.path.abspath(cfuncs.__file__))
    fobjhsrc = os.path.join(f2pydir, 'src', 'fortranobject.h')
    fobjcsrc = os.path.join(f2pydir, 'src', 'fortranobject.c')
    files, options = scaninputline(comline_list)
    auxfuncs.options = options
    postlist = callcrackfortran(files, options)
    # Map each 'use'd name to the modules that use it.
    isusedby = {}
    for plist in postlist:
        for u in plist.get('use', {}).keys():
            isusedby.setdefault(u, []).append(plist['name'])
    for plist in postlist:
        if plist['block'] == 'python module' and '__user__' in plist['name']:
            if plist['name'] in isusedby:
                #if not quiet:
                outmess('Skipping Makefile build for module "%s" which is used by %s\n' % (
                    plist['name'], ','.join(['"%s"' % s for s in isusedby[plist['name']]])))
    if 'signsfile' in options:
        # Signature-only run: tell the user what to do next and stop.
        if options['verbose'] > 1:
            outmess('Stopping. Edit the signature file and then run f2py on the signature file: ')
            outmess('%s %s\n' % (os.path.basename(sys.argv[0]), options['signsfile']))
        return
    for plist in postlist:
        if plist['block'] != 'python module':
            if 'python module' not in options:
                errmess('Tip: If your original code is Fortran source then you must use -m option.\n')
            raise TypeError('All blocks must be python module blocks but got %s' % (repr(plist['block'])))
    auxfuncs.debugoptions = options['debug']
    f90mod_rules.options = options
    auxfuncs.wrapfuncs = options['wrapfuncs']
    ret = buildmodules(postlist)
    for mn in ret.keys():
        dict_append(ret[mn], {'csrc': fobjcsrc, 'h': fobjhsrc})
    return ret
def filter_files(prefix, suffix, files, remove_prefix=None):
    """Split *files* into (matching, rest) by a prefix/suffix regex.

    Entries are stripped of surrounding whitespace first.  When
    *remove_prefix* is truthy, the prefix is cut off the matched names.
    """
    pattern = re.compile(prefix + r'.*' + suffix + r'\Z')
    start = len(prefix) if remove_prefix else 0
    matched = []
    rest = []
    for name in (entry.strip() for entry in files):
        if pattern.match(name):
            matched.append(name[start:])
        else:
            rest.append(name)
    return matched, rest
def get_prefix(module):
    """Return the grandparent directory of *module*'s source file."""
    return os.path.dirname(os.path.dirname(module.__file__))
def run_compile():
"""
Do it all in one call!
"""
import tempfile
i = sys.argv.index('-c')
del sys.argv[i]
remove_build_dir = 0
try: i = sys.argv.index('--build-dir')
except ValueError: i=None
if i is not None:
build_dir = sys.argv[i+1]
del sys.argv[i+1]
del sys.argv[i]
else:
remove_build_dir = 1
build_dir = os.path.join(tempfile.mktemp())
_reg1 = re.compile(r'[-][-]link[-]')
sysinfo_flags = [_m for _m in sys.argv[1:] if _reg1.match(_m)]
sys.argv = [_m for _m in sys.argv if _m not in sysinfo_flags]
if sysinfo_flags:
sysinfo_flags = [f[7:] for f in sysinfo_flags]
_reg2 = re.compile(r'[-][-]((no[-]|)(wrap[-]functions|lower)|debug[-]capi|quiet)|[-]include')
f2py_flags = [_m for _m in sys.argv[1:] if _reg2.match(_m)]
sys.argv = [_m for _m in sys.argv if _m not in f2py_flags]
f2py_flags2 = []
fl = 0
for a in sys.argv[1:]:
if a in ['only:', 'skip:']:
fl = 1
elif a==':':
fl = 0
if fl or a==':':
f2py_flags2.append(a)
if f2py_flags2 and f2py_flags2[-1]!=':':
f2py_flags2.append(':')
f2py_flags.extend(f2py_flags2)
sys.argv = [_m for _m in sys.argv if _m not in f2py_flags2]
_reg3 = re.compile(r'[-][-]((f(90)?compiler([-]exec|)|compiler)=|help[-]compiler)')
flib_flags = [_m for _m in sys.argv[1:] if _reg3.match(_m)]
sys.argv = [_m for _m in sys.argv if _m not in flib_flags]
_reg4 = re.compile(r'[-][-]((f(77|90)(flags|exec)|opt|arch)=|(debug|noopt|noarch|help[-]fcompiler))')
fc_flags = [_m for _m in sys.argv[1:] if _reg4.match(_m)]
sys.argv = [_m for _m in sys.argv if _m not in fc_flags]
if 1:
del_list = []
for s in flib_flags:
v = '--fcompiler='
if s[:len(v)]==v:
from numpy.distutils import fcompiler
fcompiler.load_all_fcompiler_classes()
allowed_keys = list(fcompiler.fcompiler_class.keys())
nv = ov = s[len(v):].lower()
if ov not in allowed_keys:
vmap = {} # XXX
try:
nv = vmap[ov]
except KeyError:
if ov not in vmap.values():
print('Unknown vendor: "%s"' % (s[len(v):]))
nv = ov
i = flib_flags.index(s)
flib_flags[i] = '--fcompiler=' + nv
continue
for s in del_list:
i = flib_flags.index(s)
del flib_flags[i]
assert len(flib_flags)<=2, repr(flib_flags)
_reg5 = re.compile(r'[-][-](verbose)')
setup_flags = [_m for _m in sys.argv[1:] if _reg5.match(_m)]
sys.argv = [_m for _m in sys.argv if _m not in setup_flags]
if '--quiet' in f2py_flags:
setup_flags.append('--quiet')
modulename = 'untitled'
sources = sys.argv[1:]
for optname in ['--include_paths', '--include-paths']:
if optname in sys.argv:
i = sys.argv.index (optname)
f2py_flags.extend (sys.argv[i:i+2])
del sys.argv[i+1], sys.argv[i]
sources = sys.argv[1:]
if '-m' in sys.argv:
i = sys.argv.index('-m')
modulename = sys.argv[i+1]
del sys.argv[i+1], sys.argv[i]
sources = sys.argv[1:]
else:
from numpy.distutils.command.build_src import get_f2py_modulename
pyf_files, sources = filter_files('', '[.]pyf([.]src|)', sources)
sources = pyf_files + sources
for f in pyf_files:
modulename = get_f2py_modulename(f)
if modulename:
break
extra_objects, sources = filter_files('', '[.](o|a|so)', sources)
include_dirs, sources = filter_files('-I', '', sources, remove_prefix=1)
library_dirs, sources = filter_files('-L', '', sources, remove_prefix=1)
libraries, sources = filter_files('-l', '', sources, remove_prefix=1)
undef_macros, sources = filter_files('-U', '', sources, remove_prefix=1)
define_macros, sources = filter_files('-D', '', sources, remove_prefix=1)
using_numarray = 0
using_numeric = 0
for i in range(len(define_macros)):
name_value = define_macros[i].split('=', 1)
if len(name_value)==1:
name_value.append(None)
if len(name_value)==2:
define_macros[i] = tuple(name_value)
else:
print('Invalid use of -D:', name_value)
from numpy.distutils.system_info import get_info
num_include_dir = None
num_info = {}
#import numpy
#n = 'numpy'
#p = get_prefix(numpy)
#from numpy.distutils.misc_util import get_numpy_include_dirs
#num_info = {'include_dirs': get_numpy_include_dirs()}
if num_info:
include_dirs.extend(num_info.get('include_dirs', []))
from numpy.distutils.core import setup, Extension
ext_args = {'name': modulename, 'sources': sources,
'include_dirs': include_dirs,
'library_dirs': library_dirs,
'libraries': libraries,
'define_macros': define_macros,
'undef_macros': undef_macros,
'extra_objects': extra_objects,
'f2py_options': f2py_flags,
}
if sysinfo_flags:
from numpy.distutils.misc_util import dict_append
for n in sysinfo_flags:
i = get_info(n)
if not i:
outmess('No %s resources found in system'\
' (try `f2py --help-link`)\n' % (repr(n)))
dict_append(ext_args,**i)
ext = Extension(**ext_args)
sys.argv = [sys.argv[0]] + setup_flags
sys.argv.extend(['build',
'--build-temp', build_dir,
'--build-base', build_dir,
'--build-platlib', '.'])
if fc_flags:
sys.argv.extend(['config_fc']+fc_flags)
if flib_flags:
sys.argv.extend(['build_ext']+flib_flags)
setup(ext_modules = [ext])
if remove_build_dir and os.path.exists(build_dir):
import shutil
outmess('Removing build directory %s\n'%(build_dir))
shutil.rmtree(build_dir)
def main():
    """Top-level f2py entry point: dispatch on the command-line mode flags.

    ``--help-link`` prints the available system resources and exits,
    ``-c`` runs the compile path, anything else runs plain f2py.
    """
    args = sys.argv[1:]
    if '--help-link' in args:
        # Consume the flag, show linker/system resource info, and stop.
        sys.argv.remove('--help-link')
        from numpy.distutils.system_info import show_all
        show_all()
        return
    if '-c' in args:
        run_compile()
    else:
        run_main(sys.argv[1:])
#if __name__ == "__main__":
# main()
# EOF
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_2078_3 |
crossvul-python_data_bad_3499_4 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3499_4 |
crossvul-python_data_good_50_7 | # -*- coding: utf-8 -*-
#
# (c) Cornelius Kölbel
# License: AGPLv3
# contact: http://www.privacyidea.org
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
import functools
from privacyidea.lib.error import TokenAdminError
from privacyidea.lib.error import ParameterError
from privacyidea.lib import _
log = logging.getLogger(__name__)
def check_token_locked(func):
    """
    Decorator for token class methods that refuses to act on a locked token.

    The wrapped method's first positional argument must be the token
    object.  If ``token.is_locked()`` is true, a TokenAdminError (id 1007)
    is raised instead of calling the method; otherwise the method's result
    is returned unchanged.
    """
    @functools.wraps(func)
    def token_locked_wrapper(*args, **kwds):
        # args[0] is the bound token instance of the decorated method.
        tokenobject = args[0]
        if tokenobject.is_locked():
            raise TokenAdminError(_("This action is not possible, since the "
                                    "token is locked"), id=1007)
        return func(*args, **kwds)
    return token_locked_wrapper
def check_user_or_serial(func):
    """
    Decorator verifying that the wrapped function receives a user or a
    serial number.

    The serial may arrive as the first positional argument or as the
    ``serial`` keyword; the user is read from the ``user`` keyword.  When
    neither is present (a user of None or an empty User object counts as
    missing) a ParameterError is raised.
    """
    @functools.wraps(func)
    def user_or_serial_wrapper(*args, **kwds):
        serial = kwds.get("serial")
        if serial is None and args:
            # Fall back to the positional serial, if one was supplied.
            serial = args[0]
        user = kwds.get("user")
        if serial is None and (user is None or user.is_empty()):
            raise ParameterError(ParameterError.USER_OR_SERIAL)
        return func(*args, **kwds)
    return user_or_serial_wrapper
class check_user_or_serial_in_request(object):
    """
    Decorator class that validates the ``user`` and ``serial`` entries of
    the given request before the wrapped function runs.

    A ParameterError is raised when both values are missing, when the
    serial contains a wildcard ("*"), or when the user contains an SQL
    wildcard ("%").
    """
    def __init__(self, request):
        # Request object whose all_data dict is inspected on every call.
        self.request = request

    def __call__(self, func):
        @functools.wraps(func)
        def check_user_or_serial_in_request_wrapper(*args, **kwds):
            data = self.request.all_data
            user = data.get("user", "").strip()
            serial = data.get("serial", "").strip()
            if not (serial or user):
                raise ParameterError(_("You need to specify a serial or a user."))
            if "*" in serial:
                raise ParameterError(_("Invalid serial number."))
            if "%" in user:
                raise ParameterError(_("Invalid user."))
            return func(*args, **kwds)
        return check_user_or_serial_in_request_wrapper
def check_copy_serials(func):
    """
    Decorator that checks that both serials of a token copy operation
    exist and are unique.

    args[0] is the serial to copy from, args[1] the serial to copy to.
    If either does not resolve to exactly one token, a TokenAdminError
    is raised.
    """
    # Local import: lib.token is only needed once the decorator is applied.
    from privacyidea.lib.token import get_tokens

    @functools.wraps(func)
    def check_serial_wrapper(*args, **kwds):
        tokens_from = get_tokens(serial=args[0])
        tokens_to = get_tokens(serial=args[1])
        if len(tokens_from) != 1:
            log.error("not a unique token to copy from found")
            raise TokenAdminError("No unique token to copy from found",
                                  id=1016)
        if len(tokens_to) != 1:
            log.error("not a unique token to copy to found")
            raise TokenAdminError("No unique token to copy to found",
                                  id=1017)
        return func(*args, **kwds)
    return check_serial_wrapper
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_50_7 |
crossvul-python_data_good_3766_0 | from django.conf.urls import patterns, url
from django.contrib.auth import context_processors
from django.contrib.auth.urls import urlpatterns
from django.contrib.auth.views import password_reset
from django.contrib.auth.decorators import login_required
from django.contrib.messages.api import info
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import Template, RequestContext
from django.views.decorators.cache import never_cache
@never_cache
def remote_user_auth_view(request):
    """Dummy view for remote user tests: renders the current username."""
    template = Template("Username is {{ user }}.")
    context = RequestContext(request, {})
    return HttpResponse(template.render(context))
def auth_processor_no_attr_access(request):
    """Render with the auth processor but without touching its variables,
    then report whether the session was accessed."""
    render_to_response('context_processors/auth_attrs_no_access.html',
                       RequestContext(request, {},
                                      processors=[context_processors.auth]))
    # Only *after* the first render do we inspect whether the session
    # was accessed as a side effect of the auth context processor.
    return render_to_response('context_processors/auth_attrs_test_access.html',
                              {'session_accessed': request.session.accessed})
def auth_processor_attr_access(request):
    """Render a template that does access auth attributes, then report
    whether the session was accessed."""
    render_to_response('context_processors/auth_attrs_access.html',
                       RequestContext(request, {},
                                      processors=[context_processors.auth]))
    return render_to_response('context_processors/auth_attrs_test_access.html',
                              {'session_accessed': request.session.accessed})
def auth_processor_user(request):
    """Render the template that displays the ``user`` context variable."""
    ctx = RequestContext(request, {}, processors=[context_processors.auth])
    return render_to_response('context_processors/auth_attrs_user.html', ctx)
def auth_processor_perms(request):
    """Render the template that displays the ``perms`` context variable."""
    ctx = RequestContext(request, {}, processors=[context_processors.auth])
    return render_to_response('context_processors/auth_attrs_perms.html', ctx)
def auth_processor_messages(request):
    """Add one info message and render the template that displays messages."""
    info(request, "Message 1")
    ctx = RequestContext(request, {}, processors=[context_processors.auth])
    return render_to_response('context_processors/auth_attrs_messages.html', ctx)
def userpage(request):
    """Placeholder view with no behaviour; presumably only a target for
    URL reversing in the tests -- it is never meant to be called."""
    return None
# special urls for auth test cases
# Extends the stock django.contrib.auth.urls patterns with test-only routes.
# Each tuple is (regex, view[, kwargs]); the string views are old-style
# dotted references that Django resolves at request time.
urlpatterns = urlpatterns + patterns('',
    (r'^logout/custom_query/$', 'django.contrib.auth.views.logout', dict(redirect_field_name='follow')),
    (r'^logout/next_page/$', 'django.contrib.auth.views.logout', dict(next_page='/somewhere/')),
    (r'^remote_user/$', remote_user_auth_view),
    (r'^password_reset_from_email/$', 'django.contrib.auth.views.password_reset', dict(from_email='staffmember@example.com')),
    (r'^admin_password_reset/$', 'django.contrib.auth.views.password_reset', dict(is_admin_site=True)),
    (r'^login_required/$', login_required(password_reset)),
    (r'^login_required_login_url/$', login_required(password_reset, login_url='/somewhere/')),
    (r'^auth_processor_no_attr_access/$', auth_processor_no_attr_access),
    (r'^auth_processor_attr_access/$', auth_processor_attr_access),
    (r'^auth_processor_user/$', auth_processor_user),
    (r'^auth_processor_perms/$', auth_processor_perms),
    (r'^auth_processor_messages/$', auth_processor_messages),
    url(r'^userpage/(.+)/$', userpage, name="userpage"),
)
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3766_0 |
crossvul-python_data_bad_872_3 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-20/py/bad_872_3 |
crossvul-python_data_bad_3499_3 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3499_3 |
crossvul-python_data_good_3500_2 | '''This module manages ssh key files for bcfg2'''
__revision__ = '$Revision$'
import binascii
import os
import socket
import shutil
import sys
import tempfile
from subprocess import Popen, PIPE
import Bcfg2.Server.Plugin
class SSHbase(Bcfg2.Server.Plugin.Plugin,
              Bcfg2.Server.Plugin.Generator,
              Bcfg2.Server.Plugin.DirectoryBacked,
              Bcfg2.Server.Plugin.PullTarget):
    """
    The sshbase generator manages ssh host keys (both v1 and v2)
    for hosts. It also manages the ssh_known_hosts file. It can
    integrate host keys from other management domains and similarly
    export its keys. The repository contains files in the following
    formats:
    ssh_host_key.H_(hostname) -> the v1 host private key for
    (hostname)
    ssh_host_key.pub.H_(hostname) -> the v1 host public key
    for (hostname)
    ssh_host_(dr)sa_key.H_(hostname) -> the v2 ssh host
    private key for (hostname)
    ssh_host_(dr)sa_key.pub.H_(hostname) -> the v2 ssh host
    public key for (hostname)
    ssh_known_hosts -> the current known hosts file. this
    is regenerated each time a new key is generated.
    """
    name = 'SSHbase'
    __version__ = '$Id$'
    __author__ = 'bcfg-dev@mcs.anl.gov'
    # Filename templates for public/private keys; %s is the client hostname.
    pubkeys = ["ssh_host_dsa_key.pub.H_%s",
               "ssh_host_rsa_key.pub.H_%s",
               "ssh_host_key.pub.H_%s"]
    hostkeys = ["ssh_host_dsa_key.H_%s",
                "ssh_host_rsa_key.H_%s",
                "ssh_host_key.H_%s"]
    # Path suffixes this plugin claims to handle in HandlesEntry.
    keypatterns = ["ssh_host_dsa_key",
                   "ssh_host_rsa_key",
                   "ssh_host_key",
                   "ssh_host_dsa_key.pub",
                   "ssh_host_rsa_key.pub",
                   "ssh_host_key.pub"]
    def __init__(self, core, datastore):
        Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
        Bcfg2.Server.Plugin.Generator.__init__(self)
        Bcfg2.Server.Plugin.PullTarget.__init__(self)
        try:
            Bcfg2.Server.Plugin.DirectoryBacked.__init__(self, self.data,
                                                         self.core.fam)
        except OSError:
            ioerr = sys.exc_info()[1]
            self.logger.error("Failed to load SSHbase repository from %s" \
                              % (self.data))
            self.logger.error(ioerr)
            raise Bcfg2.Server.Plugin.PluginInitError
        # Dispatch map: bound path -> handler method used by HandleEntry.
        self.Entries = {'Path':
                        {'/etc/ssh/ssh_known_hosts': self.build_skn,
                         '/etc/ssh/ssh_host_dsa_key': self.build_hk,
                         '/etc/ssh/ssh_host_rsa_key': self.build_hk,
                         '/etc/ssh/ssh_host_dsa_key.pub': self.build_hk,
                         '/etc/ssh/ssh_host_rsa_key.pub': self.build_hk,
                         '/etc/ssh/ssh_host_key': self.build_hk,
                         '/etc/ssh/ssh_host_key.pub': self.build_hk}}
        # DNS lookup caches (hostname -> ip, ip -> names) and the cached
        # known-hosts text (False means "needs regeneration").
        self.ipcache = {}
        self.namecache = {}
        self.__skn = False
    def get_skn(self):
        """Build memory cache of the ssh known hosts file."""
        if not self.__skn:
            # Start from any manually-provided *.static entries.
            self.__skn = "\n".join([value.data.decode() for key, value in \
                                    list(self.entries.items()) if \
                                    key.endswith('.static')])
            names = dict()
            # if no metadata is registered yet, defer
            if len(self.core.metadata.query.all()) == 0:
                self.__skn = False
                return self.__skn
            # Collect every name/address a client may be known by.
            for cmeta in self.core.metadata.query.all():
                names[cmeta.hostname] = set([cmeta.hostname])
                names[cmeta.hostname].update(cmeta.aliases)
                newnames = set()
                newips = set()
                for name in names[cmeta.hostname]:
                    # Also record the short (unqualified) name.
                    newnames.add(name.split('.')[0])
                    try:
                        newips.add(self.get_ipcache_entry(name)[0])
                    except:
                        continue
                names[cmeta.hostname].update(newnames)
                names[cmeta.hostname].update(cmeta.addresses)
                names[cmeta.hostname].update(newips)
                # TODO: Only perform reverse lookups on IPs if an option is set.
                if True:
                    for ip in newips:
                        try:
                            names[cmeta.hostname].update(self.get_namecache_entry(ip))
                        except:
                            continue
                names[cmeta.hostname] = sorted(names[cmeta.hostname])
            # now we have our name cache
            pubkeys = [pubk for pubk in list(self.entries.keys()) \
                       if pubk.find('.pub.H_') != -1]
            pubkeys.sort()
            badnames = set()
            for pubkey in pubkeys:
                hostname = pubkey.split('H_')[1]
                if hostname not in names:
                    # Warn only once per unknown host.
                    if hostname not in badnames:
                        badnames.add(hostname)
                        self.logger.error("SSHbase: Unknown host %s; ignoring public keys" % hostname)
                    continue
                self.__skn += "%s %s" % (','.join(names[hostname]),
                                         self.entries[pubkey].data.decode())
        return self.__skn
    def set_skn(self, value):
        """Set backing data for skn."""
        self.__skn = value
    skn = property(get_skn, set_skn)
    def HandleEvent(self, event=None):
        """Local event handler that does skn regen on pubkey change."""
        Bcfg2.Server.Plugin.DirectoryBacked.HandleEvent(self, event)
        # Invalidate the cached known-hosts text whenever a public key or a
        # static snippet changes on disk.
        if event and '_key.pub.H_' in event.filename:
            self.skn = False
        if event and event.filename.endswith('.static'):
            self.skn = False
        if not self.__skn:
            # Rebuild eagerly once (almost) all directory entries are loaded.
            if (len(list(self.entries.keys()))) >= (len(os.listdir(self.data)) - 1):
                _ = self.skn
    def HandlesEntry(self, entry, _):
        """Handle key entries dynamically."""
        return entry.tag == 'Path' and \
               ([fpat for fpat in self.keypatterns
                 if entry.get('name').endswith(fpat)]
                or entry.get('name').endswith('ssh_known_hosts'))
    def HandleEntry(self, entry, metadata):
        """Bind data."""
        if entry.get('name').endswith('ssh_known_hosts'):
            return self.build_skn(entry, metadata)
        else:
            return self.build_hk(entry, metadata)
    def get_ipcache_entry(self, client):
        """Build a cache of dns results."""
        if client in self.ipcache:
            # A cached False marks a previously failed lookup.
            if self.ipcache[client]:
                return self.ipcache[client]
            else:
                raise socket.gaierror
        else:
            # need to add entry
            try:
                ipaddr = socket.gethostbyname(client)
                self.ipcache[client] = (ipaddr, client)
                return (ipaddr, client)
            except socket.gaierror:
                # DNS failed; fall back to getent (covers /etc/hosts, NIS...).
                ipaddr = Popen(["getent", "hosts", client],
                               stdout=PIPE).stdout.read().strip().split()
                if ipaddr:
                    self.ipcache[client] = (ipaddr, client)
                    return (ipaddr, client)
                self.ipcache[client] = False
                self.logger.error("Failed to find IP address for %s" % client)
                raise socket.gaierror
    def get_namecache_entry(self, cip):
        """Build a cache of name lookups from client IP addresses."""
        if cip in self.namecache:
            # lookup cached name from IP
            if self.namecache[cip]:
                return self.namecache[cip]
            else:
                raise socket.gaierror
        else:
            # add an entry that has not been cached
            try:
                rvlookup = socket.gethostbyaddr(cip)
                if rvlookup[0]:
                    self.namecache[cip] = [rvlookup[0]]
                else:
                    self.namecache[cip] = []
                # Include any additional aliases from the reverse lookup.
                self.namecache[cip].extend(rvlookup[1])
                return self.namecache[cip]
            except socket.gaierror:
                self.namecache[cip] = False
                self.logger.error("Failed to find any names associated with IP address %s" % cip)
                raise
    def build_skn(self, entry, metadata):
        """This function builds builds a host specific known_hosts file."""
        client = metadata.hostname
        entry.text = self.skn
        # Append the client's own keys under the localhost aliases too.
        hostkeys = [keytmpl % client for keytmpl in self.pubkeys \
                    if (keytmpl % client) in self.entries]
        hostkeys.sort()
        for hostkey in hostkeys:
            entry.text += "localhost,localhost.localdomain,127.0.0.1 %s" % (
                self.entries[hostkey].data.decode())
        permdata = {'owner': 'root',
                    'group': 'root',
                    'type': 'file',
                    'perms': '0644'}
        [entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
    def build_hk(self, entry, metadata):
        """This binds host key data into entries."""
        client = metadata.hostname
        filename = "%s.H_%s" % (entry.get('name').split('/')[-1], client)
        if filename not in list(self.entries.keys()):
            # No key on disk yet -- generate a fresh pair for this client.
            self.GenerateHostKeys(client)
        if not filename in self.entries:
            self.logger.error("%s still not registered" % filename)
            raise Bcfg2.Server.Plugin.PluginExecutionError
        keydata = self.entries[filename].data
        permdata = {'owner': 'root',
                    'group': 'root',
                    'type': 'file'}
        if entry.get('name')[-4:] == '.pub':
            permdata['perms'] = '0644'
        else:
            # Private keys are mode 0600 and flagged sensitive.
            permdata['perms'] = '0600'
            permdata['sensitive'] = 'true'
        [entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
        if "ssh_host_key.H_" == filename[:15]:
            # v1 host keys are base64-encoded (presumably because the v1
            # key material is binary -- TODO confirm).
            entry.attrib['encoding'] = 'base64'
            entry.text = binascii.b2a_base64(keydata)
        else:
            entry.text = keydata
    def GenerateHostKeys(self, client):
        """Generate new host keys for client."""
        keylist = [keytmpl % client for keytmpl in self.hostkeys]
        for hostkey in keylist:
            # Derive the ssh-keygen key type from the filename prefix.
            if 'ssh_host_rsa_key.H_' == hostkey[:19]:
                keytype = 'rsa'
            elif 'ssh_host_dsa_key.H_' == hostkey[:19]:
                keytype = 'dsa'
            else:
                keytype = 'rsa1'
            if hostkey not in list(self.entries.keys()):
                fileloc = "%s/%s" % (self.data, hostkey)
                publoc = self.data + '/' + ".".join([hostkey.split('.')[0],
                                                     'pub',
                                                     "H_%s" % client])
                # Generate in a private temp dir, then copy into the repo.
                tempdir = tempfile.mkdtemp()
                temploc = "%s/%s" % (tempdir, hostkey)
                cmd = 'ssh-keygen -q -f %s -N "" -t %s -C root@%s < /dev/null'
                os.system(cmd % (temploc, keytype, client))
                shutil.copy(temploc, fileloc)
                shutil.copy("%s.pub" % temploc, publoc)
                self.AddEntry(hostkey)
                self.AddEntry(".".join([hostkey.split('.')[0]] + ['pub', "H_%s" \
                                                                  % client]))
                try:
                    os.unlink(temploc)
                    os.unlink("%s.pub" % temploc)
                    os.rmdir(tempdir)
                except OSError:
                    self.logger.error("Failed to unlink temporary ssh keys")
    def AcceptChoices(self, _, metadata):
        return [Bcfg2.Server.Plugin.Specificity(hostname=metadata.hostname)]
    def AcceptPullData(self, specific, entry, log):
        """Per-plugin bcfg2-admin pull support."""
        # specific will always be host specific
        filename = "%s/%s.H_%s" % (self.data, entry['name'].split('/')[-1],
                                   specific.hostname)
        try:
            open(filename, 'w').write(entry['text'])
            if log:
                print("Wrote file %s" % filename)
        except KeyError:
            self.logger.error("Failed to pull %s. This file does not currently "
                              "exist on the client" % entry.get('name'))
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3500_2 |
crossvul-python_data_good_3767_2 | try:
from urllib.parse import urlparse, urlunparse
except ImportError: # Python 2
from urlparse import urlparse, urlunparse
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, QueryDict
from django.template.response import TemplateResponse
from django.utils.http import base36_to_int
from django.utils.translation import ugettext as _
from django.shortcuts import resolve_url
from django.views.decorators.debug import sensitive_post_parameters
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
# Avoid shadowing the login() and logout() views below.
from django.contrib.auth import REDIRECT_FIELD_NAME, login as auth_login, logout as auth_logout, get_user_model
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import AuthenticationForm, PasswordResetForm, SetPasswordForm, PasswordChangeForm
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.models import get_current_site
@sensitive_post_parameters()
@csrf_protect
@never_cache
def login(request, template_name='registration/login.html',
          redirect_field_name=REDIRECT_FIELD_NAME,
          authentication_form=AuthenticationForm,
          current_app=None, extra_context=None):
    """
    Displays the login form and handles the login action.

    On GET (or invalid POST) the form is re-rendered; on successful POST
    the user is logged in and redirected to the requested target, subject
    to a same-host check on the redirect URL.
    """
    # The post-login destination, taken from GET or POST (e.g. ?next=...).
    redirect_to = request.REQUEST.get(redirect_field_name, '')
    if request.method == "POST":
        form = authentication_form(data=request.POST)
        if form.is_valid():
            # Use default setting if redirect_to is empty
            if not redirect_to:
                redirect_to = settings.LOGIN_REDIRECT_URL
            redirect_to = resolve_url(redirect_to)
            netloc = urlparse(redirect_to)[1]
            # Heavier security check -- don't allow redirection to a different
            # host. An empty netloc (relative URL) is allowed; anything with
            # a host component must match the current request's host to
            # prevent an open redirect.
            if netloc and netloc != request.get_host():
                redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
            # Okay, security checks complete. Log the user in.
            auth_login(request, form.get_user())
            if request.session.test_cookie_worked():
                request.session.delete_test_cookie()
            return HttpResponseRedirect(redirect_to)
    else:
        form = authentication_form(request)
    # Set a test cookie so the POST handler can verify cookies work.
    request.session.set_test_cookie()
    current_site = get_current_site(request)
    context = {
        'form': form,
        redirect_field_name: redirect_to,
        'site': current_site,
        'site_name': current_site.name,
    }
    if extra_context is not None:
        context.update(extra_context)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
def logout(request, next_page=None,
           template_name='registration/logged_out.html',
           redirect_field_name=REDIRECT_FIELD_NAME,
           current_app=None, extra_context=None):
    """
    Logs out the user and displays 'You are logged out' message.

    A redirect target from the request is honoured only when it does not
    point at a different host (open-redirect protection); otherwise either
    next_page is used or the logged-out template is rendered.
    """
    auth_logout(request)
    redirect_to = request.REQUEST.get(redirect_field_name, '')
    if redirect_to:
        netloc = urlparse(redirect_to)[1]
        # Security check -- don't allow redirection to a different host.
        # Relative URLs (empty netloc) and same-host URLs pass.
        if not (netloc and netloc != request.get_host()):
            return HttpResponseRedirect(redirect_to)
    if next_page is None:
        current_site = get_current_site(request)
        context = {
            'site': current_site,
            'site_name': current_site.name,
            'title': _('Logged out')
        }
        if extra_context is not None:
            context.update(extra_context)
        return TemplateResponse(request, template_name, context,
                                current_app=current_app)
    else:
        # Redirect to this page until the session has been cleared.
        return HttpResponseRedirect(next_page or request.path)
def logout_then_login(request, login_url=None, current_app=None, extra_context=None):
    """
    Logs out the user if he is logged in. Then redirects to the log-in page.
    """
    target = resolve_url(login_url or settings.LOGIN_URL)
    return logout(request, target,
                  current_app=current_app, extra_context=extra_context)
def redirect_to_login(next, login_url=None,
                      redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Redirects the user to the login page, passing the given 'next' page
    in the query string (unless redirect_field_name is falsy).
    """
    parts = list(urlparse(resolve_url(login_url or settings.LOGIN_URL)))
    if redirect_field_name:
        # Merge the "next" target into the login URL's existing query string.
        query = QueryDict(parts[4], mutable=True)
        query[redirect_field_name] = next
        parts[4] = query.urlencode(safe='/')
    return HttpResponseRedirect(urlunparse(parts))
# 4 views for password reset:
# - password_reset sends the mail
# - password_reset_done shows a success message for the above
# - password_reset_confirm checks the link the user clicked and
# prompts for a new password
# - password_reset_complete shows a success message for the above
@csrf_protect
def password_reset(request, is_admin_site=False,
                   template_name='registration/password_reset_form.html',
                   email_template_name='registration/password_reset_email.html',
                   subject_template_name='registration/password_reset_subject.txt',
                   password_reset_form=PasswordResetForm,
                   token_generator=default_token_generator,
                   post_reset_redirect=None,
                   from_email=None,
                   current_app=None,
                   extra_context=None):
    """
    Displays the password reset form and, on a valid POST, sends the
    reset mail and redirects to the "done" page.
    """
    if post_reset_redirect is None:
        post_reset_redirect = reverse('django.contrib.auth.views.password_reset_done')
    if request.method == "POST":
        form = password_reset_form(request.POST)
        if form.is_valid():
            opts = dict(
                use_https=request.is_secure(),
                token_generator=token_generator,
                from_email=from_email,
                email_template_name=email_template_name,
                subject_template_name=subject_template_name,
                request=request,
            )
            if is_admin_site:
                # Use the request's host as the mail's domain for the
                # admin-site variant of the view.
                opts['domain_override'] = request.get_host()
            form.save(**opts)
            return HttpResponseRedirect(post_reset_redirect)
        # Invalid POST: fall through and re-render the bound form.
    else:
        form = password_reset_form()
    context = {'form': form}
    if extra_context is not None:
        context.update(extra_context)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
def password_reset_done(request,
                        template_name='registration/password_reset_done.html',
                        current_app=None, extra_context=None):
    """Show the "reset mail has been sent" confirmation page."""
    context = dict(extra_context) if extra_context is not None else {}
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
# Doesn't need csrf_protect since no-one can guess the URL
@sensitive_post_parameters()
@never_cache
def password_reset_confirm(request, uidb36=None, token=None,
                           template_name='registration/password_reset_confirm.html',
                           token_generator=default_token_generator,
                           set_password_form=SetPasswordForm,
                           post_reset_redirect=None,
                           current_app=None, extra_context=None):
    """
    View that checks the hash in a password reset link and presents a
    form for entering a new password.

    If the uid/token pair does not validate, the template is rendered
    with validlink=False and no form.
    """
    UserModel = get_user_model()
    assert uidb36 is not None and token is not None # checked by URLconf
    if post_reset_redirect is None:
        post_reset_redirect = reverse('django.contrib.auth.views.password_reset_complete')
    try:
        # The uid in the link is base36-encoded; any decoding/lookup error
        # is treated the same as an unknown user.
        uid_int = base36_to_int(uidb36)
        user = UserModel.objects.get(id=uid_int)
    except (ValueError, OverflowError, UserModel.DoesNotExist):
        user = None
    if user is not None and token_generator.check_token(user, token):
        validlink = True
        if request.method == 'POST':
            form = set_password_form(user, request.POST)
            if form.is_valid():
                form.save()
                return HttpResponseRedirect(post_reset_redirect)
        else:
            form = set_password_form(None)
    else:
        # Invalid or expired link: render the template without a form.
        validlink = False
        form = None
    context = {
        'form': form,
        'validlink': validlink,
    }
    if extra_context is not None:
        context.update(extra_context)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
def password_reset_complete(request,
                            template_name='registration/password_reset_complete.html',
                            current_app=None, extra_context=None):
    """Show the final "password has been reset" page with a login link."""
    context = {'login_url': resolve_url(settings.LOGIN_URL)}
    if extra_context is not None:
        context.update(extra_context)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
@sensitive_post_parameters()
@csrf_protect
@login_required
def password_change(request,
                    template_name='registration/password_change_form.html',
                    post_change_redirect=None,
                    password_change_form=PasswordChangeForm,
                    current_app=None, extra_context=None):
    """Let the logged-in user change their password."""
    if post_change_redirect is None:
        post_change_redirect = reverse('django.contrib.auth.views.password_change_done')
    if request.method == "POST":
        form = password_change_form(user=request.user, data=request.POST)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect(post_change_redirect)
        # Invalid POST: fall through and re-render the bound form.
    else:
        form = password_change_form(user=request.user)
    context = {'form': form}
    if extra_context is not None:
        context.update(extra_context)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
@login_required
def password_change_done(request,
                         template_name='registration/password_change_done.html',
                         current_app=None, extra_context=None):
    """Show the "password changed" confirmation page."""
    context = dict(extra_context) if extra_context is not None else {}
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3767_2 |
crossvul-python_data_bad_3268_3 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import contextlib
import datetime
import os
import pwd
import re
import time
from io import StringIO
from numbers import Number
try:
from hashlib import sha1
except ImportError:
from sha import sha as sha1
from jinja2 import Environment
from jinja2.loaders import FileSystemLoader
from jinja2.exceptions import TemplateSyntaxError, UndefinedError
from jinja2.utils import concat as j2_concat
from jinja2.runtime import Context, StrictUndefined
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleUndefinedVariable
from ansible.module_utils.six import string_types, text_type
from ansible.module_utils._text import to_native, to_text, to_bytes
from ansible.plugins import filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
from ansible.utils.unsafe_proxy import UnsafeProxy, wrap_var
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['Templar', 'generate_ansible_template_vars']
# A regex for checking to see if a variable we're trying to
# expand is just a single variable name.
# Primitive Types which we don't want Jinja to convert to strings.
NON_TEMPLATED_TYPES = ( bool, Number )
JINJA2_OVERRIDE = '#jinja2:'
def generate_ansible_template_vars(path):
    """
    Build the dict of ``template_*`` magic variables (plus
    ``ansible_managed``) that are injected when rendering the template
    found at *path*.
    """
    b_path = to_bytes(path)
    try:
        template_uid = pwd.getpwuid(os.stat(b_path).st_uid).pw_name
    except:
        # Owner has no passwd entry (or lookup failed); fall back to the
        # numeric uid.
        template_uid = os.stat(b_path).st_uid
    temp_vars = {
        'template_host': os.uname()[1],
        'template_path': b_path,
        'template_mtime': datetime.datetime.fromtimestamp(os.path.getmtime(b_path)),
        'template_uid': template_uid,
        'template_fullpath': os.path.abspath(path),
        'template_run_date': datetime.datetime.now(),
    }
    managed_str = C.DEFAULT_MANAGED_STR.format(
        host=temp_vars['template_host'],
        uid=temp_vars['template_uid'],
        file=temp_vars['template_path'],
    )
    # The managed string may contain strftime directives, which are
    # expanded against the template file's mtime.
    temp_vars['ansible_managed'] = time.strftime(
        managed_str, time.localtime(os.path.getmtime(b_path)))
    return temp_vars
def _escape_backslashes(data, jinja_env):
"""Double backslashes within jinja2 expressions
A user may enter something like this in a playbook::
debug:
msg: "Test Case 1\\3; {{ test1_name | regex_replace('^(.*)_name$', '\\1')}}"
The string inside of the {{ gets interpreted multiple times First by yaml.
Then by python. And finally by jinja2 as part of it's variable. Because
it is processed by both python and jinja2, the backslash escaped
characters get unescaped twice. This means that we'd normally have to use
four backslashes to escape that. This is painful for playbook authors as
they have to remember different rules for inside vs outside of a jinja2
expression (The backslashes outside of the "{{ }}" only get processed by
yaml and python. So they only need to be escaped once). The following
code fixes this by automatically performing the extra quoting of
backslashes inside of a jinja2 expression.
"""
if '\\' in data and '{{' in data:
new_data = []
d2 = jinja_env.preprocess(data)
in_var = False
for token in jinja_env.lex(d2):
if token[1] == 'variable_begin':
in_var = True
new_data.append(token[2])
elif token[1] == 'variable_end':
in_var = False
new_data.append(token[2])
elif in_var and token[1] == 'string':
# Double backslashes only if we're inside of a jinja2 variable
new_data.append(token[2].replace('\\','\\\\'))
else:
new_data.append(token[2])
data = ''.join(new_data)
return data
def _count_newlines_from_end(in_str):
'''
Counts the number of newlines at the end of a string. This is used during
the jinja2 templating to ensure the count matches the input, since some newlines
may be thrown away during the templating.
'''
try:
i = len(in_str)
j = i -1
while in_str[j] == '\n':
j -= 1
return i - 1 - j
except IndexError:
# Uncommon cases: zero length string and string containing only newlines
return i
class AnsibleContext(Context):
    '''
    Jinja2 Context subclass that watches every variable lookup and records
    whether any resolved value carries the AnsibleUnsafe marker.  The flag
    is inspected after rendering; when set, the final templated result is
    wrapped via UnsafeProxy.
    '''
    def __init__(self, *args, **kwargs):
        super(AnsibleContext, self).__init__(*args, **kwargs)
        # Latched to True the first time an unsafe value is resolved.
        self.unsafe = False

    def _is_unsafe(self, val):
        '''
        Recursively detect AnsibleUnsafe values.  Containers are walked so
        that an unsafe entry buried inside a dict or list (which may later
        be repr'd back into jinja2 syntax) is not missed.
        '''
        if isinstance(val, dict):
            return any(self._is_unsafe(item) for item in val.values())
        if isinstance(val, list):
            return any(self._is_unsafe(item) for item in val)
        return isinstance(val, string_types) and hasattr(val, '__UNSAFE__')

    def _update_unsafe(self, val):
        # Once flagged, stay flagged and skip the (recursive) scan.
        if val is not None and not self.unsafe and self._is_unsafe(val):
            self.unsafe = True

    def resolve(self, key):
        '''
        Intercepted resolve(): delegates to jinja2 and flags unsafe values
        as a side effect.
        '''
        resolved = super(AnsibleContext, self).resolve(key)
        self._update_unsafe(resolved)
        return resolved

    def resolve_or_missing(self, key):
        '''Missing-tolerant variant of resolve(), with the same unsafe check.'''
        resolved = super(AnsibleContext, self).resolve_or_missing(key)
        self._update_unsafe(resolved)
        return resolved
class AnsibleEnvironment(Environment):
    '''
    Environment subclass whose only job is to swap in Ansible's own
    Template and Context implementations for jinja2's defaults.
    '''
    template_class = AnsibleJ2Template
    context_class = AnsibleContext
class Templar:
    '''
    The main class for templating, with the main entry-point of template().
    '''

    def __init__(self, loader, shared_loader_obj=None, variables=dict()):
        # NOTE(review): the mutable default for ``variables`` is shared across
        # all calls that rely on it; callers appear to always pass their own
        # dict -- confirm before relying on the default.
        self._loader = loader
        self._filters = None                # lazily built filter plugin cache
        self._tests = None                  # lazily built test plugin cache
        self._available_variables = variables
        self._cached_result = {}            # sha1(variable+options) -> rendered result

        # base directory for the jinja2 FileSystemLoader below
        if loader:
            self._basedir = loader.get_basedir()
        else:
            self._basedir = './'

        # plugin loaders can be supplied by the caller (shared loader object)
        # or default to the module-level singletons
        if shared_loader_obj:
            self._filter_loader = getattr(shared_loader_obj, 'filter_loader')
            self._test_loader = getattr(shared_loader_obj, 'test_loader')
            self._lookup_loader = getattr(shared_loader_obj, 'lookup_loader')
        else:
            self._filter_loader = filter_loader
            self._test_loader = test_loader
            self._lookup_loader = lookup_loader

        # flags to determine whether certain failures during templating
        # should result in fatal errors being raised
        self._fail_on_lookup_errors = True
        self._fail_on_filter_errors = True
        self._fail_on_undefined_errors = C.DEFAULT_UNDEFINED_VAR_BEHAVIOR

        self.environment = AnsibleEnvironment(
            trim_blocks=True,
            undefined=StrictUndefined,
            extensions=self._get_extensions(),
            finalize=self._finalize,
            loader=FileSystemLoader(self._basedir),
        )

        # matches strings that are nothing but a single '{{ name }}'
        # expression, so the variable's native type can be preserved
        self.SINGLE_VAR = re.compile(r"^%s\s*(\w*)\s*%s$" % (self.environment.variable_start_string, self.environment.variable_end_string))

        # matches any jinja2 print/block delimiter; used by _clean_data()
        self._clean_regex = re.compile(r'(?:%s|%s|%s|%s)' % (
            self.environment.variable_start_string,
            self.environment.block_start_string,
            self.environment.block_end_string,
            self.environment.variable_end_string
        ))
        # matches results ending in a string-type filter (e.g. '| string }}'),
        # which must not be converted back into a python container
        self._no_type_regex = re.compile(r'.*\|\s*(?:%s)\s*(?:%s)?$' % ('|'.join(C.STRING_TYPE_FILTERS), self.environment.variable_end_string))

    def _get_filters(self):
        '''
        Returns filter plugins, after loading and caching them if need be
        '''
        if self._filters is not None:
            return self._filters.copy()

        plugins = [x for x in self._filter_loader.all()]

        self._filters = dict()
        for fp in plugins:
            self._filters.update(fp.filters())
        # tests are exposed alongside filters as well
        self._filters.update(self._get_tests())

        return self._filters.copy()

    def _get_tests(self):
        '''
        Returns tests plugins, after loading and caching them if need be
        '''
        if self._tests is not None:
            return self._tests.copy()

        plugins = [x for x in self._test_loader.all()]

        self._tests = dict()
        for fp in plugins:
            self._tests.update(fp.tests())

        return self._tests.copy()

    def _get_extensions(self):
        '''
        Return jinja2 extensions to load.
        If some extensions are set via jinja_extensions in ansible.cfg, we try
        to load them with the jinja environment.
        '''
        jinja_exts = []
        if C.DEFAULT_JINJA2_EXTENSIONS:
            # make sure the configuration directive doesn't contain spaces
            # and split extensions in an array
            jinja_exts = C.DEFAULT_JINJA2_EXTENSIONS.replace(" ", "").split(',')

        return jinja_exts

    def _clean_data(self, orig_data):
        ''' remove jinja2 template tags from data '''
        # Defense against template injection: matched pairs of print/block
        # delimiters in *data values* are rewritten to comment delimiters so
        # the content cannot be re-evaluated as a template.
        if hasattr(orig_data, '__ENCRYPTED__'):
            # vault-encrypted data is left untouched
            ret = orig_data
        elif isinstance(orig_data, list):
            clean_list = []
            for list_item in orig_data:
                clean_list.append(self._clean_data(list_item))
            ret = clean_list
        elif isinstance(orig_data, dict):
            clean_dict = {}
            for k in orig_data:
                clean_dict[self._clean_data(k)] = self._clean_data(orig_data[k])
            ret = clean_dict
        elif isinstance(orig_data, string_types):
            # This will error with str data (needs unicode), but all strings should already be converted already.
            # If you get exception, the problem is at the data origin, do not add to_text here.
            with contextlib.closing(StringIO(orig_data)) as data:
                # these variables keep track of opening block locations, as we only
                # want to replace matched pairs of print/block tags
                print_openings = []
                block_openings = []
                for mo in self._clean_regex.finditer(orig_data):
                    token = mo.group(0)
                    token_start = mo.start(0)

                    # opening delimiters share a first character ('{'),
                    # closing delimiters share a last character ('}')
                    if token[0] == self.environment.variable_start_string[0]:
                        if token == self.environment.block_start_string:
                            block_openings.append(token_start)
                        elif token == self.environment.variable_start_string:
                            print_openings.append(token_start)

                    elif token[1] == self.environment.variable_end_string[1]:
                        prev_idx = None
                        if token == self.environment.block_end_string and block_openings:
                            prev_idx = block_openings.pop()
                        elif token == self.environment.variable_end_string and print_openings:
                            prev_idx = print_openings.pop()

                        if prev_idx is not None:
                            # replace the opening
                            data.seek(prev_idx, os.SEEK_SET)
                            data.write(to_text(self.environment.comment_start_string))
                            # replace the closing
                            data.seek(token_start, os.SEEK_SET)
                            data.write(to_text(self.environment.comment_end_string))
                    else:
                        raise AnsibleError("Error while cleaning data for safety: unhandled regex match")

                ret = data.getvalue()
        else:
            ret = orig_data
        return ret

    def set_available_variables(self, variables):
        '''
        Sets the list of template variables this Templar instance will use
        to template things, so we don't have to pass them around between
        internal methods. We also clear the template cache here, as the variables
        are being changed.
        '''
        assert isinstance(variables, dict)
        self._available_variables = variables
        self._cached_result = {}

    def template(self, variable, convert_bare=False, preserve_trailing_newlines=True, escape_backslashes=True, fail_on_undefined=None, overrides=None,
                 convert_data=True, static_vars=[''], cache=True, bare_deprecated=True, disable_lookups=False):
        '''
        Templates (possibly recursively) any given data as input. If convert_bare is
        set to True, the given data will be wrapped as a jinja2 variable ('{{foo}}')
        before being sent through the template engine.
        '''
        # NOTE(review): ``static_vars=['']`` is a shared mutable default --
        # only read here, but confirm no caller mutates it.

        # Don't template unsafe variables, just return them.
        if hasattr(variable, '__UNSAFE__'):
            return variable

        if fail_on_undefined is None:
            fail_on_undefined = self._fail_on_undefined_errors

        try:
            if convert_bare:
                variable = self._convert_bare_variable(variable, bare_deprecated=bare_deprecated)

            if isinstance(variable, string_types):
                result = variable
                if self._contains_vars(variable):
                    # Check to see if the string we are trying to render is just referencing a single
                    # var.  In this case we don't want to accidentally change the type of the variable
                    # to a string by using the jinja template renderer. We just want to pass it.
                    only_one = self.SINGLE_VAR.match(variable)
                    if only_one:
                        var_name = only_one.group(1)
                        if var_name in self._available_variables:
                            resolved_val = self._available_variables[var_name]
                            if isinstance(resolved_val, NON_TEMPLATED_TYPES):
                                return resolved_val
                            elif resolved_val is None:
                                return C.DEFAULT_NULL_REPRESENTATION

                    # Using a cache in order to prevent template calls with already templated variables
                    sha1_hash = None
                    if cache:
                        # cache key covers the input and every option that can
                        # change the rendered output
                        variable_hash = sha1(text_type(variable).encode('utf-8'))
                        options_hash = sha1(
                            (
                                text_type(preserve_trailing_newlines) +
                                text_type(escape_backslashes) +
                                text_type(fail_on_undefined) +
                                text_type(overrides)
                            ).encode('utf-8')
                        )
                        sha1_hash = variable_hash.hexdigest() + options_hash.hexdigest()
                    if cache and sha1_hash in self._cached_result:
                        result = self._cached_result[sha1_hash]
                    else:
                        result = self.do_template(
                            variable,
                            preserve_trailing_newlines=preserve_trailing_newlines,
                            escape_backslashes=escape_backslashes,
                            fail_on_undefined=fail_on_undefined,
                            overrides=overrides,
                            disable_lookups=disable_lookups,
                        )

                        # remember unsafety before safe_eval may replace the
                        # string with a python container
                        unsafe = hasattr(result, '__UNSAFE__')
                        if convert_data and not self._no_type_regex.match(variable):
                            # if this looks like a dictionary or list, convert it to such using the safe_eval method
                            if (result.startswith("{") and not result.startswith(self.environment.variable_start_string)) or \
                                    result.startswith("[") or result in ("True", "False"):
                                eval_results = safe_eval(result, locals=self._available_variables, include_exceptions=True)
                                if eval_results[1] is None:
                                    result = eval_results[0]
                                    if unsafe:
                                        result = wrap_var(result)
                                else:
                                    # FIXME: if the safe_eval raised an error, should we do something with it?
                                    pass

                        # we only cache in the case where we have a single variable
                        # name, to make sure we're not putting things which may otherwise
                        # be dynamic in the cache (filters, lookups, etc.)
                        if cache:
                            self._cached_result[sha1_hash] = result

                return result

            elif isinstance(variable, (list, tuple)):
                # recurse into sequence elements (always returns a list)
                return [self.template(
                    v,
                    preserve_trailing_newlines=preserve_trailing_newlines,
                    fail_on_undefined=fail_on_undefined,
                    overrides=overrides,
                    disable_lookups=disable_lookups,
                ) for v in variable]
            elif isinstance(variable, dict):
                d = {}
                # we don't use iteritems() here to avoid problems if the underlying dict
                # changes sizes due to the templating, which can happen with hostvars
                for k in variable.keys():
                    if k not in static_vars:
                        d[k] = self.template(
                            variable[k],
                            preserve_trailing_newlines=preserve_trailing_newlines,
                            fail_on_undefined=fail_on_undefined,
                            overrides=overrides,
                            disable_lookups=disable_lookups,
                        )
                    else:
                        d[k] = variable[k]
                return d
            else:
                # non-templatable type (int, bool, None, ...): pass through
                return variable

        except AnsibleFilterError:
            if self._fail_on_filter_errors:
                raise
            else:
                return variable

    def is_template(self, data):
        ''' lets us know if data has a template'''
        if isinstance(data, string_types):
            try:
                new = self.do_template(data, fail_on_undefined=True)
            except (AnsibleUndefinedVariable, UndefinedError):
                # an undefined variable still proves template syntax is present
                return True
            except:
                # NOTE(review): bare except deliberately treats any other
                # rendering failure as "not a template"
                return False
            return (new != data)
        elif isinstance(data, (list, tuple)):
            for v in data:
                if self.is_template(v):
                    return True
        elif isinstance(data, dict):
            for k in data:
                if self.is_template(k) or self.is_template(data[k]):
                    return True

        return False

    def templatable(self, data):
        '''
        returns True if the data can be templated w/o errors
        '''
        templatable = True
        try:
            self.template(data)
        except:
            templatable = False
        return templatable

    def _contains_vars(self, data):
        '''
        returns True if the data contains a variable pattern
        '''
        if isinstance(data, string_types):
            for marker in (self.environment.block_start_string, self.environment.variable_start_string, self.environment.comment_start_string):
                if marker in data:
                    return True
        return False

    def _convert_bare_variable(self, variable, bare_deprecated):
        '''
        Wraps a bare string, which may have an attribute portion (ie. foo.bar)
        in jinja2 variable braces so that it is evaluated properly.
        '''
        if isinstance(variable, string_types):
            contains_filters = "|" in variable
            # strip attribute/index/filter suffixes to find the root name
            first_part = variable.split("|")[0].split(".")[0].split("[")[0]
            if (contains_filters or first_part in self._available_variables) and self.environment.variable_start_string not in variable:
                if bare_deprecated:
                    display.deprecated("Using bare variables is deprecated."
                                       " Update your playbooks so that the environment value uses the full variable syntax ('%s%s%s')" %
                                       (self.environment.variable_start_string, variable, self.environment.variable_end_string), version='2.7')
                return "%s%s%s" % (self.environment.variable_start_string, variable, self.environment.variable_end_string)

        # the variable didn't meet the conditions to be converted,
        # so just return it as-is
        return variable

    def _finalize(self, thing):
        '''
        A custom finalize method for jinja2, which prevents None from being returned
        '''
        return thing if thing is not None else ''

    def _fail_lookup(self, name, *args, **kwargs):
        # installed as the 'lookup' global when disable_lookups=True
        raise AnsibleError("The lookup `%s` was found, however lookups were disabled from templating" % name)

    def _lookup(self, name, *args, **kwargs):
        '''
        Template-global 'lookup(...)' implementation: loads the named lookup
        plugin, runs it, and wraps its results as unsafe.
        '''
        instance = self._lookup_loader.get(name.lower(), loader=self._loader, templar=self)

        if instance is not None:
            wantlist = kwargs.pop('wantlist', False)

            from ansible.utils.listify import listify_lookup_plugin_terms
            loop_terms = listify_lookup_plugin_terms(terms=args, templar=self, loader=self._loader, fail_on_undefined=True, convert_bare=False)
            # safely catch run failures per #5059
            try:
                ran = instance.run(loop_terms, variables=self._available_variables, **kwargs)
            except (AnsibleUndefinedVariable, UndefinedError) as e:
                raise AnsibleUndefinedVariable(e)
            except Exception as e:
                if self._fail_on_lookup_errors:
                    raise AnsibleError("An unhandled exception occurred while running the lookup plugin '%s'. Error was a %s, "
                                       "original message: %s" % (name, type(e), e))
                ran = None

            if ran:
                if wantlist:
                    ran = wrap_var(ran)
                else:
                    try:
                        # default behavior: join the list into a comma string
                        ran = UnsafeProxy(",".join(ran))
                    except TypeError:
                        # non-string results can't be joined; wrap as-is
                        if isinstance(ran, list) and len(ran) == 1:
                            ran = wrap_var(ran[0])
                        else:
                            ran = wrap_var(ran)

            return ran
        else:
            raise AnsibleError("lookup plugin (%s) not found" % name)

    def do_template(self, data, preserve_trailing_newlines=True, escape_backslashes=True, fail_on_undefined=None, overrides=None, disable_lookups=False):
        '''
        Low-level single-pass render of *data* through jinja2.  Returns the
        rendered text (wrapped unsafe when the context saw unsafe values),
        or the original data when rendering cannot proceed.
        '''
        # For preserving the number of input newlines in the output (used
        # later in this method)
        data_newlines = _count_newlines_from_end(data)

        if fail_on_undefined is None:
            fail_on_undefined = self._fail_on_undefined_errors

        try:
            # allows template header overrides to change jinja2 options.
            if overrides is None:
                myenv = self.environment.overlay()
            else:
                myenv = self.environment.overlay(overrides)

            # Get jinja env overrides from template
            if data.startswith(JINJA2_OVERRIDE):
                # first line is '#jinja2: key:value, key:value' -- strip it
                # and apply each pair to the overlay environment
                eol = data.find('\n')
                line = data[len(JINJA2_OVERRIDE):eol]
                data = data[eol+1:]
                for pair in line.split(','):
                    (key,val) = pair.split(':')
                    key = key.strip()
                    setattr(myenv, key, ast.literal_eval(val.strip()))

            # Adds Ansible custom filters and tests
            myenv.filters.update(self._get_filters())
            myenv.tests.update(self._get_tests())

            if escape_backslashes:
                # Allow users to specify backslashes in playbooks as "\\" instead of as "\\\\".
                data = _escape_backslashes(data, myenv)

            try:
                t = myenv.from_string(data)
            except TemplateSyntaxError as e:
                raise AnsibleError("template error while templating string: %s. String: %s" % (to_native(e), to_native(data)))
            except Exception as e:
                if 'recursion' in to_native(e):
                    raise AnsibleError("recursive loop detected in template string: %s" % to_native(data))
                else:
                    return data

            if disable_lookups:
                t.globals['lookup'] = self._fail_lookup
            else:
                t.globals['lookup'] = self._lookup

            t.globals['finalize'] = self._finalize

            jvars = AnsibleJ2Vars(self, t.globals)

            # shared context so AnsibleContext.unsafe survives the render
            new_context = t.new_context(jvars, shared=True)
            rf = t.root_render_func(new_context)

            try:
                res = j2_concat(rf)
                if new_context.unsafe:
                    res = wrap_var(res)
            except TypeError as te:
                if 'StrictUndefined' in to_native(te):
                    errmsg = "Unable to look up a name or access an attribute in template string (%s).\n" % to_native(data)
                    errmsg += "Make sure your variable name does not contain invalid characters like '-': %s" % to_native(te)
                    raise AnsibleUndefinedVariable(errmsg)
                else:
                    display.debug("failing because of a type error, template data is: %s" % to_native(data))
                    raise AnsibleError("Unexpected templating type error occurred on (%s): %s" % (to_native(data),to_native(te)))

            if preserve_trailing_newlines:
                # The low level calls above do not preserve the newline
                # characters at the end of the input data, so we use the
                # calculate the difference in newlines and append them
                # to the resulting output for parity
                #
                # jinja2 added a keep_trailing_newline option in 2.7 when
                # creating an Environment.  That would let us make this code
                # better (remove a single newline if
                # preserve_trailing_newlines is False).  Once we can depend on
                # that version being present, modify our code to set that when
                # initializing self.environment and remove a single trailing
                # newline here if preserve_newlines is False.
                res_newlines = _count_newlines_from_end(res)
                if data_newlines > res_newlines:
                    res += self.environment.newline_sequence * (data_newlines - res_newlines)
            return res
        except (UndefinedError, AnsibleUndefinedVariable) as e:
            if fail_on_undefined:
                raise AnsibleUndefinedVariable(e)
            else:
                #TODO: return warning about undefined var
                return data

    # for backwards compatibility in case anyone is using old private method directly
    _do_template = do_template
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3268_3 |
crossvul-python_data_bad_3659_2 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2011 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import context
from nova import db
from nova import flags
from nova import log as logging
from nova.openstack.common import cfg
from nova import utils
from nova.virt import netutils
# Module-level logger for firewall driver diagnostics.
LOG = logging.getLogger(__name__)

# Config option: when True, instances on the same network may exchange
# traffic without explicit security-group rules (consulted by the drivers
# below when building per-instance rules).
allow_same_net_traffic_opt = cfg.BoolOpt('allow_same_net_traffic',
        default=True,
        help='Whether to allow network traffic from same network')

FLAGS = flags.FLAGS
FLAGS.register_opt(allow_same_net_traffic_opt)
class FirewallDriver(object):
    """Firewall driver base class.

    Declares the interface any driver providing security groups and
    provider firewall functionality must implement; every method here
    simply raises NotImplementedError.
    """

    def prepare_instance_filter(self, instance, network_info):
        """Set up filters for *instance*; it is not yet running here."""
        raise NotImplementedError()

    def unfilter_instance(self, instance, network_info):
        """Stop filtering *instance*."""
        raise NotImplementedError()

    def apply_instance_filter(self, instance, network_info):
        """Activate the instance filter.

        Once this returns, the instance must be firewalled appropriately.
        Implementations should do as much work as possible up front in
        prepare_instance_filter() and keep this close to a no-op.
        """
        raise NotImplementedError()

    def refresh_security_group_rules(self, security_group_id):
        """Reload security group rules from the data store.

        Called after a rule has been added to or removed from the group.
        """
        raise NotImplementedError()

    def refresh_security_group_members(self, security_group_id):
        """Reload security group members from the data store.

        Called after an instance joins or leaves the security group.
        """
        raise NotImplementedError()

    def refresh_provider_fw_rules(self):
        """Reload the provider-wide rules shared by all hosts/instances.

        Called when the rule list changes via the admin api.
        """
        raise NotImplementedError()

    def setup_basic_filtering(self, instance, network_info):
        """Install anti-spoofing and DHCP rules.

        Runs at spawn time, before prepare_instance_filter().
        """
        raise NotImplementedError()

    def instance_filter_exists(self, instance, network_info):
        """Check that the nova-instance-instance-xxx filter exists."""
        raise NotImplementedError()
class IptablesFirewallDriver(FirewallDriver):
    """Driver which enforces security groups through iptables rules."""

    def __init__(self, **kwargs):
        # imported locally to avoid a circular import with nova.network
        from nova.network import linux_net
        self.iptables = linux_net.iptables_manager
        self.instances = {}        # instance id -> instance ref
        self.network_infos = {}    # instance id -> network_info list
        # NOTE(review): 'basicly_filtered' is never read in this class --
        # confirm whether subclasses/callers use it before removing.
        self.basicly_filtered = False

        # fallback chain: anything not ACCEPTed by a security group rule
        # ends up here and is dropped
        self.iptables.ipv4['filter'].add_chain('sg-fallback')
        self.iptables.ipv4['filter'].add_rule('sg-fallback', '-j DROP')
        self.iptables.ipv6['filter'].add_chain('sg-fallback')
        self.iptables.ipv6['filter'].add_rule('sg-fallback', '-j DROP')

    def setup_basic_filtering(self, instance, network_info):
        # intentionally a no-op for the iptables driver
        pass

    def apply_instance_filter(self, instance, network_info):
        """No-op. Everything is done in prepare_instance_filter."""
        pass

    def unfilter_instance(self, instance, network_info):
        # only unfilter instances we previously filtered
        if self.instances.pop(instance['id'], None):
            # NOTE(vish): use the passed info instead of the stored info
            self.network_infos.pop(instance['id'])
            self.remove_filters_for_instance(instance)
            self.iptables.apply()
        else:
            LOG.info(_('Attempted to unfilter instance %s which is not '
                       'filtered'), instance['id'])

    def prepare_instance_filter(self, instance, network_info):
        # remember the instance so do_refresh_security_group_rules() can
        # rebuild its chains later
        self.instances[instance['id']] = instance
        self.network_infos[instance['id']] = network_info
        self.add_filters_for_instance(instance)
        LOG.debug(_('Filters added to instance %s'), instance['uuid'])
        self.refresh_provider_fw_rules()
        LOG.debug(_('Provider Firewall Rules refreshed'))
        # flush the rules into iptables in one shot
        self.iptables.apply()

    def _create_filter(self, ips, chain_name):
        # one '-d <ip> -j $<chain>' jump rule per address
        return ['-d %s -j $%s' % (ip, chain_name) for ip in ips]

    def _filters_for_instance(self, chain_name, network_info):
        """Creates a rule corresponding to each ip that defines a
        jump to the corresponding instance - chain for all the traffic
        destined to that ip."""
        ips_v4 = [ip['ip'] for (_n, mapping) in network_info
                  for ip in mapping['ips']]
        ipv4_rules = self._create_filter(ips_v4, chain_name)

        ipv6_rules = []
        if FLAGS.use_ipv6:
            ips_v6 = [ip['ip'] for (_n, mapping) in network_info
                      for ip in mapping['ip6s']]
            ipv6_rules = self._create_filter(ips_v6, chain_name)

        return ipv4_rules, ipv6_rules

    def _add_filters(self, chain_name, ipv4_rules, ipv6_rules):
        for rule in ipv4_rules:
            self.iptables.ipv4['filter'].add_rule(chain_name, rule)

        if FLAGS.use_ipv6:
            for rule in ipv6_rules:
                self.iptables.ipv6['filter'].add_rule(chain_name, rule)

    def add_filters_for_instance(self, instance):
        network_info = self.network_infos[instance['id']]
        chain_name = self._instance_chain_name(instance)
        if FLAGS.use_ipv6:
            self.iptables.ipv6['filter'].add_chain(chain_name)
        self.iptables.ipv4['filter'].add_chain(chain_name)
        # jump rules from 'local' into the per-instance chain ...
        ipv4_rules, ipv6_rules = self._filters_for_instance(chain_name,
                                                            network_info)
        self._add_filters('local', ipv4_rules, ipv6_rules)
        # ... and the actual security group rules inside that chain
        ipv4_rules, ipv6_rules = self.instance_rules(instance, network_info)
        self._add_filters(chain_name, ipv4_rules, ipv6_rules)

    def remove_filters_for_instance(self, instance):
        chain_name = self._instance_chain_name(instance)

        self.iptables.ipv4['filter'].remove_chain(chain_name)
        if FLAGS.use_ipv6:
            self.iptables.ipv6['filter'].remove_chain(chain_name)

    @staticmethod
    def _security_group_chain_name(security_group_id):
        return 'nova-sg-%s' % (security_group_id,)

    def _instance_chain_name(self, instance):
        return 'inst-%s' % (instance['id'],)

    def _do_basic_rules(self, ipv4_rules, ipv6_rules, network_info):
        # Always drop invalid packets
        ipv4_rules += ['-m state --state ' 'INVALID -j DROP']
        ipv6_rules += ['-m state --state ' 'INVALID -j DROP']

        # Allow established connections
        ipv4_rules += ['-m state --state ESTABLISHED,RELATED -j ACCEPT']
        ipv6_rules += ['-m state --state ESTABLISHED,RELATED -j ACCEPT']

        # Pass through provider-wide drops
        ipv4_rules += ['-j $provider']
        ipv6_rules += ['-j $provider']

    def _do_dhcp_rules(self, ipv4_rules, network_info):
        # allow DHCP responses from each network's dhcp server
        dhcp_servers = [info['dhcp_server'] for (_n, info) in network_info]

        for dhcp_server in dhcp_servers:
            ipv4_rules.append('-s %s -p udp --sport 67 --dport 68 '
                              '-j ACCEPT' % (dhcp_server,))

    def _do_project_network_rules(self, ipv4_rules, ipv6_rules, network_info):
        # accept all traffic originating from the instance's own networks
        cidrs = [network['cidr'] for (network, _i) in network_info]
        for cidr in cidrs:
            ipv4_rules.append('-s %s -j ACCEPT' % (cidr,))
        if FLAGS.use_ipv6:
            cidrv6s = [network['cidr_v6'] for (network, _i) in
                       network_info]
            for cidrv6 in cidrv6s:
                ipv6_rules.append('-s %s -j ACCEPT' % (cidrv6,))

    def _do_ra_rules(self, ipv6_rules, network_info):
        # accept router advertisements from each network's v6 gateway
        gateways_v6 = [mapping['gateway_v6'] for (_n, mapping) in
                       network_info]
        for gateway_v6 in gateways_v6:
            ipv6_rules.append(
                '-s %s/128 -p icmpv6 -j ACCEPT' % (gateway_v6,))

    def _build_icmp_rule(self, rule, version):
        # security-group rules encode icmp type/code in from_port/to_port;
        # -1 means "any"
        icmp_type = rule.from_port
        icmp_code = rule.to_port

        if icmp_type == -1:
            icmp_type_arg = None
        else:
            icmp_type_arg = '%s' % icmp_type
            if not icmp_code == -1:
                icmp_type_arg += '/%s' % icmp_code

        if icmp_type_arg:
            if version == 4:
                return ['-m', 'icmp', '--icmp-type', icmp_type_arg]
            elif version == 6:
                return ['-m', 'icmp6', '--icmpv6-type', icmp_type_arg]
        # return empty list if icmp_type == -1
        return []

    def _build_tcp_udp_rule(self, rule, version):
        if rule.from_port == rule.to_port:
            return ['--dport', '%s' % (rule.from_port,)]
        else:
            return ['-m', 'multiport',
                    '--dports', '%s:%s' % (rule.from_port,
                                           rule.to_port)]

    def instance_rules(self, instance, network_info):
        '''Build the full ipv4/ipv6 rule lists for one instance's chain.'''
        ctxt = context.get_admin_context()

        ipv4_rules = []
        ipv6_rules = []

        # Initialize with basic rules
        self._do_basic_rules(ipv4_rules, ipv6_rules, network_info)
        # Set up rules to allow traffic to/from DHCP server
        self._do_dhcp_rules(ipv4_rules, network_info)

        #Allow project network traffic
        if FLAGS.allow_same_net_traffic:
            self._do_project_network_rules(ipv4_rules, ipv6_rules,
                                           network_info)
        # We wrap these in FLAGS.use_ipv6 because they might cause
        # a DB lookup. The other ones are just list operations, so
        # they're not worth the clutter.
        if FLAGS.use_ipv6:
            # Allow RA responses
            self._do_ra_rules(ipv6_rules, network_info)

        security_groups = db.security_group_get_by_instance(ctxt,
                                                            instance['id'])

        # then, security group chains and rules
        for security_group in security_groups:
            rules = db.security_group_rule_get_by_security_group(ctxt,
                                                        security_group['id'])

            for rule in rules:
                LOG.debug(_('Adding security group rule: %r'), rule)

                # rules without a cidr grant access to another security
                # group instead; treat those as v4 for dispatch purposes
                if not rule.cidr:
                    version = 4
                else:
                    version = netutils.get_ip_version(rule.cidr)

                if version == 4:
                    fw_rules = ipv4_rules
                else:
                    fw_rules = ipv6_rules

                protocol = rule.protocol
                if version == 6 and rule.protocol == 'icmp':
                    protocol = 'icmpv6'

                args = ['-j ACCEPT']
                if protocol:
                    args += ['-p', protocol]

                if protocol in ['udp', 'tcp']:
                    args += self._build_tcp_udp_rule(rule, version)
                elif protocol == 'icmp':
                    args += self._build_icmp_rule(rule, version)
                if rule.cidr:
                    LOG.info('Using cidr %r', rule.cidr)
                    args += ['-s', rule.cidr]
                    fw_rules += [' '.join(args)]
                else:
                    if rule['grantee_group']:
                        # FIXME(jkoelker) This needs to be ported up into
                        #                 the compute manager which already
                        #                 has access to a nw_api handle,
                        #                 and should be the only one making
                        #                 making rpc calls.
                        import nova.network
                        nw_api = nova.network.API()
                        # NOTE(review): this loop variable shadows the
                        # method's 'instance' parameter; any code after the
                        # loop would see the last grantee instance instead.
                        for instance in rule['grantee_group']['instances']:
                            LOG.info('instance: %r', instance)
                            nw_info = nw_api.get_instance_nw_info(ctxt,
                                                                  instance)

                            ips = [ip['address']
                                   for ip in nw_info.fixed_ips()
                                   if ip['version'] == version]

                            LOG.info('ips: %r', ips)

                            for ip in ips:
                                subrule = args + ['-s %s' % ip]
                                fw_rules += [' '.join(subrule)]

                LOG.info('Using fw_rules: %r', fw_rules)

        # anything that did not match a security-group rule is dropped by
        # the shared fallback chain
        ipv4_rules += ['-j $sg-fallback']
        ipv6_rules += ['-j $sg-fallback']

        return ipv4_rules, ipv6_rules

    def instance_filter_exists(self, instance, network_info):
        # iptables chains are applied synchronously; nothing to poll for
        pass

    def refresh_security_group_members(self, security_group):
        self.do_refresh_security_group_rules(security_group)
        self.iptables.apply()

    def refresh_security_group_rules(self, security_group):
        self.do_refresh_security_group_rules(security_group)
        self.iptables.apply()

    @utils.synchronized('iptables', external=True)
    def do_refresh_security_group_rules(self, security_group):
        # rebuild every tracked instance's chain from scratch
        for instance in self.instances.values():
            self.remove_filters_for_instance(instance)
            self.add_filters_for_instance(instance)

    def refresh_provider_fw_rules(self):
        """See :class:`FirewallDriver` docs."""
        self._do_refresh_provider_fw_rules()
        self.iptables.apply()

    @utils.synchronized('iptables', external=True)
    def _do_refresh_provider_fw_rules(self):
        """Internal, synchronized version of refresh_provider_fw_rules."""
        self._purge_provider_fw_rules()
        self._build_provider_fw_rules()

    def _purge_provider_fw_rules(self):
        """Remove all rules from the provider chains."""
        self.iptables.ipv4['filter'].empty_chain('provider')
        if FLAGS.use_ipv6:
            self.iptables.ipv6['filter'].empty_chain('provider')

    def _build_provider_fw_rules(self):
        """Create all rules for the provider IP DROPs."""
        self.iptables.ipv4['filter'].add_chain('provider')
        if FLAGS.use_ipv6:
            self.iptables.ipv6['filter'].add_chain('provider')
        ipv4_rules, ipv6_rules = self._provider_rules()
        for rule in ipv4_rules:
            self.iptables.ipv4['filter'].add_rule('provider', rule)

        if FLAGS.use_ipv6:
            for rule in ipv6_rules:
                self.iptables.ipv6['filter'].add_rule('provider', rule)

    @staticmethod
    def _provider_rules():
        """Generate a list of rules from provider for IP4 & IP6."""
        ctxt = context.get_admin_context()
        ipv4_rules = []
        ipv6_rules = []
        rules = db.provider_fw_rule_get_all(ctxt)
        for rule in rules:
            LOG.debug(_('Adding provider rule: %s'), rule['cidr'])
            version = netutils.get_ip_version(rule['cidr'])
            if version == 4:
                fw_rules = ipv4_rules
            else:
                fw_rules = ipv6_rules

            protocol = rule['protocol']
            if version == 6 and protocol == 'icmp':
                protocol = 'icmpv6'

            args = ['-p', protocol, '-s', rule['cidr']]

            if protocol in ['udp', 'tcp']:
                if rule['from_port'] == rule['to_port']:
                    args += ['--dport', '%s' % (rule['from_port'],)]
                else:
                    args += ['-m', 'multiport',
                             '--dports', '%s:%s' % (rule['from_port'],
                                                    rule['to_port'])]
            elif protocol == 'icmp':
                # same -1 = "any" encoding as _build_icmp_rule above
                icmp_type = rule['from_port']
                icmp_code = rule['to_port']

                if icmp_type == -1:
                    icmp_type_arg = None
                else:
                    icmp_type_arg = '%s' % icmp_type
                    if not icmp_code == -1:
                        icmp_type_arg += '/%s' % icmp_code

                if icmp_type_arg:
                    if version == 4:
                        args += ['-m', 'icmp', '--icmp-type',
                                 icmp_type_arg]
                    elif version == 6:
                        args += ['-m', 'icmp6', '--icmpv6-type',
                                 icmp_type_arg]
            args += ['-j DROP']
            fw_rules += [' '.join(args)]
        return ipv4_rules, ipv6_rules
class NoopFirewallDriver(object):
    """Firewall driver which just provides No-op methods.

    Used when firewalling is handled elsewhere (or not at all): every
    FirewallDriver method silently does nothing, and filter-existence
    checks always report success so callers never block.
    """

    def __init__(self, *args, **kwargs):
        # Idiom fix: explicit ``self`` (original relied on *args swallowing
        # the instance).  Accept and ignore whatever arguments the real
        # drivers take so this is a drop-in replacement.
        pass

    def _noop(self, *args, **kwargs):
        """Shared do-nothing implementation returned for unknown attributes."""
        pass

    def __getattr__(self, key):
        # Any attribute not explicitly defined resolves to the no-op, so
        # this class keeps working even if the FirewallDriver interface
        # grows new methods.
        return self._noop

    def instance_filter_exists(self, instance, network_info):
        """Always report True so callers never wait for filters to appear."""
        return True
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3659_2 |
crossvul-python_data_good_2141_3 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import sys
import re
import os
import shlex
import yaml
import copy
import optparse
import operator
from ansible import errors
from ansible import __version__
from ansible.utils import template
from ansible.utils.display_functions import *
from ansible.utils.plugins import *
from ansible.callbacks import display
import ansible.constants as C
import ast
import time
import StringIO
import stat
import termios
import tty
import pipes
import random
import difflib
import warnings
import traceback
import getpass
import sys
import json
from vault import VaultLib
VERBOSITY=0
MAX_FILE_SIZE_FOR_DIFF=1*1024*1024
try:
import json
except ImportError:
import simplejson as json
try:
from hashlib import md5 as _md5
except ImportError:
from md5 import md5 as _md5
PASSLIB_AVAILABLE = False
try:
import passlib.hash
PASSLIB_AVAILABLE = True
except:
pass
try:
import builtin
except ImportError:
import __builtin__ as builtin
KEYCZAR_AVAILABLE=False
try:
try:
# some versions of pycrypto may not have this?
from Crypto.pct_warnings import PowmInsecureWarning
except ImportError:
PowmInsecureWarning = RuntimeWarning
with warnings.catch_warnings(record=True) as warning_handler:
warnings.simplefilter("error", PowmInsecureWarning)
try:
import keyczar.errors as key_errors
from keyczar.keys import AesKey
except PowmInsecureWarning:
system_warning(
"The version of gmp you have installed has a known issue regarding " + \
"timing vulnerabilities when used with pycrypto. " + \
"If possible, you should update it (ie. yum update gmp)."
)
warnings.resetwarnings()
warnings.simplefilter("ignore")
import keyczar.errors as key_errors
from keyczar.keys import AesKey
KEYCZAR_AVAILABLE=True
except ImportError:
pass
###############################################################
# Abstractions around keyczar
###############################################################
def key_for_hostname(hostname):
    '''
    Return the keyczar AES key used for accelerated-mode traffic to
    *hostname*, generating (and persisting) a fresh one when none exists
    or the cached key is older than two hours.

    Raises AnsibleError when keyczar is missing or the key directory /
    file permissions are unsafe.
    '''
    # fireball mode is an implementation of ansible firing up zeromq via SSH
    # to use no persistent daemons or key management
    if not KEYCZAR_AVAILABLE:
        raise errors.AnsibleError("python-keyczar must be installed on the control machine to use accelerated modes")
    key_path = os.path.expanduser(C.ACCELERATE_KEYS_DIR)
    if not os.path.exists(key_path):
        # create the key directory, then tighten to the configured perms
        os.makedirs(key_path, mode=0700)
        os.chmod(key_path, int(C.ACCELERATE_KEYS_DIR_PERMS, 8))
    elif not os.path.isdir(key_path):
        raise errors.AnsibleError('ACCELERATE_KEYS_DIR is not a directory.')
    # refuse to use a key dir that other users could read or write
    if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_DIR_PERMS, 8):
        raise errors.AnsibleError('Incorrect permissions on the private key directory. Use `chmod 0%o %s` to correct this issue, and make sure any of the keys files contained within that directory are set to 0%o' % (int(C.ACCELERATE_KEYS_DIR_PERMS, 8), C.ACCELERATE_KEYS_DIR, int(C.ACCELERATE_KEYS_FILE_PERMS, 8)))
    key_path = os.path.join(key_path, hostname)
    # use new AES keys every 2 hours, which means fireball must not allow running for longer either
    if not os.path.exists(key_path) or (time.time() - os.path.getmtime(key_path) > 60*60*2):
        key = AesKey.Generate()
        # open with restrictive perms atomically rather than chmod-ing later
        fd = os.open(key_path, os.O_WRONLY | os.O_CREAT, int(C.ACCELERATE_KEYS_FILE_PERMS, 8))
        fh = os.fdopen(fd, 'w')
        fh.write(str(key))
        fh.close()
        return key
    else:
        # same paranoia for the per-host key file itself
        if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_FILE_PERMS, 8):
            raise errors.AnsibleError('Incorrect permissions on the key file for this host. Use `chmod 0%o %s` to correct this issue.' % (int(C.ACCELERATE_KEYS_FILE_PERMS, 8), key_path))
        fh = open(key_path)
        key = AesKey.Read(fh.read())
        fh.close()
        return key
def encrypt(key, msg):
    ''' encrypt msg with the given keyczar AES key and return the ciphertext '''
    return key.Encrypt(msg)
def decrypt(key, msg):
    '''
    decrypt msg with the given keyczar AES key; an invalid signature
    (tampered or garbage ciphertext) is surfaced as AnsibleError
    '''
    try:
        return key.Decrypt(msg)
    except key_errors.InvalidSignatureError:
        raise errors.AnsibleError("decryption failed")
###############################################################
# UTILITY FUNCTIONS FOR COMMAND LINE TOOLS
###############################################################
def err(msg):
    ''' print an error message to stderr '''
    # Python 2 print-chevron syntax; writes msg + newline to sys.stderr
    print >> sys.stderr, msg
def exit(msg, rc=1):
    ''' quit with an error to stdout and a failure code '''
    # NOTE: shadows the builtin exit(); kept for backward compatibility
    err(msg)
    sys.exit(rc)
def jsonify(result, format=False):
    ''' format a result dict as JSON (compact, or pretty-printed when format=True) '''
    if result is None:
        return "{}"
    # shallow copy so the caller's dict is not mutated by the decode pass
    result2 = result.copy()
    for key, value in result2.items():
        if type(value) is str:
            # byte strings must become unicode before json.dumps;
            # undecodable bytes are dropped rather than raising
            result2[key] = value.decode('utf-8', 'ignore')
    if format:
        return json.dumps(result2, sort_keys=True, indent=4)
    else:
        return json.dumps(result2, sort_keys=True)
def write_tree_file(tree, hostname, buf):
    ''' write buf into the file <tree>/<hostname>, replacing prior content '''
    # TODO: might be nice to append playbook runs per host in a similar way
    # in which case, we'd want append mode.
    path = os.path.join(tree, hostname)
    # context manager guarantees the handle is closed even if write() raises
    # (the original leaked the descriptor on failure)
    with open(path, "w+") as fd:
        fd.write(buf)
def is_failed(result):
    ''' is a given JSON result a failed result? '''
    # a nonzero return code always means failure
    if result.get('rc', 0) != 0:
        return True
    # otherwise honour an explicit "failed" flag (bool or string form)
    return result.get('failed', False) in (True, 'True', 'true')
def is_changed(result):
    ''' is a given JSON result a changed result? '''
    # modules may report the flag as a real bool or as a string
    changed = result.get('changed', False)
    return changed in (True, 'True', 'true')
def check_conditional(conditional, basedir, inject, fail_on_undefined=False):
    '''
    Evaluate a 'when:'-style conditional against the 'inject' variable
    dictionary via Jinja2 templating and return True/False.

    conditional may be None/'' (vacuously true), a list of conditionals
    (implicit AND), a non-string (returned as-is), or a Jinja2 expression
    string.  Raises AnsibleError when the expression cannot be evaluated.
    '''
    if conditional is None or conditional == '':
        return True
    if isinstance(conditional, list):
        # a list of conditionals is an implicit AND: all must hold
        for x in conditional:
            if not check_conditional(x, basedir, inject, fail_on_undefined=fail_on_undefined):
                return False
        return True
    if not isinstance(conditional, basestring):
        # already a boolean (or other non-string) -- nothing to template
        return conditional
    conditional = conditional.replace("jinja2_compare ","")
    # allow variable names
    if conditional in inject and '-' not in str(inject[conditional]):
        conditional = inject[conditional]
    conditional = template.template(basedir, conditional, inject, fail_on_undefined=fail_on_undefined)
    original = str(conditional).replace("jinja2_compare ","")
    # a Jinja2 evaluation that results in something Python can eval!
    presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
    conditional = template.template(basedir, presented, inject)
    val = conditional.strip()
    if val == presented:
        # the templating failed, meaning most likely a
        # variable was undefined. If we happened to be
        # looking for an undefined variable, return True,
        # otherwise fail
        if "is undefined" in conditional:
            return True
        elif "is defined" in conditional:
            return False
        else:
            raise errors.AnsibleError("error while evaluating conditional: %s" % original)
    elif val == "True":
        return True
    elif val == "False":
        return False
    else:
        raise errors.AnsibleError("unable to evaluate conditional: %s" % original)
def is_executable(path):
    '''truthy when any execute bit (user, group or other) is set on path'''
    # stat once and test all three execute bits against the same mode word
    mode = os.stat(path)[stat.ST_MODE]
    return (stat.S_IXUSR & mode) or (stat.S_IXGRP & mode) or (stat.S_IXOTH & mode)
def unfrackpath(path):
    '''
    returns a path that is free of symlinks, environment
    variables, relative path traversals and symbols (~)
    example:
    '$HOME/../../var/mail' becomes '/var/spool/mail'
    '''
    # expand "~" first, then environment variables, then resolve
    # symlinks / ".." components and normalise the result
    expanded = os.path.expanduser(path)
    expanded = os.path.expandvars(expanded)
    return os.path.normpath(os.path.realpath(expanded))
def prepare_writeable_dir(tree,mode=0777):
    ''' make sure a directory exists and is writeable; returns the normalized path '''
    # modify the mode to ensure the owner at least
    # has read/write access to this directory
    mode |= 0700
    # make sure the tree path is always expanded
    # and normalized and free of symlinks
    tree = unfrackpath(tree)
    if not os.path.exists(tree):
        try:
            os.makedirs(tree, mode)
        except (IOError, OSError), e:
            raise errors.AnsibleError("Could not make dir %s: %s" % (tree, e))
    # even a pre-existing directory must be writeable by us
    if not os.access(tree, os.W_OK):
        raise errors.AnsibleError("Cannot write to path %s" % tree)
    return tree
def path_dwim(basedir, given):
    '''
    make relative paths work like folks expect.
    '''
    # absolute paths and "~" paths ignore basedir entirely
    if given.startswith("/"):
        return os.path.abspath(given)
    if given.startswith("~"):
        return os.path.abspath(os.path.expanduser(given))
    # anything else is resolved relative to basedir (cwd when unset)
    base = "." if basedir is None else basedir
    return os.path.abspath(os.path.join(base, given))
def path_dwim_relative(original, dirname, source, playbook_base, check=True):
    ''' find one file in a directory one level up in a dir named dirname relative to current '''
    # (used by roles code)
    basedir = os.path.dirname(original)
    if os.path.islink(basedir):
        # symlinked role directory: resolve the link and look directly
        # in <resolved>/<dirname>/<source>
        basedir = unfrackpath(basedir)
        template2 = os.path.join(basedir, dirname, source)
    else:
        # normal layout: sibling directory one level up
        template2 = os.path.join(basedir, '..', dirname, source)
    source2 = path_dwim(basedir, template2)
    if os.path.exists(source2):
        return source2
    # fall back to a path relative to the playbook itself
    obvious_local_path = path_dwim(playbook_base, source)
    if os.path.exists(obvious_local_path):
        return obvious_local_path
    if check:
        raise errors.AnsibleError("input file not found at %s or %s" % (source2, obvious_local_path))
    return source2 # which does not exist
def json_loads(data):
    ''' parse a JSON string and return a data structure '''
    # thin convenience wrapper so callers need not import json themselves
    parsed = json.loads(data)
    return parsed
def _clean_data(orig_data):
    ''' remove template tags from a string '''
    data = orig_data
    if isinstance(orig_data, basestring):
        # neutralise Jinja2 delimiters by rewriting them into comment
        # markers, so remote-supplied data cannot be re-templated
        for pattern,replacement in (('{{','{#'), ('}}','#}'), ('{%','{#'), ('%}','#}')):
            data = data.replace(pattern, replacement)
    return data
def _clean_data_struct(orig_data):
    '''
    walk a complex data structure, and use _clean_data() to
    remove any template tags that may exist
    '''
    if isinstance(orig_data, dict):
        # copy so the caller's dict is not mutated; keys AND values
        # are both cleaned (a cleaned key replaces the original entry)
        data = orig_data.copy()
        for key in data:
            new_key = _clean_data_struct(key)
            new_val = _clean_data_struct(data[key])
            if key != new_key:
                del data[key]
            data[new_key] = new_val
    elif isinstance(orig_data, list):
        # shallow copy of the list, cleaning each element in place
        data = orig_data[:]
        for i in range(0, len(data)):
            data[i] = _clean_data_struct(data[i])
    elif isinstance(orig_data, basestring):
        data = _clean_data(orig_data)
    else:
        # numbers, booleans, None, etc. pass through untouched
        data = orig_data
    return data
def parse_json(raw_data, from_remote=False):
''' this version for module return data only '''
orig_data = raw_data
# ignore stuff like tcgetattr spewage or other warnings
data = filter_leading_non_json_lines(raw_data)
try:
results = json.loads(data)
except:
# not JSON, but try "Baby JSON" which allows many of our modules to not
# require JSON and makes writing modules in bash much simpler
results = {}
try:
tokens = shlex.split(data)
except:
print "failed to parse json: "+ data
raise
for t in tokens:
if "=" not in t:
raise errors.AnsibleError("failed to parse: %s" % orig_data)
(key,value) = t.split("=", 1)
if key == 'changed' or 'failed':
if value.lower() in [ 'true', '1' ]:
value = True
elif value.lower() in [ 'false', '0' ]:
value = False
if key == 'rc':
value = int(value)
results[key] = value
if len(results.keys()) == 0:
return { "failed" : True, "parsed" : False, "msg" : orig_data }
if from_remote:
results = _clean_data_struct(results)
return results
def smush_braces(data):
    ''' smush Jinja2 braces so unresolved templates like {{ foo }} don't get parsed weird by key=value code '''
    # keep replacing until no padded delimiter remains, so any number of
    # spaces next to the braces is collapsed
    for needle, repl in (('{{ ', '{{'), (' }}', '}}')):
        while needle in data:
            data = data.replace(needle, repl)
    return data
def smush_ds(data):
    '''
    recursively apply smush_braces() to every string inside a data
    structure loaded from YAML
    '''
    # things like key={{ foo }} are not handled by shlex.split well, so preprocess any YAML we load
    # so we do not have to call smush elsewhere
    if type(data) == list:
        return [ smush_ds(x) for x in data ]
    elif type(data) == dict:
        # values are rewritten in place; keys are left untouched
        for (k,v) in data.items():
            data[k] = smush_ds(v)
        return data
    elif isinstance(data, basestring):
        return smush_braces(data)
    else:
        # non-container, non-string scalars pass through unchanged
        return data
def parse_yaml(data, path_hint=None):
    ''' convert a yaml string to a data structure. Also supports JSON, ssssssh!!!'''
    stripped_data = data.lstrip()
    loaded = None
    if stripped_data.startswith("{") or stripped_data.startswith("["):
        # since the line starts with { or [ we can infer this is a JSON document.
        try:
            loaded = json.loads(data)
        except ValueError, ve:
            # include the file path in the error when the caller gave us one
            if path_hint:
                raise errors.AnsibleError(path_hint + ": " + str(ve))
            else:
                raise errors.AnsibleError(str(ve))
    else:
        # else this is pretty sure to be a YAML document
        loaded = yaml.safe_load(data)
    # collapse "{{ foo }}" to "{{foo}}" throughout before returning
    return smush_ds(loaded)
def process_common_errors(msg, probline, column):
    '''
    Append a human-friendly hint to a YAML syntax error message when the
    offending line matches a well-known mistake (unquoted templates,
    extra colons, unbalanced quotes).  Returns the augmented message.
    '''
    replaced = probline.replace(" ","")
    if ":{{" in replaced and "}}" in replaced:
        # value starts with an unquoted Jinja2 template
        msg = msg + """
This one looks easy to fix. YAML thought it was looking for the start of a
hash/dictionary and was confused to see a second "{". Most likely this was
meant to be an ansible template evaluation instead, so we have to give the
parser a small hint that we wanted a string instead. The solution here is to
just quote the entire value.
For instance, if the original line was:
app_path: {{ base_path }}/foo
It should be written as:
app_path: "{{ base_path }}/foo"
"""
        return msg
    elif len(probline) and len(probline) > 1 and len(probline) > column and probline[column] == ":" and probline.count(':') > 1:
        # a second, unquoted colon on the line confused the parser
        msg = msg + """
This one looks easy to fix. There seems to be an extra unquoted colon in the line
and this is confusing the parser. It was only expecting to find one free
colon. The solution is just add some quotes around the colon, or quote the
entire line after the first colon.
For instance, if the original line was:
copy: src=file.txt dest=/path/filename:with_colon.txt
It can be written as:
copy: src=file.txt dest='/path/filename:with_colon.txt'
Or:
copy: 'src=file.txt dest=/path/filename:with_colon.txt'
"""
        return msg
    else:
        parts = probline.split(":")
        if len(parts) > 1:
            middle = parts[1].strip()
            match = False
            unbalanced = False
            # a value that opens with a quote but does not close with one
            if middle.startswith("'") and not middle.endswith("'"):
                match = True
            elif middle.startswith('"') and not middle.endswith('"'):
                match = True
            # NOTE(review): 'and' binds tighter than 'or' here, so the
            # double-quote count check stands alone; parentheses around the
            # two count() tests may have been intended -- confirm.
            if len(middle) > 0 and middle[0] in [ '"', "'" ] and middle[-1] in [ '"', "'" ] and probline.count("'") > 2 or probline.count('"') > 2:
                unbalanced = True
            if match:
                msg = msg + """
This one looks easy to fix. It seems that there is a value started
with a quote, and the YAML parser is expecting to see the line ended
with the same kind of quote. For instance:
when: "ok" in result.stdout
Could be written as:
when: '"ok" in result.stdout'
or equivalently:
when: "'ok' in result.stdout"
"""
                return msg
            if unbalanced:
                msg = msg + """
We could be wrong, but this one looks like it might be an issue with
unbalanced quotes. If starting a value with a quote, make sure the
line ends with the same set of quotes. For instance this arbitrary
example:
foo: "bad" "wolf"
Could be written as:
foo: '"bad" "wolf"'
"""
                return msg
    return msg
def process_yaml_error(exc, data, path=None, show_content=True):
    '''
    Convert a yaml.YAMLError into an AnsibleYAMLValidationFailed with a
    readable message pointing at the offending line.  When show_content
    is False (e.g. vault-encrypted source), only the position is shown,
    never the file content.  Always raises.
    '''
    if hasattr(exc, 'problem_mark'):
        mark = exc.problem_mark
        if show_content:
            if mark.line -1 >= 0:
                before_probline = data.split("\n")[mark.line-1]
            else:
                before_probline = ''
            probline = data.split("\n")[mark.line]
            # caret pointing at the reported column
            arrow = " " * mark.column + "^"
            msg = """Syntax Error while loading YAML script, %s
Note: The error may actually appear before this position: line %s, column %s
%s
%s
%s""" % (path, mark.line + 1, mark.column + 1, before_probline, probline, arrow)
            unquoted_var = None
            if '{{' in probline and '}}' in probline:
                # NOTE(review): this 'or' is true unless BOTH quote styles
                # appear on the line; 'and' may have been intended -- confirm.
                if '"{{' not in probline or "'{{" not in probline:
                    unquoted_var = True
            if not unquoted_var:
                msg = process_common_errors(msg, probline, mark.column)
            else:
                msg = msg + """
We could be wrong, but this one looks like it might be an issue with
missing quotes. Always quote template expression brackets when they
start a value. For instance:
with_items:
- {{ foo }}
Should be written as:
with_items:
- "{{ foo }}"
"""
        else:
            # most likely displaying a file with sensitive content,
            # so don't show any of the actual lines of yaml just the
            # line number itself
            msg = """Syntax error while loading YAML script, %s
The error appears to have been on line %s, column %s, but may actually
be before there depending on the exact syntax problem.
""" % (path, mark.line + 1, mark.column + 1)
    else:
        # No problem markers means we have to throw a generic
        # "stuff messed up" type message. Sry bud.
        if path:
            msg = "Could not parse YAML. Check over %s again." % path
        else:
            msg = "Could not parse YAML."
    raise errors.AnsibleYAMLValidationFailed(msg)
def parse_yaml_from_file(path, vault_password=None):
    ''' convert a yaml file to a data structure, decrypting it first if vault-encrypted '''
    data = None
    show_content = True
    try:
        data = open(path).read()
    except IOError:
        raise errors.AnsibleError("file could not read: %s" % path)
    vault = VaultLib(password=vault_password)
    if vault.is_encrypted(data):
        data = vault.decrypt(data)
        # never echo decrypted vault content back in error messages
        show_content = False
    try:
        return parse_yaml(data, path_hint=path)
    except yaml.YAMLError, exc:
        process_yaml_error(exc, data, path, show_content)
def parse_kv(args):
    ''' convert a string of key/value items to a dict '''
    options = {}
    if args is not None:
        # attempting to split a unicode here does bad things
        args = args.encode('utf-8')
        try:
            # posix=True honours shell-style quoting of values with spaces
            vargs = shlex.split(args, posix=True)
        except ValueError, ve:
            if 'no closing quotation' in str(ve).lower():
                raise errors.AnsibleError("error parsing argument string, try quoting the entire line.")
            else:
                raise
        vargs = [x.decode('utf-8') for x in vargs]
        for x in vargs:
            # tokens without '=' are silently ignored
            if "=" in x:
                k, v = x.split("=",1)
                options[k]=v
    return options
def merge_hash(a, b):
    ''' recursively merges hash b into a
    keys from b take precedence over keys from a '''
    # deep copy so neither input is mutated
    result = copy.deepcopy(a)
    # next, iterate over b keys and values
    for k, v in b.items():
        # recurse only when BOTH sides hold a dict for this key.
        # FIX: previously only result[k] was checked, so a non-dict value
        # in b crashed on v.iteritems() instead of overriding the key.
        if k in result and isinstance(result[k], dict) and isinstance(v, dict):
            result[k] = merge_hash(result[k], v)
        else:
            # otherwise b's value simply wins
            result[k] = v
    return result
def md5s(data):
    ''' Return MD5 hex digest of data. '''
    hasher = _md5()
    try:
        hasher.update(data)
    except UnicodeEncodeError:
        # unicode text has to be serialised to utf-8 bytes before hashing
        hasher.update(data.encode('utf-8'))
    return hasher.hexdigest()
def md5(filename):
    ''' Return MD5 hex digest of local file, None if file is not present or a directory. '''
    if not os.path.exists(filename) or os.path.isdir(filename):
        return None
    digest = _md5()
    # read in 64 KiB chunks so large files are never loaded whole into memory
    blocksize = 64 * 1024
    try:
        infile = open(filename, 'rb')
        block = infile.read(blocksize)
        while block:
            digest.update(block)
            block = infile.read(blocksize)
        infile.close()
    except IOError, e:
        raise errors.AnsibleError("error while accessing the file %s, error was: %s" % (filename, e))
    return digest.hexdigest()
def default(value, function):
    ''' syntactic sugar around lazy evaluation of defaults '''
    # function() is only invoked when value is actually missing
    return function() if value is None else value
def _gitinfo():
    ''' returns a string containing git branch, commit id and commit date '''
    result = None
    # look for a .git four directories up from this module (repo checkout layout)
    repo_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', '.git')
    if os.path.exists(repo_path):
        # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
        if os.path.isfile(repo_path):
            try:
                gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
                # There is a posibility the .git file to have an absolute path.
                if os.path.isabs(gitdir):
                    repo_path = gitdir
                else:
                    repo_path = os.path.join(repo_path.split('.git')[0], gitdir)
            except (IOError, AttributeError):
                return ''
        # HEAD holds "ref: refs/heads/<branch>"; keep just the branch name
        f = open(os.path.join(repo_path, "HEAD"))
        branch = f.readline().split('/')[-1].rstrip("\n")
        f.close()
        branch_path = os.path.join(repo_path, "refs", "heads", branch)
        if os.path.exists(branch_path):
            f = open(branch_path)
            # abbreviated (10-char) commit id
            commit = f.readline()[:10]
            f.close()
            # approximate the commit date with the ref file's mtime
            date = time.localtime(os.stat(branch_path).st_mtime)
            if time.daylight == 0:
                offset = time.timezone
            else:
                offset = time.altzone
            result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit,
                time.strftime("%Y/%m/%d %H:%M:%S", date), offset / -36)
    else:
        result = ''
    return result
def version(prog):
    ''' return a version banner: "<prog> <version>", plus git info when running from a checkout '''
    banner = "{0} {1}".format(prog, __version__)
    gitinfo = _gitinfo()
    if gitinfo:
        banner = "{0} {1}".format(banner, gitinfo)
    return banner
def getch():
    ''' read in a single character '''
    fd = sys.stdin.fileno()
    # remember current terminal settings so we can restore them
    old_settings = termios.tcgetattr(fd)
    try:
        # raw mode: return on one keypress, no echo, no line buffering
        tty.setraw(sys.stdin.fileno())
        ch = sys.stdin.read(1)
    finally:
        # always restore the terminal, even if the read raises
        termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return ch
def sanitize_output(str):
    ''' strips private info out of a string '''
    # keys whose values must never be echoed back
    private_keys = ['password', 'login_password']
    # matches credential-in-URL shapes such as user:pass@host
    filter_re = [
        re.compile('^(?P<before>.*:)(?P<password>.*)(?P<after>\@.*)$'),
    ]
    cleaned = []
    for token in str.split():
        try:
            key, value = token.split('=', 1)
        except:
            # not a key=value token; keep it verbatim
            cleaned.append(token)
            continue
        if key in private_keys:
            cleaned.append('%s=VALUE_HIDDEN' % key)
            continue
        replaced = None
        for pattern in filter_re:
            m = pattern.match(value)
            if m:
                groups = m.groupdict()
                # blank out just the password portion of the URL
                replaced = '%s=%s' % (key, groups['before'] + "********" + groups['after'])
                break
        cleaned.append(replaced if replaced is not None else token)
    return ' '.join(cleaned)
####################################################################
# option handling code for /usr/bin/ansible and ansible-playbook
# below this line
class SortedOptParser(optparse.OptionParser):
    '''Optparser which sorts the options by opt before outputting --help'''
    def format_help(self, formatter=None):
        # sort in place by each option's string (e.g. "--check") so that
        # the --help listing comes out alphabetically
        self.option_list.sort(key=operator.methodcaller('get_opt_string'))
        return optparse.OptionParser.format_help(self, formatter=None)
def increment_debug(option, opt, value, parser):
    ''' optparse callback: each -v on the command line bumps the module-level VERBOSITY counter '''
    global VERBOSITY
    VERBOSITY += 1
def base_parser(constants=C, usage="", output_opts=False, runas_opts=False,
    async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, diff_opts=False):
    ''' create an options parser for any ansible script

    The boolean flags switch on optional option groups (output, run-as,
    async, connection, subset, check-mode, diff) so each CLI tool only
    exposes what it supports.  Returns the configured SortedOptParser.
    '''
    parser = SortedOptParser(usage, version=version("%prog"))
    # options shared by every ansible command line tool
    parser.add_option('-v','--verbose', default=False, action="callback",
        callback=increment_debug, help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")
    parser.add_option('-f','--forks', dest='forks', default=constants.DEFAULT_FORKS, type='int',
        help="specify number of parallel processes to use (default=%s)" % constants.DEFAULT_FORKS)
    parser.add_option('-i', '--inventory-file', dest='inventory',
        help="specify inventory host file (default=%s)" % constants.DEFAULT_HOST_LIST,
        default=constants.DEFAULT_HOST_LIST)
    parser.add_option('-k', '--ask-pass', default=False, dest='ask_pass', action='store_true',
        help='ask for SSH password')
    parser.add_option('--private-key', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
        help='use this file to authenticate the connection')
    parser.add_option('-K', '--ask-sudo-pass', default=False, dest='ask_sudo_pass', action='store_true',
        help='ask for sudo password')
    parser.add_option('--ask-su-pass', default=False, dest='ask_su_pass', action='store_true',
        help='ask for su password')
    parser.add_option('--ask-vault-pass', default=False, dest='ask_vault_pass', action='store_true',
        help='ask for vault password')
    parser.add_option('--vault-password-file', default=None, dest='vault_password_file',
        help="vault password file")
    parser.add_option('--list-hosts', dest='listhosts', action='store_true',
        help='outputs a list of matching hosts; does not execute anything else')
    parser.add_option('-M', '--module-path', dest='module_path',
        help="specify path(s) to module library (default=%s)" % constants.DEFAULT_MODULE_PATH,
        default=None)
    if subset_opts:
        parser.add_option('-l', '--limit', default=constants.DEFAULT_SUBSET, dest='subset',
            help='further limit selected hosts to an additional pattern')
    parser.add_option('-T', '--timeout', default=constants.DEFAULT_TIMEOUT, type='int',
        dest='timeout',
        help="override the SSH timeout in seconds (default=%s)" % constants.DEFAULT_TIMEOUT)
    if output_opts:
        parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
            help='condense output')
        parser.add_option('-t', '--tree', dest='tree', default=None,
            help='log output to this directory')
    if runas_opts:
        # sudo/su identity switching options
        parser.add_option("-s", "--sudo", default=constants.DEFAULT_SUDO, action="store_true",
            dest='sudo', help="run operations with sudo (nopasswd)")
        parser.add_option('-U', '--sudo-user', dest='sudo_user', default=None,
            help='desired sudo user (default=root)')  # Can't default to root because we need to detect when this option was given
        parser.add_option('-u', '--user', default=constants.DEFAULT_REMOTE_USER,
            dest='remote_user', help='connect as this user (default=%s)' % constants.DEFAULT_REMOTE_USER)
        parser.add_option('-S', '--su', default=constants.DEFAULT_SU,
            action='store_true', help='run operations with su')
        parser.add_option('-R', '--su-user', help='run operations with su as this '
            'user (default=%s)' % constants.DEFAULT_SU_USER)
    if connect_opts:
        parser.add_option('-c', '--connection', dest='connection',
            default=C.DEFAULT_TRANSPORT,
            help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)
    if async_opts:
        parser.add_option('-P', '--poll', default=constants.DEFAULT_POLL_INTERVAL, type='int',
            dest='poll_interval',
            help="set the poll interval if using -B (default=%s)" % constants.DEFAULT_POLL_INTERVAL)
        parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
            help='run asynchronously, failing after X seconds (default=N/A)')
    if check_opts:
        parser.add_option("-C", "--check", default=False, dest='check', action='store_true',
            help="don't make any changes; instead, try to predict some of the changes that may occur"
        )
    if diff_opts:
        parser.add_option("-D", "--diff", default=False, dest='diff', action='store_true',
            help="when changing (small) files and templates, show the differences in those files; works great with --check"
        )
    return parser
def ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=False, confirm_vault=False, confirm_new=False):
    '''
    Interactively prompt for the current and/or new vault password,
    optionally asking for confirmation.  Returns the tuple
    (vault_pass, new_vault_pass); entries not requested are None.
    '''
    vault_pass = None
    new_vault_pass = None
    if ask_vault_pass:
        vault_pass = getpass.getpass(prompt="Vault password: ")
    if ask_vault_pass and confirm_vault:
        vault_pass2 = getpass.getpass(prompt="Confirm Vault password: ")
        if vault_pass != vault_pass2:
            raise errors.AnsibleError("Passwords do not match")
    if ask_new_vault_pass:
        new_vault_pass = getpass.getpass(prompt="New Vault password: ")
    if ask_new_vault_pass and confirm_new:
        new_vault_pass2 = getpass.getpass(prompt="Confirm New Vault password: ")
        if new_vault_pass != new_vault_pass2:
            raise errors.AnsibleError("Passwords do not match")
    # enforce no newline chars at the end of passwords
    if vault_pass:
        vault_pass = vault_pass.strip()
    if new_vault_pass:
        new_vault_pass = new_vault_pass.strip()
    return vault_pass, new_vault_pass
def ask_passwords(ask_pass=False, ask_sudo_pass=False, ask_su_pass=False, ask_vault_pass=False):
    '''
    Interactively prompt for the requested subset of SSH / sudo / su /
    vault passwords.  Returns (sshpass, sudopass, su_pass, vault_pass);
    entries not requested are None.
    '''
    sshpass = None
    sudopass = None
    su_pass = None
    vault_pass = None
    sudo_prompt = "sudo password: "
    su_prompt = "su password: "
    if ask_pass:
        sshpass = getpass.getpass(prompt="SSH password: ")
        # hint that an empty sudo password will reuse the SSH password
        sudo_prompt = "sudo password [defaults to SSH password]: "
    if ask_sudo_pass:
        sudopass = getpass.getpass(prompt=sudo_prompt)
        if ask_pass and sudopass == '':
            # empty answer means "same as SSH"
            sudopass = sshpass
    if ask_su_pass:
        su_pass = getpass.getpass(prompt=su_prompt)
    if ask_vault_pass:
        vault_pass = getpass.getpass(prompt="Vault password: ")
    return (sshpass, sudopass, su_pass, vault_pass)
def do_encrypt(result, encrypt, salt_size=None, salt=None):
    '''
    Hash 'result' with the passlib algorithm named by 'encrypt'
    (e.g. "sha512_crypt"), optionally with an explicit salt or salt size.
    Raises AnsibleError when passlib is missing or the algorithm unknown.
    '''
    if PASSLIB_AVAILABLE:
        try:
            # look the algorithm up by name on passlib.hash
            crypt = getattr(passlib.hash, encrypt)
        except:
            raise errors.AnsibleError("passlib does not support '%s' algorithm" % encrypt)
        if salt_size:
            result = crypt.encrypt(result, salt_size=salt_size)
        elif salt:
            result = crypt.encrypt(result, salt=salt)
        else:
            result = crypt.encrypt(result)
    else:
        raise errors.AnsibleError("passlib must be installed to encrypt vars_prompt values")
    return result
def last_non_blank_line(buf):
    ''' return the last line of buf that has any characters; "" when every line is empty '''
    # walk the lines from the bottom up and stop at the first non-empty one
    for line in reversed(buf.splitlines()):
        if line:
            return line
    # shouldn't occur unless there's no output
    return ""
def filter_leading_non_json_lines(buf):
    '''
    used to avoid random output from SSH at the top of JSON output, like messages from
    tcagetattr, or where dropbear spews MOTD on every single command (which is nuts).
    Drops every leading line until one looks like JSON ('{' / '[') or
    key=value output; everything from that point on is kept, since
    multiline JSON is valid.  Every kept line is newline-terminated.
    '''
    kv_regex = re.compile(r'.*\w+=\w+.*')
    # collect kept lines and join once at the end instead of the old
    # Python-2-only StringIO buffer (same output, simpler and portable)
    kept_lines = []
    stop_filtering = False
    for line in buf.splitlines():
        if stop_filtering or kv_regex.match(line) or line.startswith('{') or line.startswith('['):
            stop_filtering = True
            kept_lines.append(line)
    if not kept_lines:
        return ''
    return '\n'.join(kept_lines) + '\n'
def boolean(value):
    ''' loosely interpret value as a boolean: "true"/"t"/"y"/"1"/"yes" (any case) are True '''
    truthy = ("true", "t", "y", "1", "yes")
    # stringify first so real bools and ints are handled the same way
    return str(value).lower() in truthy
def make_sudo_cmd(sudo_user, executable, cmd):
    """
    helper function for connection plugins to create sudo commands

    Returns (full_command, password_prompt, success_marker): the caller
    reads output until it sees the randomised prompt (password needed)
    or the success marker (command started).
    """
    # Rather than detect if sudo wants a password this time, -k makes
    # sudo always ask for a password if one is required.
    # Passing a quoted compound command to sudo (or sudo -s)
    # directly doesn't work, so we shellquote it with pipes.quote()
    # and pass the quoted string to the user's shell. We loop reading
    # output until we see the randomly-generated sudo prompt set with
    # the -p option.
    randbits = ''.join(chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
    prompt = '[sudo via ansible, key=%s] password: ' % randbits
    success_key = 'SUDO-SUCCESS-%s' % randbits
    sudocmd = '%s -k && %s %s -S -p "%s" -u %s %s -c %s' % (
        C.DEFAULT_SUDO_EXE, C.DEFAULT_SUDO_EXE, C.DEFAULT_SUDO_FLAGS,
        prompt, sudo_user, executable or '$SHELL', pipes.quote('echo %s; %s' % (success_key, cmd)))
    return ('/bin/sh -c ' + pipes.quote(sudocmd), prompt, success_key)
def make_su_cmd(su_user, executable, cmd):
    """
    Helper function for connection plugins to create direct su commands

    Returns (full_command, password_prompt_regex, success_marker),
    mirroring make_sudo_cmd(); the prompt here is a regex because su's
    prompt text is not configurable.
    """
    # TODO: work on this function
    randbits = ''.join(chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
    prompt = '[Pp]assword: ?$'
    success_key = 'SUDO-SUCCESS-%s' % randbits
    sudocmd = '%s %s %s -c "%s -c %s"' % (
        C.DEFAULT_SU_EXE, C.DEFAULT_SU_FLAGS, su_user, executable or '$SHELL',
        pipes.quote('echo %s; %s' % (success_key, cmd))
    )
    return ('/bin/sh -c ' + pipes.quote(sudocmd), prompt, success_key)
# types that are already unicode (or absent) and need no decoding
_TO_UNICODE_TYPES = (unicode, type(None))

def to_unicode(value):
    ''' return value as unicode: byte strings are decoded as utf-8, unicode/None pass through '''
    if isinstance(value, _TO_UNICODE_TYPES):
        return value
    return value.decode("utf-8")
def get_diff(diff):
    '''
    Render a module-returned diff dict as unified-diff text for --diff
    output.  Recognised keys: before/after (content), before_header /
    after_header (labels), and the *_binary / *_larger skip markers.
    '''
    # called by --diff usage in playbook and runner via callbacks
    # include names in diffs 'before' and 'after' and do diff -U 10
    try:
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            ret = []
            # skip markers produced by modules when a real diff is impossible
            if 'dst_binary' in diff:
                ret.append("diff skipped: destination file appears to be binary\n")
            if 'src_binary' in diff:
                ret.append("diff skipped: source file appears to be binary\n")
            if 'dst_larger' in diff:
                ret.append("diff skipped: destination file size is greater than %d\n" % diff['dst_larger'])
            if 'src_larger' in diff:
                ret.append("diff skipped: source file size is greater than %d\n" % diff['src_larger'])
            if 'before' in diff and 'after' in diff:
                if 'before_header' in diff:
                    before_header = "before: %s" % diff['before_header']
                else:
                    before_header = 'before'
                if 'after_header' in diff:
                    after_header = "after: %s" % diff['after_header']
                else:
                    after_header = 'after'
                # 10 lines of context, like "diff -U 10"
                differ = difflib.unified_diff(to_unicode(diff['before']).splitlines(True), to_unicode(diff['after']).splitlines(True), before_header, after_header, '', '', 10)
                for line in list(differ):
                    ret.append(line)
            return u"".join(ret)
    except UnicodeDecodeError:
        return ">> the files are different, but the diff library cannot compare unicode strings"
def is_list_of_strings(items):
    ''' True when every element of items is a (byte or unicode) string '''
    for x in items:
        if not isinstance(x, basestring):
            return False
    return True
def list_union(a, b):
    ''' union of two lists: first-seen order preserved, duplicates dropped.
    Membership is tested with list containment so unhashable elements
    (e.g. dicts) are supported. '''
    result = []
    for seq in (a, b):
        for item in seq:
            if item not in result:
                result.append(item)
    return result
def list_intersection(a, b):
    ''' elements of a that also appear in b, deduplicated, in a's order.
    Uses list containment so unhashable elements are supported. '''
    result = []
    for item in a:
        if item not in b:
            continue
        if item in result:
            continue
        result.append(item)
    return result
def safe_eval(expr, locals=None, include_exceptions=False):
    '''
    this is intended for allowing things like:
    with_items: a_list_variable
    where Jinja2 would return a string
    but we do not want to allow it to call functions (outside of Jinja2, where
    the env is constrained)
    Based on:
    http://stackoverflow.com/questions/12523516/using-ast-and-whitelists-to-make-pythons-eval-safe
    '''
    # FIX: the default used to be a shared mutable dict (locals={});
    # a None sentinel gives every call its own fresh namespace.
    if locals is None:
        locals = {}
    # this is the whitelist of AST nodes we are going to
    # allow in the evaluation. Any node type other than
    # those listed here will raise an exception in our custom
    # visitor class defined below.
    SAFE_NODES = set(
        (
            ast.Add,
            ast.BinOp,
            ast.Call,
            ast.Compare,
            ast.Dict,
            ast.Div,
            ast.Expression,
            ast.List,
            ast.Load,
            ast.Mult,
            ast.Num,
            ast.Name,
            ast.Str,
            ast.Sub,
            ast.Tuple,
            ast.UnaryOp,
        )
    )
    # AST node types were expanded after 2.6
    if not sys.version.startswith('2.6'):
        # FIX: set.union() returns a NEW set; the original discarded the
        # result, so ast.Set was never actually added to the whitelist.
        SAFE_NODES = SAFE_NODES.union(set((ast.Set,)))
    # names provided by Jinja2 filter plugins are legitimate callables
    filter_list = []
    for filter in filter_loader.all():
        filter_list.extend(filter.filters().keys())
    CALL_WHITELIST = C.DEFAULT_CALLABLE_WHITELIST + filter_list
    class CleansingNodeVisitor(ast.NodeVisitor):
        def generic_visit(self, node, inside_call=False):
            if type(node) not in SAFE_NODES:
                raise Exception("invalid expression (%s)" % expr)
            elif isinstance(node, ast.Call):
                inside_call = True
            elif isinstance(node, ast.Name) and inside_call:
                # forbid calling any real builtin that is not whitelisted
                if hasattr(builtin, node.id) and node.id not in CALL_WHITELIST:
                    raise Exception("invalid function: %s" % node.id)
            # iterate over all child nodes
            for child_node in ast.iter_child_nodes(node):
                self.generic_visit(child_node, inside_call)
    if not isinstance(expr, basestring):
        # already templated to a datastructure, perhaps?
        if include_exceptions:
            return (expr, None)
        return expr
    cnv = CleansingNodeVisitor()
    try:
        parsed_tree = ast.parse(expr, mode='eval')
        cnv.visit(parsed_tree)
        compiled = compile(parsed_tree, expr, 'eval')
        result = eval(compiled, {}, locals)
        if include_exceptions:
            return (result, None)
        else:
            return result
    except SyntaxError:
        # special handling for syntax errors, we just return
        # the expression string back as-is
        if include_exceptions:
            return (expr, None)
        return expr
    except Exception as e:
        if include_exceptions:
            return (expr, e)
        return expr
def listify_lookup_plugin_terms(terms, basedir, inject):
    '''
    Normalize the terms handed to a lookup plugin into a real list.

    A bare string may name a variable or contain an inline expression; it is
    run through the templating engine and, if it still looks like a
    serialized container, safely evaluated back into a datastructure.
    '''
    if isinstance(terms, basestring):
        # someone did:
        # with_items: alist
        # OR
        # with_items: {{ alist }}
        stripped = terms.strip()
        if not (stripped.startswith('{') or stripped.startswith('[')) and not stripped.startswith("/") and not stripped.startswith('set(['):
            # if not already a list, get ready to evaluate with Jinja2
            # not sure why the "/" is in above code :)
            try:
                new_terms = template.template(basedir, "{{ %s }}" % terms, inject)
                if isinstance(new_terms, basestring) and "{{" in new_terms:
                    # templating did not resolve the variable; keep the raw
                    # string rather than an unrendered template
                    pass
                else:
                    terms = new_terms
            except:
                # NOTE(review): errors are deliberately swallowed so that a
                # lookup of an undefined variable degrades to the raw string
                pass
    if '{' in terms or '[' in terms:
        # Jinja2 already evaluated a variable to a list.
        # Jinja2-ified list needs to be converted back to a real type
        # TODO: something a bit less heavy than eval
        return safe_eval(terms)
    if isinstance(terms, basestring):
        # single bare term: wrap it so callers always get a list
        terms = [ terms ]
    return terms
def combine_vars(a, b):
    '''
    Combine two variable dicts according to the configured hash behaviour:
    recursively merged, or (the default) a shallow combine where b's keys win.
    '''
    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
        return merge_hash(a, b)
    else:
        # shallow combine; values from b replace values from a on conflict
        return dict(a.items() + b.items())
def random_password(length=20, chars=C.DEFAULT_PASSWORD_CHARS):
    '''Return a random password string of length containing only chars.'''
    # Rejection sampling over os.urandom: draw one random byte at a time and
    # keep only bytes that fall inside the allowed alphabet, so every allowed
    # character is equally likely.
    chosen = []
    while len(chosen) < length:
        candidate = os.urandom(1)
        if candidate in chars:
            chosen.append(candidate)
    return ''.join(chosen)
def before_comment(msg):
    ''' what's the part of a string before a comment? '''
    # Temporarily mask escaped hashes so they survive the split, then
    # restore them as literal '#' characters in the result.
    placeholder = "**NOT_A_COMMENT**"
    masked = msg.replace("\#", placeholder)
    head = masked.partition("#")[0]
    return head.replace(placeholder, "#")
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_2141_3 |
crossvul-python_data_bad_3766_1 | from __future__ import with_statement
import os
import re
import urllib
from django.conf import settings
from django.contrib.sites.models import Site, RequestSite
from django.contrib.auth.models import User
from django.core import mail
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import QueryDict
from django.utils.encoding import force_unicode
from django.utils.html import escape
from django.test import TestCase
from django.test.utils import override_settings
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import (AuthenticationForm, PasswordChangeForm,
SetPasswordForm, PasswordResetForm)
class AuthViewsTestCase(TestCase):
    """
    Helper base class for all the following test cases.
    """
    fixtures = ['authtestdata.json']
    urls = 'django.contrib.auth.tests.urls'
    def setUp(self):
        # Pin the language settings so rendered messages are predictable,
        # and point TEMPLATE_DIRS at the test templates next to this module.
        self.old_LANGUAGES = settings.LANGUAGES
        self.old_LANGUAGE_CODE = settings.LANGUAGE_CODE
        settings.LANGUAGES = (('en', 'English'),)
        settings.LANGUAGE_CODE = 'en'
        self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
        settings.TEMPLATE_DIRS = (
            os.path.join(os.path.dirname(__file__), 'templates'),
        )
    def tearDown(self):
        # Restore the settings patched in setUp().
        settings.LANGUAGES = self.old_LANGUAGES
        settings.LANGUAGE_CODE = self.old_LANGUAGE_CODE
        settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
    def login(self, password='password'):
        """Log the fixture user in and assert the login round-trip worked."""
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': password,
            })
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith(settings.LOGIN_REDIRECT_URL))
        self.assertTrue(SESSION_KEY in self.client.session)
    def assertContainsEscaped(self, response, text, **kwargs):
        """assertContains() against the HTML-escaped form of ``text``."""
        return self.assertContains(response, escape(force_unicode(text)), **kwargs)
# Apply USE_TZ=False to the whole class; written as a call rather than with
# decorator syntax -- presumably for compatibility with the Python versions
# this file still supports (TODO confirm).
AuthViewsTestCase = override_settings(USE_TZ=False)(AuthViewsTestCase)
class AuthViewNamedURLTests(AuthViewsTestCase):
    """Sanity checks for the named URL patterns in django.contrib.auth.urls."""
    urls = 'django.contrib.auth.urls'
    def test_named_urls(self):
        "Named URLs should be reversible"
        # (name, args, kwargs) triples covering every auth view URL.
        expected_named_urls = [
            ('login', [], {}),
            ('logout', [], {}),
            ('password_change', [], {}),
            ('password_change_done', [], {}),
            ('password_reset', [], {}),
            ('password_reset_done', [], {}),
            ('password_reset_confirm', [], {
                'uidb36': 'aaaaaaa',
                'token': '1111-aaaaa',
            }),
            ('password_reset_complete', [], {}),
        ]
        for name, args, kwargs in expected_named_urls:
            try:
                reverse(name, args=args, kwargs=kwargs)
            except NoReverseMatch:
                self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name)
class PasswordResetTest(AuthViewsTestCase):
    """End-to-end tests of the password reset flow (email + confirm link)."""
    def test_email_not_found(self):
        "Error is raised if the provided email address isn't currently registered"
        response = self.client.get('/password_reset/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
        self.assertContainsEscaped(response, PasswordResetForm.error_messages['unknown'])
        self.assertEqual(len(mail.outbox), 0)
    def test_email_found(self):
        "Email is sent if a valid email address is provided for password reset"
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertTrue("http://" in mail.outbox[0].body)
        self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
    def test_email_found_custom_from(self):
        "Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
        response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)
    def _test_confirm_start(self):
        """Trigger a reset email and return (full_url, path) from its body."""
        # Start by creating the email
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])
    def _read_signup_email(self, email):
        """Extract the confirm URL embedded in the reset email body."""
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertTrue(urlmatch is not None, "No URL found in sent email")
        return urlmatch.group(), urlmatch.groups()[0]
    def test_confirm_valid(self):
        """A fresh confirm link renders the new-password form."""
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertEqual(response.status_code, 200)
        self.assertTrue("Please enter your new password" in response.content)
    def test_confirm_invalid(self):
        """A tampered token is rejected (still HTTP 200, error page)."""
        url, path = self._test_confirm_start()
        # Let's munge the token in the path, but keep the same length,
        # in case the URLconf will reject a different length.
        path = path[:-5] + ("0" * 4) + path[-1]
        response = self.client.get(path)
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The password reset link was invalid" in response.content)
    def test_confirm_invalid_user(self):
        # Ensure that we get a 200 response for a non-existant user, not a 404
        response = self.client.get('/reset/123456-1-1/')
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The password reset link was invalid" in response.content)
    def test_confirm_overflow_user(self):
        # Ensure that we get a 200 response for a base36 user id that overflows int
        response = self.client.get('/reset/zzzzzzzzzzzzz-1-1/')
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The password reset link was invalid" in response.content)
    def test_confirm_invalid_post(self):
        # Same as test_confirm_invalid, but trying
        # to do a POST instead.
        url, path = self._test_confirm_start()
        path = path[:-5] + ("0" * 4) + path[-1]
        self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': ' anewpassword',
        })
        # Check the password has not been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(not u.check_password("anewpassword"))
    def test_confirm_complete(self):
        """A successful reset changes the password and invalidates the link."""
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'anewpassword'})
        # It redirects us to a 'complete' page:
        self.assertEqual(response.status_code, 302)
        # Check the password has been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(u.check_password("anewpassword"))
        # Check we can't use the link again
        response = self.client.get(path)
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The password reset link was invalid" in response.content)
    def test_confirm_different_passwords(self):
        """Mismatched new passwords re-render the form with an error."""
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'x'})
        self.assertEqual(response.status_code, 200)
        self.assertContainsEscaped(response, SetPasswordForm.error_messages['password_mismatch'])
class ChangePasswordTest(AuthViewsTestCase):
    """Tests for the logged-in password change views."""
    def fail_login(self, password='password'):
        """Attempt a login expected to fail and assert the error message."""
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': password,
            })
        self.assertEqual(response.status_code, 200)
        self.assertContainsEscaped(response, AuthenticationForm.error_messages['invalid_login'])
    def logout(self):
        """Log the test client out."""
        response = self.client.get('/logout/')
    def test_password_change_fails_with_invalid_old_password(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'donuts',
            'new_password1': 'password1',
            'new_password2': 'password1',
            })
        self.assertEqual(response.status_code, 200)
        self.assertContainsEscaped(response, PasswordChangeForm.error_messages['password_incorrect'])
    def test_password_change_fails_with_mismatched_passwords(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'donuts',
            })
        self.assertEqual(response.status_code, 200)
        self.assertContainsEscaped(response, SetPasswordForm.error_messages['password_mismatch'])
    def test_password_change_succeeds(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
            })
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/password_change/done/'))
        # old password no longer works, new one does
        self.fail_login()
        self.login(password='password1')
    def test_password_change_done_succeeds(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
            })
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/password_change/done/'))
    def test_password_change_done_fails(self):
        # anonymous access to the done page redirects to login
        with self.settings(LOGIN_URL='/login/'):
            response = self.client.get('/password_change/done/')
            self.assertEqual(response.status_code, 302)
            self.assertTrue(response['Location'].endswith('/login/?next=/password_change/done/'))
class LoginTest(AuthViewsTestCase):
    """Tests for the login view, including redirect safety checks."""
    def test_current_site_in_context_after_login(self):
        response = self.client.get(reverse('django.contrib.auth.views.login'))
        self.assertEqual(response.status_code, 200)
        if Site._meta.installed:
            site = Site.objects.get_current()
            self.assertEqual(response.context['site'], site)
            self.assertEqual(response.context['site_name'], site.name)
        else:
            self.assertIsInstance(response.context['site'], RequestSite)
        self.assertTrue(isinstance(response.context['form'], AuthenticationForm),
                     'Login form is not an AuthenticationForm')
    def test_security_check(self, password='password'):
        login_url = reverse('django.contrib.auth.views.login')
        # Those URLs should not pass the security check
        for bad_url in ('http://example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        '//example.com'):
            nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                'url': login_url,
                'next': REDIRECT_FIELD_NAME,
                'bad_url': urllib.quote(bad_url),
            }
            response = self.client.post(nasty_url, {
                'username': 'testclient',
                'password': password,
            })
            self.assertEqual(response.status_code, 302)
            self.assertFalse(bad_url in response['Location'],
                             "%s should be blocked" % bad_url)
        # These URLs *should* still pass the security check
        # NOTE(review): 'https:///' and '//testserver/' are accepted here --
        # scheme-relative/odd targets are treated as safe by this version of
        # the check; worth confirming that is the intended behavior.
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https:///',
                         '//testserver/',
                         '/url%20with%20spaces/'): # see ticket #12534
            safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                'url': login_url,
                'next': REDIRECT_FIELD_NAME,
                'good_url': urllib.quote(good_url),
            }
            response = self.client.post(safe_url, {
                'username': 'testclient',
                'password': password,
            })
            self.assertEqual(response.status_code, 302)
            self.assertTrue(good_url in response['Location'],
                            "%s should be allowed" % good_url)
class LoginURLSettings(AuthViewsTestCase):
    """Tests for how LOGIN_URL variants affect login_required redirects."""
    def setUp(self):
        super(LoginURLSettings, self).setUp()
        # remember the original so tearDown can restore it
        self.old_LOGIN_URL = settings.LOGIN_URL
    def tearDown(self):
        super(LoginURLSettings, self).tearDown()
        settings.LOGIN_URL = self.old_LOGIN_URL
    def get_login_required_url(self, login_url):
        """Set LOGIN_URL and return where /login_required/ redirects to."""
        settings.LOGIN_URL = login_url
        response = self.client.get('/login_required/')
        self.assertEqual(response.status_code, 302)
        return response['Location']
    def test_standard_login_url(self):
        login_url = '/login/'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = '/login_required/'
        # urlencode('/') keeps slashes unescaped in the next parameter
        self.assertEqual(login_required_url, 'http://testserver%s?%s' %
                         (login_url, querystring.urlencode('/')))
    def test_remote_login_url(self):
        # a fully-qualified LOGIN_URL gets an absolute next target
        login_url = 'http://remote.example.com/login'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = 'http://testserver/login_required/'
        self.assertEqual(login_required_url,
                         '%s?%s' % (login_url, querystring.urlencode('/')))
    def test_https_login_url(self):
        login_url = 'https:///login/'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = 'http://testserver/login_required/'
        self.assertEqual(login_required_url,
                         '%s?%s' % (login_url, querystring.urlencode('/')))
    def test_login_url_with_querystring(self):
        # an existing query string on LOGIN_URL is preserved and extended
        login_url = '/login/?pretty=1'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('pretty=1', mutable=True)
        querystring['next'] = '/login_required/'
        self.assertEqual(login_required_url, 'http://testserver/login/?%s' %
                         querystring.urlencode('/'))
    def test_remote_login_url_with_next_querystring(self):
        # an explicit next= on LOGIN_URL is replaced with the real target
        login_url = 'http://remote.example.com/login/'
        login_required_url = self.get_login_required_url('%s?next=/default/' %
                                                         login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = 'http://testserver/login_required/'
        self.assertEqual(login_required_url, '%s?%s' % (login_url,
                                                        querystring.urlencode('/')))
class LogoutTest(AuthViewsTestCase):
    """Tests for the logout view and its redirect handling."""
    def confirm_logged_out(self):
        """Assert the session no longer carries an authenticated user."""
        self.assertTrue(SESSION_KEY not in self.client.session)
    def test_logout_default(self):
        "Logout without next_page option renders the default template"
        self.login()
        response = self.client.get('/logout/')
        self.assertEqual(200, response.status_code)
        self.assertTrue('Logged out' in response.content)
        self.confirm_logged_out()
    def test_14377(self):
        # Bug 14377
        self.login()
        response = self.client.get('/logout/')
        self.assertTrue('site' in response.context)
    def test_logout_with_overridden_redirect_url(self):
        # Bug 11223
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/somewhere/'))
        # the querystring overrides the view's next_page argument
        response = self.client.get('/logout/next_page/?next=/login/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/login/'))
        self.confirm_logged_out()
    def test_logout_with_next_page_specified(self):
        "Logout with next_page option given redirects to specified resource"
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/somewhere/'))
        self.confirm_logged_out()
    def test_logout_with_redirect_argument(self):
        "Logout with query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/?next=/login/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/login/'))
        self.confirm_logged_out()
    def test_logout_with_custom_redirect_argument(self):
        "Logout with custom query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/custom_query/?follow=/somewhere/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/somewhere/'))
        self.confirm_logged_out()
    def test_security_check(self, password='password'):
        logout_url = reverse('django.contrib.auth.views.logout')
        # Those URLs should not pass the security check
        for bad_url in ('http://example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        '//example.com'):
            nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                'url': logout_url,
                'next': REDIRECT_FIELD_NAME,
                'bad_url': urllib.quote(bad_url),
            }
            self.login()
            response = self.client.get(nasty_url)
            self.assertEqual(response.status_code, 302)
            self.assertFalse(bad_url in response['Location'],
                             "%s should be blocked" % bad_url)
            self.confirm_logged_out()
        # These URLs *should* still pass the security check
        # NOTE(review): as in LoginTest, 'https:///' and '//testserver/' are
        # accepted -- confirm scheme-relative targets are intended to be safe.
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https:///',
                         '//testserver/',
                         '/url%20with%20spaces/'): # see ticket #12534
            safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                'url': logout_url,
                'next': REDIRECT_FIELD_NAME,
                'good_url': urllib.quote(good_url),
            }
            self.login()
            response = self.client.get(safe_url)
            self.assertEqual(response.status_code, 302)
            self.assertTrue(good_url in response['Location'],
                            "%s should be allowed" % good_url)
            self.confirm_logged_out()
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3766_1 |
crossvul-python_data_bad_2816_1 | # -*- coding: utf-8 -*-
'''
TCP transport classes
Wire protocol: "len(payload) msgpack({'head': SOMEHEADER, 'body': SOMEBODY})"
'''
# Import Python Libs
from __future__ import absolute_import
import logging
import msgpack
import socket
import os
import weakref
import time
import traceback
import errno
# Import Salt Libs
import salt.crypt
import salt.utils
import salt.utils.verify
import salt.utils.event
import salt.utils.async
import salt.payload
import salt.exceptions
import salt.transport.frame
import salt.transport.ipc
import salt.transport.client
import salt.transport.server
import salt.transport.mixins.auth
import salt.ext.six as six
from salt.exceptions import SaltReqTimeoutError, SaltClientError
from salt.transport import iter_transport_opts
# Import Tornado Libs
import tornado
import tornado.tcpserver
import tornado.gen
import tornado.concurrent
import tornado.tcpclient
import tornado.netutil
# pylint: disable=import-error,no-name-in-module
if six.PY2:
import urlparse
else:
import urllib.parse as urlparse
# pylint: enable=import-error,no-name-in-module
# Import third party libs
try:
from Cryptodome.Cipher import PKCS1_OAEP
except ImportError:
from Crypto.Cipher import PKCS1_OAEP
if six.PY3 and salt.utils.is_windows():
USE_LOAD_BALANCER = True
else:
USE_LOAD_BALANCER = False
if USE_LOAD_BALANCER:
import threading
import multiprocessing
import errno
import tornado.util
from salt.utils.process import SignalHandlingMultiprocessingProcess
log = logging.getLogger(__name__)
def _set_tcp_keepalive(sock, opts):
'''
Ensure that TCP keepalives are set for the socket.
'''
if hasattr(socket, 'SO_KEEPALIVE'):
if opts.get('tcp_keepalive', False):
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
if hasattr(socket, 'SOL_TCP'):
if hasattr(socket, 'TCP_KEEPIDLE'):
tcp_keepalive_idle = opts.get('tcp_keepalive_idle', -1)
if tcp_keepalive_idle > 0:
sock.setsockopt(
socket.SOL_TCP, socket.TCP_KEEPIDLE,
int(tcp_keepalive_idle))
if hasattr(socket, 'TCP_KEEPCNT'):
tcp_keepalive_cnt = opts.get('tcp_keepalive_cnt', -1)
if tcp_keepalive_cnt > 0:
sock.setsockopt(
socket.SOL_TCP, socket.TCP_KEEPCNT,
int(tcp_keepalive_cnt))
if hasattr(socket, 'TCP_KEEPINTVL'):
tcp_keepalive_intvl = opts.get('tcp_keepalive_intvl', -1)
if tcp_keepalive_intvl > 0:
sock.setsockopt(
socket.SOL_TCP, socket.TCP_KEEPINTVL,
int(tcp_keepalive_intvl))
if hasattr(socket, 'SIO_KEEPALIVE_VALS'):
# Windows doesn't support TCP_KEEPIDLE, TCP_KEEPCNT, nor
# TCP_KEEPINTVL. Instead, it has its own proprietary
# SIO_KEEPALIVE_VALS.
tcp_keepalive_idle = opts.get('tcp_keepalive_idle', -1)
tcp_keepalive_intvl = opts.get('tcp_keepalive_intvl', -1)
# Windows doesn't support changing something equivalent to
# TCP_KEEPCNT.
if tcp_keepalive_idle > 0 or tcp_keepalive_intvl > 0:
# Windows defaults may be found by using the link below.
# Search for 'KeepAliveTime' and 'KeepAliveInterval'.
# https://technet.microsoft.com/en-us/library/bb726981.aspx#EDAA
# If one value is set and the other isn't, we still need
# to send both values to SIO_KEEPALIVE_VALS and they both
# need to be valid. So in that case, use the Windows
# default.
if tcp_keepalive_idle <= 0:
tcp_keepalive_idle = 7200
if tcp_keepalive_intvl <= 0:
tcp_keepalive_intvl = 1
# The values expected are in milliseconds, so multiply by
# 1000.
sock.ioctl(socket.SIO_KEEPALIVE_VALS, (
1, int(tcp_keepalive_idle * 1000),
int(tcp_keepalive_intvl * 1000)))
else:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 0)
if USE_LOAD_BALANCER:
    class LoadBalancerServer(SignalHandlingMultiprocessingProcess):
        '''
        Raw TCP server which runs in its own process and will listen
        for incoming connections. Each incoming connection will be
        sent via multiprocessing queue to the workers.
        Since the queue is shared amongst workers, only one worker will
        handle a given connection.
        '''
        # TODO: opts!
        # Based on default used in tornado.netutil.bind_sockets()
        backlog = 128
        def __init__(self, opts, socket_queue, log_queue=None):
            super(LoadBalancerServer, self).__init__(log_queue=log_queue)
            self.opts = opts
            # shared queue the accept loop feeds and the workers drain
            self.socket_queue = socket_queue
            self._socket = None
        # __setstate__ and __getstate__ are only used on Windows.
        # We do this so that __init__ will be invoked on Windows in the child
        # process so that a register_after_fork() equivalent will work on
        # Windows.
        def __setstate__(self, state):
            self._is_child = True
            self.__init__(
                state['opts'],
                state['socket_queue'],
                log_queue=state['log_queue']
            )
        def __getstate__(self):
            return {'opts': self.opts,
                    'socket_queue': self.socket_queue,
                    'log_queue': self.log_queue}
        def close(self):
            """Shut down and release the listening socket, if any."""
            if self._socket is not None:
                self._socket.shutdown(socket.SHUT_RDWR)
                self._socket.close()
                self._socket = None
        def __del__(self):
            # best-effort cleanup on garbage collection
            self.close()
        def run(self):
            '''
            Start the load balancer
            '''
            self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            _set_tcp_keepalive(self._socket, self.opts)
            self._socket.setblocking(1)
            self._socket.bind((self.opts['interface'], int(self.opts['ret_port'])))
            self._socket.listen(self.backlog)
            while True:
                try:
                    # Wait for a connection to occur since the socket is
                    # blocking.
                    connection, address = self._socket.accept()
                    # Wait for a free slot to be available to put
                    # the connection into.
                    # Sockets are picklable on Windows in Python 3.
                    self.socket_queue.put((connection, address), True, None)
                except socket.error as e:
                    # ECONNABORTED indicates that there was a connection
                    # but it was closed while still in the accept queue.
                    # (observed on FreeBSD).
                    if tornado.util.errno_from_exception(e) == errno.ECONNABORTED:
                        continue
                    raise
# TODO: move serial down into message library
class AsyncTCPReqChannel(salt.transport.client.ReqChannel):
    '''
    Encapsulate sending routines to tcp.
    Note: this class returns a singleton
    '''
    # This class is only a singleton per minion/master pair
    # mapping of io_loop -> {key -> channel}
    instance_map = weakref.WeakKeyDictionary()
    def __new__(cls, opts, **kwargs):
        '''
        Only create one instance of channel per __key()
        '''
        # do we have any mapping for this io_loop
        io_loop = kwargs.get('io_loop') or tornado.ioloop.IOLoop.current()
        if io_loop not in cls.instance_map:
            cls.instance_map[io_loop] = weakref.WeakValueDictionary()
        loop_instance_map = cls.instance_map[io_loop]
        key = cls.__key(opts, **kwargs)
        obj = loop_instance_map.get(key)
        if obj is None:
            log.debug('Initializing new AsyncTCPReqChannel for {0}'.format(key))
            # we need to make a local variable for this, as we are going to store
            # it in a WeakValueDictionary-- which will remove the item if no one
            # references it-- this forces a reference while we return to the caller
            obj = object.__new__(cls)
            obj.__singleton_init__(opts, **kwargs)
            loop_instance_map[key] = obj
        else:
            log.debug('Re-using AsyncTCPReqChannel for {0}'.format(key))
        return obj
    @classmethod
    def __key(cls, opts, **kwargs):
        # NOTE(review): this writes 'master_uri' back into the caller's opts
        # dict when given as a kwarg; later code reads it from opts, so this
        # looks intentional -- confirm before changing.
        if 'master_uri' in kwargs:
            opts['master_uri'] = kwargs['master_uri']
        return (opts['pki_dir'], # where the keys are stored
                opts['id'], # minion ID
                opts['master_uri'],
                kwargs.get('crypt', 'aes'), # TODO: use the same channel for crypt
                )
    # has to remain empty for singletons, since __init__ will *always* be called
    def __init__(self, opts, **kwargs):
        pass
    # an init for the singleton instance to call
    def __singleton_init__(self, opts, **kwargs):
        self.opts = dict(opts)
        self.serial = salt.payload.Serial(self.opts)
        # crypt defaults to 'aes'
        self.crypt = kwargs.get('crypt', 'aes')
        self.io_loop = kwargs.get('io_loop') or tornado.ioloop.IOLoop.current()
        if self.crypt != 'clear':
            self.auth = salt.crypt.AsyncAuth(self.opts, io_loop=self.io_loop)
        resolver = kwargs.get('resolver')
        # master_uri is expected to be a host:port URI
        parse = urlparse.urlparse(self.opts['master_uri'])
        host, port = parse.netloc.rsplit(':', 1)
        self.master_addr = (host, int(port))
        self._closing = False
        self.message_client = SaltMessageClientPool(self.opts,
                                                    args=(self.opts, host, int(port),),
                                                    kwargs={'io_loop': self.io_loop, 'resolver': resolver})
    def close(self):
        """Tear down the underlying message client (idempotent)."""
        if self._closing:
            return
        self._closing = True
        self.message_client.close()
    def __del__(self):
        self.close()
    def _package_load(self, load):
        # wrap the payload with the encryption scheme in use
        return {
            'enc': self.crypt,
            'load': load,
        }
    @tornado.gen.coroutine
    def crypted_transfer_decode_dictentry(self, load, dictkey=None, tries=3, timeout=60):
        '''
        Send an encrypted load and decrypt a single entry (dictkey) of the
        reply, which the master encrypts with a one-off AES key wrapped in
        our RSA public key.
        '''
        if not self.auth.authenticated:
            yield self.auth.authenticate()
        ret = yield self.message_client.send(self._package_load(self.auth.crypticle.dumps(load)), timeout=timeout)
        key = self.auth.get_keys()
        cipher = PKCS1_OAEP.new(key)
        # unwrap the per-reply AES key with our RSA private key
        aes = cipher.decrypt(ret['key'])
        pcrypt = salt.crypt.Crypticle(self.opts, aes)
        data = pcrypt.loads(ret[dictkey])
        if six.PY3:
            data = salt.transport.frame.decode_embedded_strs(data)
        raise tornado.gen.Return(data)
    @tornado.gen.coroutine
    def _crypted_transfer(self, load, tries=3, timeout=60):
        '''
        In case of authentication errors, try to renegotiate authentication
        and retry the method.
        Indeed, we can fail too early in case of a master restart during a
        minion state execution call
        '''
        @tornado.gen.coroutine
        def _do_transfer():
            data = yield self.message_client.send(self._package_load(self.auth.crypticle.dumps(load)),
                                                  timeout=timeout,
                                                  )
            # we may not have always data
            # as for example for saltcall ret submission, this is a blind
            # communication, we do not subscribe to return events, we just
            # upload the results to the master
            if data:
                data = self.auth.crypticle.loads(data)
                if six.PY3:
                    data = salt.transport.frame.decode_embedded_strs(data)
            raise tornado.gen.Return(data)
        if not self.auth.authenticated:
            yield self.auth.authenticate()
        try:
            ret = yield _do_transfer()
            raise tornado.gen.Return(ret)
        except salt.crypt.AuthenticationError:
            # re-auth once (e.g. after a master restart) and retry
            yield self.auth.authenticate()
            ret = yield _do_transfer()
            raise tornado.gen.Return(ret)
    @tornado.gen.coroutine
    def _uncrypted_transfer(self, load, tries=3, timeout=60):
        """Send a load in the clear (crypt == 'clear') and return the reply."""
        ret = yield self.message_client.send(self._package_load(load), timeout=timeout)
        raise tornado.gen.Return(ret)
    @tornado.gen.coroutine
    def send(self, load, tries=3, timeout=60, raw=False):
        '''
        Send a request, return a future which will complete when we send the message
        '''
        try:
            if self.crypt == 'clear':
                ret = yield self._uncrypted_transfer(load, tries=tries, timeout=timeout)
            else:
                ret = yield self._crypted_transfer(load, tries=tries, timeout=timeout)
        except tornado.iostream.StreamClosedError:
            # Convert to 'SaltClientError' so that clients can handle this
            # exception more appropriately.
            raise SaltClientError('Connection to master lost')
        raise tornado.gen.Return(ret)
class AsyncTCPPubChannel(salt.transport.mixins.auth.AESPubClientMixin, salt.transport.client.AsyncPubChannel):
def __init__(self,
opts,
**kwargs):
self.opts = opts
self.serial = salt.payload.Serial(self.opts)
self.crypt = kwargs.get('crypt', 'aes')
self.io_loop = kwargs.get('io_loop') or tornado.ioloop.IOLoop.current()
self.connected = False
self._closing = False
self._reconnected = False
self.event = salt.utils.event.get_event(
'minion',
opts=self.opts,
listen=False
)
def close(self):
if self._closing:
return
self._closing = True
if hasattr(self, 'message_client'):
self.message_client.close()
def __del__(self):
self.close()
def _package_load(self, load):
return {
'enc': self.crypt,
'load': load,
}
@tornado.gen.coroutine
def send_id(self, tok, force_auth):
'''
Send the minion id to the master so that the master may better
track the connection state of the minion.
In case of authentication errors, try to renegotiate authentication
and retry the method.
'''
load = {'id': self.opts['id'], 'tok': tok}
@tornado.gen.coroutine
def _do_transfer():
msg = self._package_load(self.auth.crypticle.dumps(load))
package = salt.transport.frame.frame_msg(msg, header=None)
yield self.message_client.write_to_stream(package)
raise tornado.gen.Return(True)
if force_auth or not self.auth.authenticated:
count = 0
while count <= self.opts['tcp_authentication_retries'] or self.opts['tcp_authentication_retries'] < 0:
try:
yield self.auth.authenticate()
break
except SaltClientError as exc:
log.debug(exc)
count += 1
try:
ret = yield _do_transfer()
raise tornado.gen.Return(ret)
except salt.crypt.AuthenticationError:
yield self.auth.authenticate()
ret = yield _do_transfer()
raise tornado.gen.Return(ret)
@tornado.gen.coroutine
def connect_callback(self, result):
if self._closing:
return
# Force re-auth on reconnect since the master
# may have been restarted
yield self.send_id(self.tok, self._reconnected)
self.connected = True
self.event.fire_event(
{'master': self.opts['master']},
'__master_connected'
)
if self._reconnected:
# On reconnects, fire a master event to notify that the minion is
# available.
if self.opts.get('__role') == 'syndic':
data = 'Syndic {0} started at {1}'.format(
self.opts['id'],
time.asctime()
)
tag = salt.utils.event.tagify(
[self.opts['id'], 'start'],
'syndic'
)
else:
data = 'Minion {0} started at {1}'.format(
self.opts['id'],
time.asctime()
)
tag = salt.utils.event.tagify(
[self.opts['id'], 'start'],
'minion'
)
load = {'id': self.opts['id'],
'cmd': '_minion_event',
'pretag': None,
'tok': self.tok,
'data': data,
'tag': tag}
req_channel = salt.utils.async.SyncWrapper(
AsyncTCPReqChannel, (self.opts,)
)
try:
req_channel.send(load, timeout=60)
except salt.exceptions.SaltReqTimeoutError:
log.info('fire_master failed: master could not be contacted. Request timed out.')
except Exception:
log.info('fire_master failed: {0}'.format(
traceback.format_exc())
)
else:
self._reconnected = True
def disconnect_callback(self):
if self._closing:
return
self.connected = False
self.event.fire_event(
{'master': self.opts['master']},
'__master_disconnected'
)
@tornado.gen.coroutine
def connect(self):
    """Authenticate with the master and open the publish message client.

    Creates the async auth handle, obtains a token used for event firing,
    and once authenticated connects a pooled message client to the
    master's publish port (learned from the auth credentials).

    Raises:
        SaltClientError: on any non-retryable sign-in failure.
    """
    try:
        self.auth = salt.crypt.AsyncAuth(self.opts, io_loop=self.io_loop)
        self.tok = self.auth.gen_token('salt')
        if not self.auth.authenticated:
            yield self.auth.authenticate()
        if self.auth.authenticated:
            self.message_client = SaltMessageClientPool(
                self.opts,
                args=(self.opts, self.opts['master_ip'], int(self.auth.creds['publish_port']),),
                kwargs={'io_loop': self.io_loop,
                        'connect_callback': self.connect_callback,
                        'disconnect_callback': self.disconnect_callback})
            yield self.message_client.connect()  # wait for the client to be connected
            self.connected = True
    # TODO: better exception handling...
    except KeyboardInterrupt:
        raise
    except Exception as exc:
        # '-|RETRY|-' in the message marks a retryable auth failure;
        # anything else is treated as fatal.
        if '-|RETRY|-' not in str(exc):
            raise SaltClientError('Unable to sign_in to master: {0}'.format(exc))  # TODO: better error message
def on_recv(self, callback):
    '''
    Register an on_recv callback

    Passing None un-registers any existing callback.  Otherwise the given
    callback receives each published payload after it has been decoded
    and decrypted by _decode_payload().
    '''
    if callback is None:
        # Un-register: forward None straight to the message client.
        return self.message_client.on_recv(callback)

    @tornado.gen.coroutine
    def wrap_callback(body):
        # Normalize the raw wire body to a dict before decoding.
        if not isinstance(body, dict):
            # TODO: For some reason we need to decode here for things
            # to work. Fix this.
            body = msgpack.loads(body)
            if six.PY3:
                body = salt.transport.frame.decode_embedded_strs(body)
        ret = yield self._decode_payload(body)
        callback(ret)
    return self.message_client.on_recv(wrap_callback)
class TCPReqServerChannel(salt.transport.mixins.auth.AESReqServerMixin, salt.transport.server.ReqServerChannel):
    """Master-side request (ret) server for the raw TCP transport.

    Owns the listening socket on the ret port, decodes framed msgpack
    requests and routes them either to the key-auth mixin ('_auth'
    commands) or to the payload handler supplied by the master in
    post_fork().
    """
    # TODO: opts!
    # Listen backlog for the ret-port socket.
    backlog = 5

    def __init__(self, opts):
        salt.transport.server.ReqServerChannel.__init__(self, opts)
        self._socket = None

    @property
    def socket(self):
        # Raw listening socket; None until pre_fork()/post_fork() binds it.
        return self._socket

    def close(self):
        # Shut down and close the listening socket, tolerating sockets
        # that are already disconnected.
        if self._socket is not None:
            try:
                self._socket.shutdown(socket.SHUT_RDWR)
            except socket.error as exc:
                if exc.errno == errno.ENOTCONN:
                    # We may try to shutdown a socket which is already disconnected.
                    # Ignore this condition and continue.
                    pass
                else:
                    raise exc
            self._socket.close()
            self._socket = None

    def __del__(self):
        self.close()

    def pre_fork(self, process_manager):
        '''
        Pre-fork we need to create the zmq router device
        '''
        salt.transport.mixins.auth.AESReqServerMixin.pre_fork(self, process_manager)
        if USE_LOAD_BALANCER:
            # A dedicated accept process hands sockets to workers via this
            # multiprocessing queue.
            self.socket_queue = multiprocessing.Queue()
            process_manager.add_process(
                LoadBalancerServer, args=(self.opts, self.socket_queue)
            )
        elif not salt.utils.is_windows():
            # POSIX: bind once pre-fork so forked workers share the socket;
            # Windows binds post-fork instead (see post_fork()).
            self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            _set_tcp_keepalive(self._socket, self.opts)
            self._socket.setblocking(0)
            self._socket.bind((self.opts['interface'], int(self.opts['ret_port'])))

    def post_fork(self, payload_handler, io_loop):
        '''
        After forking we need to create all of the local sockets to listen to the
        router

        payload_handler: function to call with your payloads
        '''
        self.payload_handler = payload_handler
        self.io_loop = io_loop
        self.serial = salt.payload.Serial(self.opts)
        if USE_LOAD_BALANCER:
            self.req_server = LoadBalancerWorker(self.socket_queue,
                                                 self.handle_message,
                                                 io_loop=self.io_loop,
                                                 ssl_options=self.opts.get('ssl'))
        else:
            if salt.utils.is_windows():
                self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                _set_tcp_keepalive(self._socket, self.opts)
                self._socket.setblocking(0)
                self._socket.bind((self.opts['interface'], int(self.opts['ret_port'])))
            self.req_server = SaltMessageServer(self.handle_message,
                                                io_loop=self.io_loop,
                                                ssl_options=self.opts.get('ssl'))
            self.req_server.add_socket(self._socket)
            self._socket.listen(self.backlog)
        salt.transport.mixins.auth.AESReqServerMixin.post_fork(self, payload_handler, io_loop)

    @tornado.gen.coroutine
    def handle_message(self, stream, header, payload):
        '''
        Handle incoming messages from underylying tcp streams
        '''
        try:
            try:
                payload = self._decode_payload(payload)
            except Exception:
                stream.write(salt.transport.frame.frame_msg('bad load', header=header))
                raise tornado.gen.Return()

            # TODO helper functions to normalize payload?
            if not isinstance(payload, dict) or not isinstance(payload.get('load'), dict):
                yield stream.write(salt.transport.frame.frame_msg(
                    'payload and load must be a dict', header=header))
                raise tornado.gen.Return()

            # intercept the "_auth" commands, since the main daemon shouldn't know
            # anything about our key auth
            if payload['enc'] == 'clear' and payload.get('load', {}).get('cmd') == '_auth':
                yield stream.write(salt.transport.frame.frame_msg(
                    self._auth(payload['load']), header=header))
                raise tornado.gen.Return()

            # TODO: test
            try:
                ret, req_opts = yield self.payload_handler(payload)
            except Exception as e:
                # always attempt to return an error to the minion
                stream.write('Some exception handling minion payload')
                log.error('Some exception handling a payload from minion', exc_info=True)
                stream.close()
                raise tornado.gen.Return()

            # req_opts['fun'] selects how the reply is encrypted on the wire.
            req_fun = req_opts.get('fun', 'send')
            if req_fun == 'send_clear':
                stream.write(salt.transport.frame.frame_msg(ret, header=header))
            elif req_fun == 'send':
                stream.write(salt.transport.frame.frame_msg(self.crypticle.dumps(ret), header=header))
            elif req_fun == 'send_private':
                stream.write(salt.transport.frame.frame_msg(self._encrypt_private(ret,
                                                                                  req_opts['key'],
                                                                                  req_opts['tgt'],
                                                                                  ), header=header))
            else:
                log.error('Unknown req_fun {0}'.format(req_fun))
                # always attempt to return an error to the minion
                stream.write('Server-side exception handling payload')
                stream.close()
        except tornado.gen.Return:
            raise
        except tornado.iostream.StreamClosedError:
            # Stream was closed. This could happen if the remote side
            # closed the connection on its end (eg in a timeout or shutdown
            # situation).
            log.error('Connection was unexpectedly closed', exc_info=True)
        except Exception as exc:  # pylint: disable=broad-except
            # Absorb any other exceptions
            log.error('Unexpected exception occurred: {0}'.format(exc), exc_info=True)

        raise tornado.gen.Return()
class SaltMessageServer(tornado.tcpserver.TCPServer, object):
    '''
    Raw TCP server which will receive all of the TCP streams and re-assemble
    messages that are sent through to us
    '''
    def __init__(self, message_handler, *args, **kwargs):
        super(SaltMessageServer, self).__init__(*args, **kwargs)
        # Open (stream, address) pairs, tracked so shutdown() can close them.
        self.clients = []
        # Called as message_handler(stream, header, body) per framed message.
        self.message_handler = message_handler

    @tornado.gen.coroutine
    def handle_stream(self, stream, address):
        '''
        Handle incoming streams and add messages to the incoming queue
        '''
        log.trace('Req client {0} connected'.format(address))
        self.clients.append((stream, address))
        unpacker = msgpack.Unpacker()
        try:
            while True:
                # Feed raw bytes into the streaming unpacker; each complete
                # frame carries a 'head' and 'body'.
                wire_bytes = yield stream.read_bytes(4096, partial=True)
                unpacker.feed(wire_bytes)
                for framed_msg in unpacker:
                    if six.PY3:
                        framed_msg = salt.transport.frame.decode_embedded_strs(
                            framed_msg
                        )
                    header = framed_msg['head']
                    self.io_loop.spawn_callback(self.message_handler, stream, header, framed_msg['body'])
        except tornado.iostream.StreamClosedError:
            log.trace('req client disconnected {0}'.format(address))
            self.clients.remove((stream, address))
        except Exception as e:
            log.trace('other master-side exception: {0}'.format(e))
            self.clients.remove((stream, address))
            stream.close()

    def shutdown(self):
        '''
        Shutdown the whole server
        '''
        # BUG FIX: the original removed entries from self.clients while
        # iterating over it, which skips every other client and leaves some
        # streams open.  Iterate a snapshot, then drop the whole list.
        for client, address in list(self.clients):
            client.close()
        self.clients = []
if USE_LOAD_BALANCER:
    class LoadBalancerWorker(SaltMessageServer):
        '''
        This will receive TCP connections from 'LoadBalancerServer' via
        a multiprocessing queue.
        Since the queue is shared amongst workers, only one worker will handle
        a given connection.
        '''
        def __init__(self, socket_queue, message_handler, *args, **kwargs):
            super(LoadBalancerWorker, self).__init__(
                message_handler, *args, **kwargs)
            self.socket_queue = socket_queue
            # A background thread blocks on the shared queue and hands each
            # accepted socket to the IO loop.
            t = threading.Thread(target=self.socket_queue_thread)
            t.start()

        def socket_queue_thread(self):
            # Blocking consumer loop; runs until the process is interrupted.
            try:
                while True:
                    client_socket, address = self.socket_queue.get(True, None)

                    # 'self.io_loop' initialized in super class
                    # 'tornado.tcpserver.TCPServer'.
                    # 'self._handle_connection' defined in same super class.
                    self.io_loop.spawn_callback(
                        self._handle_connection, client_socket, address)
            except (KeyboardInterrupt, SystemExit):
                pass
class TCPClientKeepAlive(tornado.tcpclient.TCPClient):
    '''
    Override _create_stream() in TCPClient to enable keep alive support.
    '''
    def __init__(self, opts, resolver=None, io_loop=None):
        self.opts = opts
        super(TCPClientKeepAlive, self).__init__(
            resolver=resolver, io_loop=io_loop)

    def _create_stream(self, max_buffer_size, af, addr, **kwargs):  # pylint: disable=unused-argument
        '''
        Override _create_stream() in TCPClient.

        Tornado 4.5 added the kwargs 'source_ip' and 'source_port'.
        Due to this, use **kwargs to swallow these and any future
        kwargs to maintain compatibility.
        '''
        # Always connect in plaintext; we'll convert to ssl if necessary
        # after one connection has completed.
        keepalive_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        _set_tcp_keepalive(keepalive_sock, self.opts)
        keepalive_stream = tornado.iostream.IOStream(
            keepalive_sock,
            io_loop=self.io_loop,
            max_buffer_size=max_buffer_size)
        return keepalive_stream.connect(addr)
class SaltMessageClientPool(salt.transport.MessageClientPool):
    '''
    Wrapper class of SaltMessageClient to avoid blocking waiting while writing data to socket.
    '''
    def __init__(self, opts, args=None, kwargs=None):
        super(SaltMessageClientPool, self).__init__(SaltMessageClient, opts, args=args, kwargs=kwargs)

    def __del__(self):
        self.close()

    def close(self):
        '''Close every pooled client and drop the pool.'''
        for message_client in self.message_clients:
            message_client.close()
        self.message_clients = []

    @tornado.gen.coroutine
    def connect(self):
        '''Kick off all pooled connects in parallel; resolves when all are up.'''
        futures = [message_client.connect()
                   for message_client in self.message_clients]
        for future in futures:
            yield future
        raise tornado.gen.Return(None)

    def on_recv(self, *args, **kwargs):
        '''Register an on_recv callback on every pooled client.'''
        for message_client in self.message_clients:
            message_client.on_recv(*args, **kwargs)

    def _least_busy_client(self):
        '''Return the pooled client with the shortest send queue.

        PERF FIX: the original sorted the whole pool (O(n log n)) just to
        take the first element; min() finds the same client (first minimum,
        same tie-breaking as a stable sort) in a single O(n) pass.
        '''
        return min(self.message_clients,
                   key=lambda client: len(client.send_queue))

    def send(self, *args, **kwargs):
        '''Send a message on the least-busy client; returns its future.'''
        return self._least_busy_client().send(*args, **kwargs)

    def write_to_stream(self, *args, **kwargs):
        '''Write raw bytes directly on the least-busy client's stream.'''
        return self._least_busy_client()._stream.write(*args, **kwargs)
# TODO consolidate with IPCClient
# TODO: limit in-flight messages.
# TODO: singleton? Something to not re-create the tcp connection so much
class SaltMessageClient(object):
    '''
    Low-level message sending client

    Maintains one TCP connection (with automatic reconnect), frames
    outgoing messages with a monotonically increasing message id, and
    resolves per-request futures as matching replies arrive.
    '''
    def __init__(self, opts, host, port, io_loop=None, resolver=None,
                 connect_callback=None, disconnect_callback=None):
        self.opts = opts
        self.host = host
        self.port = port
        self.connect_callback = connect_callback
        self.disconnect_callback = disconnect_callback

        self.io_loop = io_loop or tornado.ioloop.IOLoop.current()

        self._tcp_client = TCPClientKeepAlive(
            opts, io_loop=self.io_loop, resolver=resolver)

        # Message ids wrap before exceeding a signed 32-bit int.
        self._mid = 1
        self._max_messages = int((1 << 31) - 2)  # number of IDs before we wrap

        # TODO: max queue size
        self.send_queue = []  # queue of messages to be sent
        self.send_future_map = {}  # mapping of request_id -> Future
        self.send_timeout_map = {}  # request_id -> timeout_callback

        self._read_until_future = None
        self._on_recv = None
        self._closing = False
        self._connecting_future = self.connect()
        self._stream_return_future = tornado.concurrent.Future()
        self.io_loop.spawn_callback(self._stream_return)

    # TODO: timeout inflight sessions
    def close(self):
        # Idempotent teardown: close the stream, drain the read future and
        # make sure the _stream_return coroutine finishes.
        if self._closing:
            return
        self._closing = True
        if hasattr(self, '_stream') and not self._stream.closed():
            self._stream.close()
            if self._read_until_future is not None:
                # This will prevent this message from showing up:
                # '[ERROR   ] Future exception was never retrieved:
                # StreamClosedError'
                # This happens because the logic is always waiting to read
                # the next message and the associated read future is marked
                # 'StreamClosedError' when the stream is closed.
                self._read_until_future.exc_info()
                if (not self._stream_return_future.done() and
                        self.io_loop != tornado.ioloop.IOLoop.current(
                            instance=False)):
                    # If _stream_return() hasn't completed, it means the IO
                    # Loop is stopped (such as when using
                    # 'salt.utils.async.SyncWrapper'). Ensure that
                    # _stream_return() completes by restarting the IO Loop.
                    # This will prevent potential errors on shutdown.
                    orig_loop = tornado.ioloop.IOLoop.current()
                    self.io_loop.make_current()
                    try:
                        self.io_loop.add_future(
                            self._stream_return_future,
                            lambda future: self.io_loop.stop()
                        )
                        self.io_loop.start()
                    finally:
                        orig_loop.make_current()
        self._tcp_client.close()
        # Clear callback references to allow the object that they belong to
        # to be deleted.
        self.connect_callback = None
        self.disconnect_callback = None

    def __del__(self):
        self.close()

    def connect(self):
        '''
        Ask for this client to reconnect to the origin

        Returns a future that resolves once the TCP connection is up.  An
        in-flight connect attempt is reused rather than duplicated.
        '''
        if hasattr(self, '_connecting_future') and not self._connecting_future.done():
            future = self._connecting_future
        else:
            future = tornado.concurrent.Future()
            self._connecting_future = future
            self.io_loop.add_callback(self._connect)

            # Add the callback only when a new future is created
            if self.connect_callback is not None:
                def handle_future(future):
                    response = future.result()
                    self.io_loop.add_callback(self.connect_callback, response)
                future.add_done_callback(handle_future)

        return future

    # TODO: tcp backoff opts
    @tornado.gen.coroutine
    def _connect(self):
        '''
        Try to connect for the rest of time!
        '''
        while True:
            if self._closing:
                break
            try:
                self._stream = yield self._tcp_client.connect(self.host,
                                                              self.port,
                                                              ssl_options=self.opts.get('ssl'))
                self._connecting_future.set_result(True)
                break
            except Exception as e:
                # Retry forever with a fixed 1s delay.
                yield tornado.gen.sleep(1)  # TODO: backoff
                #self._connecting_future.set_exception(e)

    @tornado.gen.coroutine
    def _stream_return(self):
        # Background reader: matches replies to pending futures by message
        # id, dispatches unsolicited messages to the on_recv callback, and
        # reconnects whenever the stream drops.
        try:
            while not self._closing and (
                    not self._connecting_future.done() or
                    self._connecting_future.result() is not True):
                yield self._connecting_future
            unpacker = msgpack.Unpacker()
            while not self._closing:
                try:
                    self._read_until_future = self._stream.read_bytes(4096, partial=True)
                    wire_bytes = yield self._read_until_future
                    unpacker.feed(wire_bytes)
                    for framed_msg in unpacker:
                        if six.PY3:
                            framed_msg = salt.transport.frame.decode_embedded_strs(
                                framed_msg
                            )
                        header = framed_msg['head']
                        body = framed_msg['body']
                        message_id = header.get('mid')

                        if message_id in self.send_future_map:
                            self.send_future_map.pop(message_id).set_result(body)
                            self.remove_message_timeout(message_id)
                        else:
                            if self._on_recv is not None:
                                self.io_loop.spawn_callback(self._on_recv, header, body)
                            else:
                                log.error('Got response for message_id {0} that we are not tracking'.format(message_id))
                except tornado.iostream.StreamClosedError as e:
                    log.debug('tcp stream to {0}:{1} closed, unable to recv'.format(self.host, self.port))
                    # Fail every in-flight request; callers may retry.
                    for future in six.itervalues(self.send_future_map):
                        future.set_exception(e)
                    self.send_future_map = {}
                    if self._closing:
                        return
                    if self.disconnect_callback:
                        self.disconnect_callback()
                    # if the last connect finished, then we need to make a new one
                    if self._connecting_future.done():
                        self._connecting_future = self.connect()
                    yield self._connecting_future
                except TypeError:
                    # This is an invalid transport
                    if 'detect_mode' in self.opts:
                        log.info('There was an error trying to use TCP transport; '
                                 'attempting to fallback to another transport')
                    else:
                        raise SaltClientError
                except Exception as e:
                    log.error('Exception parsing response', exc_info=True)
                    for future in six.itervalues(self.send_future_map):
                        future.set_exception(e)
                    self.send_future_map = {}
                    if self._closing:
                        return
                    if self.disconnect_callback:
                        self.disconnect_callback()
                    # if the last connect finished, then we need to make a new one
                    if self._connecting_future.done():
                        self._connecting_future = self.connect()
                    yield self._connecting_future
        finally:
            # Signal close() that the reader has fully terminated.
            self._stream_return_future.set_result(True)

    @tornado.gen.coroutine
    def _stream_send(self):
        # Drain the send queue head-first; on a dropped stream, fail the
        # current message's future and reconnect before continuing.
        while not self._connecting_future.done() or self._connecting_future.result() is not True:
            yield self._connecting_future
        while len(self.send_queue) > 0:
            message_id, item = self.send_queue[0]
            try:
                yield self._stream.write(item)
                del self.send_queue[0]
            # if the connection is dead, lets fail this send, and make sure we
            # attempt to reconnect
            except tornado.iostream.StreamClosedError as e:
                if message_id in self.send_future_map:
                    self.send_future_map.pop(message_id).set_exception(e)
                self.remove_message_timeout(message_id)
                del self.send_queue[0]
                if self._closing:
                    return
                if self.disconnect_callback:
                    self.disconnect_callback()
                # if the last connect finished, then we need to make a new one
                if self._connecting_future.done():
                    self._connecting_future = self.connect()
                yield self._connecting_future

    def _message_id(self):
        # Return the next message id not currently in flight, wrapping at
        # _max_messages; raises only if every id is somehow in use.
        wrap = False
        while self._mid in self.send_future_map:
            if self._mid >= self._max_messages:
                if wrap:
                    # this shouldn't ever happen, but just in case
                    raise Exception('Unable to find available messageid')
                self._mid = 1
                wrap = True
            else:
                self._mid += 1

        return self._mid

    # TODO: return a message object which takes care of multiplexing?
    def on_recv(self, callback):
        '''
        Register a callback for received messages (that we didn't initiate)

        Passing None un-registers the callback.
        '''
        if callback is None:
            self._on_recv = callback
        else:
            def wrap_recv(header, body):
                callback(body)
            self._on_recv = wrap_recv

    def remove_message_timeout(self, message_id):
        # Cancel and forget a pending timeout for the given message id.
        if message_id not in self.send_timeout_map:
            return
        timeout = self.send_timeout_map.pop(message_id)
        self.io_loop.remove_timeout(timeout)

    def timeout_message(self, message_id):
        # Fail a still-pending request with SaltReqTimeoutError.
        if message_id in self.send_timeout_map:
            del self.send_timeout_map[message_id]
        if message_id in self.send_future_map:
            self.send_future_map.pop(message_id).set_exception(
                SaltReqTimeoutError('Message timed out')
            )

    def send(self, msg, timeout=None, callback=None, raw=False):
        '''
        Send given message, and return a future

        The future resolves with the reply body, or fails with
        SaltReqTimeoutError after `timeout` seconds (forced to 1s when
        opts['detect_mode'] is True).
        '''
        message_id = self._message_id()
        header = {'mid': message_id}

        future = tornado.concurrent.Future()
        if callback is not None:
            def handle_future(future):
                response = future.result()
                self.io_loop.add_callback(callback, response)
            future.add_done_callback(handle_future)
        # Add this future to the mapping
        self.send_future_map[message_id] = future

        if self.opts.get('detect_mode') is True:
            timeout = 1

        if timeout is not None:
            send_timeout = self.io_loop.call_later(timeout, self.timeout_message, message_id)
            self.send_timeout_map[message_id] = send_timeout

        # if we don't have a send queue, we need to spawn the callback to do the sending
        if len(self.send_queue) == 0:
            self.io_loop.spawn_callback(self._stream_send)
        self.send_queue.append((message_id, salt.transport.frame.frame_msg(msg, header=header)))
        return future
class Subscriber(object):
    '''
    Client object for use with the TCP publisher server

    Holds one connected minion's stream, its remote address, and (once
    verified) its minion id.
    '''
    def __init__(self, stream, address):
        self.stream = stream
        self.address = address
        self._closing = False
        self._read_until_future = None
        self.id_ = None

    def close(self):
        '''Close this subscriber's stream exactly once (idempotent).'''
        if self._closing:
            return
        self._closing = True
        if not self.stream.closed():
            self.stream.close()
            read_future = self._read_until_future
            if read_future is not None:
                # This will prevent this message from showing up:
                # '[ERROR   ] Future exception was never retrieved:
                # StreamClosedError'
                # This happens because the logic is always waiting to read
                # the next message and the associated read future is marked
                # 'StreamClosedError' when the stream is closed.
                read_future.exc_info()

    def __del__(self):
        self.close()
class PubServer(tornado.tcpserver.TCPServer, object):
    '''
    TCP publisher

    Accepts minion subscriber connections, verifies each minion's id/token
    before tracking it as "present", and fans published payloads out to
    all (or a targeted subset of) connected subscribers.
    '''
    def __init__(self, opts, io_loop=None):
        super(PubServer, self).__init__(io_loop=io_loop, ssl_options=opts.get('ssl'))
        self.opts = opts
        self._closing = False
        self.clients = set()
        self.aes_funcs = salt.master.AESFuncs(self.opts)
        # minion id -> set of Subscriber objects (normally one per id).
        self.present = {}
        self.presence_events = False
        if self.opts.get('presence_events', False):
            tcp_only = True
            for transport, _ in iter_transport_opts(self.opts):
                if transport != 'tcp':
                    tcp_only = False
            if tcp_only:
                # Only when the transport is TCP only, the presence events will
                # be handled here. Otherwise, it will be handled in the
                # 'Maintenance' process.
                self.presence_events = True
        if self.presence_events:
            self.event = salt.utils.event.get_event(
                'master',
                opts=self.opts,
                listen=False
            )

    def close(self):
        if self._closing:
            return
        self._closing = True

    def __del__(self):
        self.close()

    def _add_client_present(self, client):
        # Record the (verified) client under its minion id and, when
        # presence events are enabled, fire change/present events.
        id_ = client.id_
        if id_ in self.present:
            clients = self.present[id_]
            clients.add(client)
        else:
            self.present[id_] = set([client])
            if self.presence_events:
                data = {'new': [id_],
                        'lost': []}
                self.event.fire_event(
                    data,
                    salt.utils.event.tagify('change', 'presence')
                )
                data = {'present': list(self.present.keys())}
                self.event.fire_event(
                    data,
                    salt.utils.event.tagify('present', 'presence')
                )

    def _remove_client_present(self, client):
        id_ = client.id_
        if id_ is None or id_ not in self.present:
            # This is possible if _remove_client_present() is invoked
            # before the minion's id is validated.
            return

        clients = self.present[id_]
        if client not in clients:
            # Since _remove_client_present() is potentially called from
            # _stream_read() and/or publish_payload(), it is possible for
            # it to be called twice, in which case we will get here.
            # This is not an abnormal case, so no logging is required.
            return

        clients.remove(client)
        if len(clients) == 0:
            del self.present[id_]
            if self.presence_events:
                data = {'new': [],
                        'lost': [id_]}
                self.event.fire_event(
                    data,
                    salt.utils.event.tagify('change', 'presence')
                )
                data = {'present': list(self.present.keys())}
                self.event.fire_event(
                    data,
                    salt.utils.event.tagify('present', 'presence')
                )

    @tornado.gen.coroutine
    def _stream_read(self, client):
        # Read id announcements from a subscriber: only AES-encrypted loads
        # whose id/token pass verify_minion() are registered as present.
        unpacker = msgpack.Unpacker()
        while not self._closing:
            try:
                client._read_until_future = client.stream.read_bytes(4096, partial=True)
                wire_bytes = yield client._read_until_future
                unpacker.feed(wire_bytes)
                for framed_msg in unpacker:
                    if six.PY3:
                        framed_msg = salt.transport.frame.decode_embedded_strs(
                            framed_msg
                        )
                    body = framed_msg['body']
                    if body['enc'] != 'aes':
                        # We only accept 'aes' encoded messages for 'id'
                        continue
                    crypticle = salt.crypt.Crypticle(self.opts, salt.master.SMaster.secrets['aes']['secret'].value)
                    load = crypticle.loads(body['load'])
                    if six.PY3:
                        load = salt.transport.frame.decode_embedded_strs(load)
                    if not self.aes_funcs.verify_minion(load['id'], load['tok']):
                        continue
                    client.id_ = load['id']
                    self._add_client_present(client)
            except tornado.iostream.StreamClosedError as e:
                log.debug('tcp stream to {0} closed, unable to recv'.format(client.address))
                client.close()
                self._remove_client_present(client)
                self.clients.discard(client)
                break
            except Exception as e:
                log.error('Exception parsing response', exc_info=True)
                continue

    def handle_stream(self, stream, address):
        # TCPServer hook: wrap each new connection in a Subscriber and start
        # its background reader.
        log.trace('Subscriber at {0} connected'.format(address))
        client = Subscriber(stream, address)
        self.clients.add(client)
        self.io_loop.spawn_callback(self._stream_read, client)

    # TODO: ACK the publish through IPC
    @tornado.gen.coroutine
    def publish_payload(self, package, _):
        # NOTE(review): this logs the full (encrypted) payload at debug
        # level; noisy on busy masters.
        log.debug('TCP PubServer sending payload: {0}'.format(package))
        payload = salt.transport.frame.frame_msg(package['payload'])

        to_remove = []
        if 'topic_lst' in package:
            # Targeted publish: only minions named in topic_lst.
            topic_lst = package['topic_lst']
            for topic in topic_lst:
                if topic in self.present:
                    # This will rarely be a list of more than 1 item. It will
                    # be more than 1 item if the minion disconnects from the
                    # master in an unclean manner (eg cable yank), then
                    # restarts and the master is yet to detect the disconnect
                    # via TCP keep-alive.
                    for client in self.present[topic]:
                        try:
                            # Write the packed str
                            f = client.stream.write(payload)
                            self.io_loop.add_future(f, lambda f: True)
                        except tornado.iostream.StreamClosedError:
                            to_remove.append(client)
                else:
                    log.debug('Publish target {0} not connected'.format(topic))
        else:
            # Broadcast to every connected subscriber.
            for client in self.clients:
                try:
                    # Write the packed str
                    f = client.stream.write(payload)
                    self.io_loop.add_future(f, lambda f: True)
                except tornado.iostream.StreamClosedError:
                    to_remove.append(client)
        for client in to_remove:
            log.debug('Subscriber at {0} has disconnected from publisher'.format(client.address))
            client.close()
            self._remove_client_present(client)
            self.clients.discard(client)
        log.trace('TCP PubServer finished publishing payload')
class TCPPubServerChannel(salt.transport.server.PubServerChannel):
    """Master-side publish channel for the TCP transport.

    Runs a PubServer in a dedicated daemon process and feeds it payloads
    over a local Salt IPC socket; publish() is called from the master
    process and hands the encrypted load to that daemon.
    """
    # TODO: opts!
    # Based on default used in tornado.netutil.bind_sockets()
    backlog = 128

    def __init__(self, opts):
        self.opts = opts
        self.serial = salt.payload.Serial(self.opts)  # TODO: in init?
        self.io_loop = None

    def __setstate__(self, state):
        # Restore shared master secrets before re-running __init__ in the
        # unpickling (forked) process.
        salt.master.SMaster.secrets = state['secrets']
        self.__init__(state['opts'])

    def __getstate__(self):
        return {'opts': self.opts,
                'secrets': salt.master.SMaster.secrets}

    def _publish_daemon(self, log_queue=None):
        '''
        Bind to the interface specified in the configuration file
        '''
        salt.utils.appendproctitle(self.__class__.__name__)

        if log_queue is not None:
            salt.log.setup.set_multiprocessing_logging_queue(log_queue)
            salt.log.setup.setup_multiprocessing_logging(log_queue)

        # Check if io_loop was set outside
        if self.io_loop is None:
            self.io_loop = tornado.ioloop.IOLoop.current()

        # Spin up the publisher
        pub_server = PubServer(self.opts, io_loop=self.io_loop)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        _set_tcp_keepalive(sock, self.opts)
        sock.setblocking(0)
        sock.bind((self.opts['interface'], int(self.opts['publish_port'])))
        sock.listen(self.backlog)
        # pub_server will take ownership of the socket
        pub_server.add_socket(sock)

        # Set up Salt IPC server
        if self.opts.get('ipc_mode', '') == 'tcp':
            pull_uri = int(self.opts.get('tcp_master_publish_pull', 4514))
        else:
            pull_uri = os.path.join(self.opts['sock_dir'], 'publish_pull.ipc')

        pull_sock = salt.transport.ipc.IPCMessageServer(
            pull_uri,
            io_loop=self.io_loop,
            payload_handler=pub_server.publish_payload,
        )

        # Securely create socket: restrictive umask so the IPC socket is not
        # world-accessible.
        log.info('Starting the Salt Puller on {0}'.format(pull_uri))
        old_umask = os.umask(0o177)
        try:
            pull_sock.start()
        finally:
            os.umask(old_umask)

        # run forever
        try:
            self.io_loop.start()
        except (KeyboardInterrupt, SystemExit):
            salt.log.setup.shutdown_multiprocessing_logging()

    def pre_fork(self, process_manager):
        '''
        Do anything necessary pre-fork. Since this is on the master side this will
        primarily be used to create IPC channels and create our daemon process to
        do the actual publishing
        '''
        kwargs = {}
        if salt.utils.is_windows():
            kwargs['log_queue'] = (
                salt.log.setup.get_multiprocessing_logging_queue()
            )
        process_manager.add_process(self._publish_daemon, kwargs=kwargs)

    def publish(self, load):
        '''
        Publish "load" to minions
        '''
        # AES-encrypt the load with the current shared master key.
        payload = {'enc': 'aes'}

        crypticle = salt.crypt.Crypticle(self.opts, salt.master.SMaster.secrets['aes']['secret'].value)
        payload['load'] = crypticle.dumps(load)
        if self.opts['sign_pub_messages']:
            master_pem_path = os.path.join(self.opts['pki_dir'], 'master.pem')
            log.debug("Signing data packet")
            payload['sig'] = salt.crypt.sign_message(master_pem_path, payload['load'])
        # Use the Salt IPC server
        if self.opts.get('ipc_mode', '') == 'tcp':
            pull_uri = int(self.opts.get('tcp_master_publish_pull', 4514))
        else:
            pull_uri = os.path.join(self.opts['sock_dir'], 'publish_pull.ipc')
        # TODO: switch to the actual async interface
        #pub_sock = salt.transport.ipc.IPCMessageClient(self.opts, io_loop=self.io_loop)
        pub_sock = salt.utils.async.SyncWrapper(
            salt.transport.ipc.IPCMessageClient,
            (pull_uri,)
        )
        pub_sock.connect()

        int_payload = {'payload': self.serial.dumps(payload)}

        # add some targeting stuff for lists only (for now)
        if load['tgt_type'] == 'list':
            int_payload['topic_lst'] = load['tgt']

        # Send it over IPC!
        pub_sock.send(int_payload)
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_2816_1 |
crossvul-python_data_good_1376_3 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from thrift.Thrift import *
class TProtocolException(TException):
    """Custom Protocol Exception class"""

    # Error categories carried in self.type.
    UNKNOWN = 0
    INVALID_DATA = 1
    NEGATIVE_SIZE = 2
    SIZE_LIMIT = 3
    BAD_VERSION = 4
    INVALID_PROTOCOL = 5
    MISSING_REQUIRED_FIELD = 6

    def __init__(self, type=UNKNOWN, message=None):
        # Record the error category, then initialize the base exception
        # with the human-readable message.
        self.type = type
        TException.__init__(self, message)
class TProtocolBase:
    """Base class for Thrift protocol driver.

    Concrete protocols (binary, compact, JSON, ...) subclass this and
    implement the read*/write* methods; the bodies here are no-op stubs
    that only define the wire-level interface.
    """

    def __init__(self, trans):
        # trans: the underlying TTransport this protocol reads/writes.
        self.trans = trans

    def writeMessageBegin(self, name, ttype, seqid):
        pass

    def writeMessageEnd(self):
        pass

    def writeStructBegin(self, name):
        pass

    def writeStructEnd(self):
        pass

    def writeUnionBegin(self, name):
        # Unions are encoded as structs by default.
        self.writeStructBegin(name)

    def writeUnionEnd(self):
        self.writeStructEnd()

    def writeFieldBegin(self, name, type, id):
        pass

    def writeFieldEnd(self):
        pass

    def writeFieldStop(self):
        pass

    def writeMapBegin(self, ktype, vtype, size):
        pass

    def writeMapEnd(self):
        pass

    def writeListBegin(self, etype, size):
        pass

    def writeListEnd(self):
        pass

    def writeSetBegin(self, etype, size):
        pass

    def writeSetEnd(self):
        pass

    def writeBool(self, bool_val):
        pass

    def writeByte(self, byte):
        pass

    def writeI16(self, i16):
        pass

    def writeI32(self, i32):
        pass

    def writeI64(self, i64):
        pass

    def writeDouble(self, dub):
        pass

    def writeFloat(self, flt):
        pass

    def writeString(self, str):
        pass

    def readMessageBegin(self):
        pass

    def readMessageEnd(self):
        pass

    def readStructBegin(self):
        pass

    def readStructEnd(self):
        pass

    def readFieldBegin(self):
        pass

    def readFieldEnd(self):
        pass

    def readMapBegin(self):
        pass

    def readMapEnd(self):
        pass

    def readListBegin(self):
        pass

    def readListEnd(self):
        pass

    def readSetBegin(self):
        pass

    def readSetEnd(self):
        pass

    def readBool(self):
        pass

    def readByte(self):
        pass

    def readI16(self):
        pass

    def readI32(self):
        pass

    def readI64(self):
        pass

    def readDouble(self):
        pass

    def readFloat(self):
        pass

    def readString(self):
        pass

    def skip(self, type):
        """Read and discard one value of the given TType.

        Recurses into structs and containers so the whole value is
        consumed from the transport.

        Raises:
            TProtocolException: INVALID_DATA for an unknown type code.
        """
        if type == TType.BOOL:
            self.readBool()
        elif type == TType.BYTE:
            self.readByte()
        elif type == TType.I16:
            self.readI16()
        elif type == TType.I32:
            self.readI32()
        elif type == TType.I64:
            self.readI64()
        elif type == TType.DOUBLE:
            self.readDouble()
        elif type == TType.FLOAT:
            self.readFloat()
        elif type == TType.STRING:
            self.readString()
        elif type == TType.STRUCT:
            # CLEANUP: the original bound the return value to a local
            # (`name = ...`) that was immediately shadowed in the loop
            # below and never used; only the side effect is needed.
            self.readStructBegin()
            while True:
                (name, type, id) = self.readFieldBegin()
                if type == TType.STOP:
                    break
                self.skip(type)
                self.readFieldEnd()
            self.readStructEnd()
        elif type == TType.MAP:
            (ktype, vtype, size) = self.readMapBegin()
            for _ in range(size):
                self.skip(ktype)
                self.skip(vtype)
            self.readMapEnd()
        elif type == TType.SET:
            (etype, size) = self.readSetBegin()
            for _ in range(size):
                self.skip(etype)
            self.readSetEnd()
        elif type == TType.LIST:
            (etype, size) = self.readListBegin()
            for _ in range(size):
                self.skip(etype)
            self.readListEnd()
        else:
            raise TProtocolException(
                TProtocolException.INVALID_DATA,
                "Unexpected type for skipping {}".format(type)
            )

    def readIntegral(self, type):
        """Read and return an integral value of the given TType."""
        if type == TType.BOOL:
            return self.readBool()
        elif type == TType.BYTE:
            return self.readByte()
        elif type == TType.I16:
            return self.readI16()
        elif type == TType.I32:
            return self.readI32()
        elif type == TType.I64:
            return self.readI64()
        else:
            raise Exception("Unknown integral type: %s" % str(type))

    def readFloatingPoint(self, type):
        """Read and return a floating point value of the given TType."""
        if type == TType.FLOAT:
            return self.readFloat()
        elif type == TType.DOUBLE:
            return self.readDouble()
        else:
            raise Exception("Unknown floating point type: %s" % str(type))
class TProtocolFactory:
    """Factory interface: concrete factories build a protocol per transport."""

    def getProtocol(self, trans):
        """Return a protocol instance wrapping *trans* (stub returns None)."""
        pass
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_1376_3 |
crossvul-python_data_bad_3499_2 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3499_2 |
crossvul-python_data_good_3499_3 | '''This module manages ssh key files for bcfg2'''
__revision__ = '$Revision$'
import binascii
import os
import sys
import socket
import shutil
import tempfile
from subprocess import Popen, PIPE
import Bcfg2.Server.Plugin
class SSHbase(Bcfg2.Server.Plugin.Plugin,
              Bcfg2.Server.Plugin.Generator,
              Bcfg2.Server.Plugin.DirectoryBacked,
              Bcfg2.Server.Plugin.PullTarget):
    """
    The sshbase generator manages ssh host keys (both v1 and v2)
    for hosts.  It also manages the ssh_known_hosts file.  It can
    integrate host keys from other management domains and similarly
    export its keys.  The repository contains files in the following
    formats:

    ssh_host_key.H_(hostname) -> the v1 host private key for
      (hostname)
    ssh_host_key.pub.H_(hostname) -> the v1 host public key
      for (hostname)
    ssh_host_(dr)sa_key.H_(hostname) -> the v2 ssh host
      private key for (hostname)
    ssh_host_(dr)sa_key.pub.H_(hostname) -> the v2 ssh host
      public key for (hostname)
    ssh_known_hosts -> the current known hosts file. this
      is regenerated each time a new key is generated.
    """
    name = 'SSHbase'
    __version__ = '$Id$'
    __author__ = 'bcfg-dev@mcs.anl.gov'

    # Repository filename templates; '%s' is filled with the client hostname.
    pubkeys = ["ssh_host_dsa_key.pub.H_%s",
               "ssh_host_rsa_key.pub.H_%s", "ssh_host_key.pub.H_%s"]
    hostkeys = ["ssh_host_dsa_key.H_%s",
                "ssh_host_rsa_key.H_%s", "ssh_host_key.H_%s"]
    # Basenames of key entries this plugin can bind (see HandlesEntry).
    keypatterns = ['ssh_host_dsa_key', 'ssh_host_rsa_key', 'ssh_host_key',
                   'ssh_host_dsa_key.pub', 'ssh_host_rsa_key.pub',
                   'ssh_host_key.pub']

    def __init__(self, core, datastore):
        """Initialize plugin mixins, entry handlers and lookup caches."""
        Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
        Bcfg2.Server.Plugin.Generator.__init__(self)
        Bcfg2.Server.Plugin.PullTarget.__init__(self)
        try:
            Bcfg2.Server.Plugin.DirectoryBacked.__init__(self, self.data,
                                                         self.core.fam)
        except OSError, ioerr:
            self.logger.error("Failed to load SSHbase repository from %s" \
                              % (self.data))
            self.logger.error(ioerr)
            raise Bcfg2.Server.Plugin.PluginInitError
        # Map generated paths to their builder methods.
        self.Entries = {'Path':
                        {'/etc/ssh/ssh_known_hosts': self.build_skn,
                         '/etc/ssh/ssh_host_dsa_key': self.build_hk,
                         '/etc/ssh/ssh_host_rsa_key': self.build_hk,
                         '/etc/ssh/ssh_host_dsa_key.pub': self.build_hk,
                         '/etc/ssh/ssh_host_rsa_key.pub': self.build_hk,
                         '/etc/ssh/ssh_host_key': self.build_hk,
                         '/etc/ssh/ssh_host_key.pub': self.build_hk}}
        self.ipcache = {}    # hostname -> (ip, hostname), or False on failure
        self.namecache = {}  # ip -> list of names, or False on failure
        self.__skn = False   # cached known_hosts text; False means "rebuild"

    def get_skn(self):
        """Build memory cache of the ssh known hosts file."""
        if not self.__skn:
            # Start from any hand-maintained *.static entries in the repo.
            self.__skn = "\n".join([value.data for key, value in \
                                    self.entries.iteritems() if \
                                    key.endswith('.static')])
            names = dict()
            # if no metadata is registered yet, defer
            if len(self.core.metadata.query.all()) == 0:
                self.__skn = False
                return self.__skn
            for cmeta in self.core.metadata.query.all():
                # Collect every name/address a client is known by.
                names[cmeta.hostname] = set([cmeta.hostname])
                names[cmeta.hostname].update(cmeta.aliases)
                newnames = set()
                newips = set()
                for name in names[cmeta.hostname]:
                    # Short (unqualified) name plus forward DNS lookup.
                    newnames.add(name.split('.')[0])
                    try:
                        newips.add(self.get_ipcache_entry(name)[0])
                    except:
                        continue
                names[cmeta.hostname].update(newnames)
                names[cmeta.hostname].update(cmeta.addresses)
                names[cmeta.hostname].update(newips)
                # TODO: Only perform reverse lookups on IPs if an option is set.
                if True:
                    for ip in newips:
                        try:
                            names[cmeta.hostname].update(self.get_namecache_entry(ip))
                        except:
                            continue
                names[cmeta.hostname] = sorted(names[cmeta.hostname])
            # now we have our name cache
            pubkeys = [pubk for pubk in self.entries.keys() \
                       if pubk.find('.pub.H_') != -1]
            pubkeys.sort()
            badnames = set()
            for pubkey in pubkeys:
                hostname = pubkey.split('H_')[1]
                if hostname not in names:
                    # Log unknown hosts only once each.
                    if hostname not in badnames:
                        badnames.add(hostname)
                        self.logger.error("SSHbase: Unknown host %s; ignoring public keys" % hostname)
                    continue
                self.__skn += "%s %s" % (','.join(names[hostname]),
                                         self.entries[pubkey].data)
        return self.__skn

    def set_skn(self, value):
        """Set backing data for skn."""
        self.__skn = value
    # 'skn' behaves like an attribute but lazily rebuilds the cache on read.
    skn = property(get_skn, set_skn)

    def HandleEvent(self, event=None):
        """Local event handler that does skn regen on pubkey change."""
        Bcfg2.Server.Plugin.DirectoryBacked.HandleEvent(self, event)
        if event and '_key.pub.H_' in event.filename:
            self.skn = False
        if event and event.filename.endswith('.static'):
            self.skn = False
        if not self.__skn:
            # Rebuild once (almost) all repository files have been loaded.
            if (len(self.entries.keys())) >= (len(os.listdir(self.data))-1):
                _ = self.skn

    def HandlesEntry(self, entry, _):
        """Handle key entries dynamically."""
        return entry.tag == 'Path' and \
               ([fpat for fpat in self.keypatterns
                 if entry.get('name').endswith(fpat)]
                or entry.get('name').endswith('ssh_known_hosts'))

    def HandleEntry(self, entry, metadata):
        """Bind data."""
        if entry.get('name').endswith('ssh_known_hosts'):
            return self.build_skn(entry, metadata)
        else:
            return self.build_hk(entry, metadata)

    def get_ipcache_entry(self, client):
        """Build a cache of dns results."""
        if client in self.ipcache:
            if self.ipcache[client]:
                return self.ipcache[client]
            else:
                # A previous lookup failed; fail fast again.
                raise socket.gaierror
        else:
            # need to add entry
            try:
                ipaddr = socket.gethostbyname(client)
                self.ipcache[client] = (ipaddr, client)
                return (ipaddr, client)
            except socket.gaierror:
                # Fall back to NSS via getent for non-DNS sources.
                ipaddr = Popen(["getent", "hosts", client],
                               stdout=PIPE).stdout.read().strip().split()
                if ipaddr:
                    self.ipcache[client] = (ipaddr, client)
                    return (ipaddr, client)
                self.ipcache[client] = False
                self.logger.error("Failed to find IP address for %s" % client)
                raise socket.gaierror

    def get_namecache_entry(self, cip):
        """Build a cache of name lookups from client IP addresses."""
        if cip in self.namecache:
            # lookup cached name from IP
            if self.namecache[cip]:
                return self.namecache[cip]
            else:
                raise socket.gaierror
        else:
            # add an entry that has not been cached
            try:
                rvlookup = socket.gethostbyaddr(cip)
                if rvlookup[0]:
                    self.namecache[cip] = [rvlookup[0]]
                else:
                    self.namecache[cip] = []
                # Include any aliases returned by the reverse lookup.
                self.namecache[cip].extend(rvlookup[1])
                return self.namecache[cip]
            except socket.gaierror:
                self.namecache[cip] = False
                self.logger.error("Failed to find any names associated with IP address %s" % cip)
                raise

    def build_skn(self, entry, metadata):
        """This function builds a host specific known_hosts file."""
        client = metadata.hostname
        entry.text = self.skn
        # Append the client's own public keys under its loopback names.
        hostkeys = [keytmpl % client for keytmpl in self.pubkeys \
                    if (keytmpl % client) in self.entries]
        hostkeys.sort()
        for hostkey in hostkeys:
            entry.text += "localhost,localhost.localdomain,127.0.0.1 %s" % (
                self.entries[hostkey].data)
        permdata = {'owner':'root',
                    'group':'root',
                    'type':'file',
                    'perms':'0644'}
        [entry.attrib.__setitem__(key, permdata[key]) for key in permdata]

    def build_hk(self, entry, metadata):
        """This binds host key data into entries."""
        client = metadata.hostname
        filename = "%s.H_%s" % (entry.get('name').split('/')[-1], client)
        if filename not in self.entries.keys():
            # No key on file for this client yet; generate one on demand.
            self.GenerateHostKeys(client)
        if not filename in self.entries:
            self.logger.error("%s still not registered" % filename)
            raise Bcfg2.Server.Plugin.PluginExecutionError
        keydata = self.entries[filename].data
        permdata = {'owner':'root',
                    'group':'root',
                    'type':'file',
                    'perms':'0600'}
        if entry.get('name')[-4:] == '.pub':
            permdata['perms'] = '0644'
        [entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
        if "ssh_host_key.H_" == filename[:15]:
            # v1 private keys are binary; ship them base64-encoded.
            entry.attrib['encoding'] = 'base64'
            entry.text = binascii.b2a_base64(keydata)
        else:
            entry.text = keydata

    def GenerateHostKeys(self, client):
        """Generate new host keys for client."""
        keylist = [keytmpl % client for keytmpl in self.hostkeys]
        for hostkey in keylist:
            # Derive the ssh-keygen key type from the filename prefix.
            if 'ssh_host_rsa_key.H_' == hostkey[:19]:
                keytype = 'rsa'
            elif 'ssh_host_dsa_key.H_' == hostkey[:19]:
                keytype = 'dsa'
            else:
                keytype = 'rsa1'
            if hostkey not in self.entries.keys():
                fileloc = "%s/%s" % (self.data, hostkey)
                publoc = self.data + '/' + ".".join([hostkey.split('.')[0],
                                                     'pub',
                                                     "H_%s" % client])
                # Generate into a private temp dir, then copy into the repo.
                tempdir = tempfile.mkdtemp()
                temploc = "%s/%s" % (tempdir, hostkey)
                cmd = ["ssh-keygen", "-q", "-f", temploc, "-N", "",
                       "-t", keytype, "-C", "root@%s" % client]
                proc = Popen(cmd, stdout=PIPE, stdin=PIPE)
                proc.communicate()
                proc.wait()
                try:
                    shutil.copy(temploc, fileloc)
                    shutil.copy("%s.pub" % temploc, publoc)
                except IOError:
                    err = sys.exc_info()[1]
                    self.logger.error("Temporary SSH keys not found: %s" % err)
                try:
                    os.unlink(temploc)
                    os.unlink("%s.pub" % temploc)
                    os.rmdir(tempdir)
                except OSError:
                    err = sys.exc_info()[1]
                    self.logger.error("Failed to unlink temporary ssh keys: %s"
                                      % err)

    def AcceptChoices(self, _, metadata):
        """Offer a host-specific target for bcfg2-admin pull."""
        return [Bcfg2.Server.Plugin.Specificity(hostname=metadata.hostname)]

    def AcceptPullData(self, specific, entry, log):
        """Per-plugin bcfg2-admin pull support."""
        # specific will always be host specific
        filename = "%s/%s.H_%s" % (self.data, entry['name'].split('/')[-1],
                                   specific.hostname)
        open(filename, 'w').write(entry['text'])
        if log:
            print "Wrote file %s" % filename
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3499_3 |
crossvul-python_data_good_3500_0 | import getopt
from subprocess import Popen, PIPE
import sys
import Bcfg2.Server.Admin
class Viz(Bcfg2.Server.Admin.MetadataCore):
    """bcfg2-admin subcommand that renders metadata structures via graphviz."""
    __shorthelp__ = "Produce graphviz diagrams of metadata structures"
    __longhelp__ = (__shorthelp__ + "\n\nbcfg2-admin viz [--includehosts] "
                    "[--includebundles] [--includekey] "
                    "[--only-client clientname] "
                    "[-o output.<ext>]\n")
    __usage__ = ("bcfg2-admin viz [options]\n\n"
                 " %-32s%s\n"
                 " %-32s%s\n"
                 " %-32s%s\n"
                 " %-32s%s\n"
                 " %-32s%s\n" %
                 ("-H, --includehosts",
                  "include hosts in the viz output",
                  "-b, --includebundles",
                  "include bundles in the viz output",
                  "-k, --includekey",
                  "show a key for different digraph shapes",
                  "-c, --only-client <clientname>",
                  "show only the groups, bundles for the named client",
                  "-o, --outfile <file>",
                  "write viz output to an output file"))

    # Node fill colors cycled through by the metadata viz renderer.
    colors = ['steelblue1', 'chartreuse', 'gold', 'magenta',
              'indianred1', 'limegreen', 'orange1', 'lightblue2',
              'green1', 'blue1', 'yellow1', 'darkturquoise', 'gray66']

    # Plugins that are irrelevant to the diagram and need not be loaded.
    plugin_blacklist = ['DBStats', 'Snapshots', 'Cfg', 'Pkgmgr', 'Packages',
                        'Rules', 'Account', 'Decisions', 'Deps', 'Git', 'Svn',
                        'Fossil', 'Bzr', 'Bundler', 'TGenshi', 'SGenshi',
                        'Base']

    def __init__(self, cfile):
        """Initialize the metadata core with the plugin blacklist applied."""
        Bcfg2.Server.Admin.MetadataCore.__init__(self, cfile,
                                                 self.__usage__,
                                                 pblacklist=self.plugin_blacklist)

    def __call__(self, args):
        """Parse 'viz' subcommand options and emit the rendered graph."""
        Bcfg2.Server.Admin.MetadataCore.__call__(self, args)
        # First get options to the 'viz' subcommand
        try:
            opts, args = getopt.getopt(args, 'Hbkc:o:',
                                       ['includehosts', 'includebundles',
                                        'includekey', 'only-client=', 'outfile='])
        except getopt.GetoptError:
            msg = sys.exc_info()[1]
            print(msg)
            print(self.__longhelp__)
            raise SystemExit(1)
        hset = False            # include hosts
        bset = False            # include bundles
        kset = False            # draw the shape key
        only_client = None      # restrict output to one client
        outputfile = False      # write to file instead of stdout
        for opt, arg in opts:
            if opt in ("-H", "--includehosts"):
                hset = True
            elif opt in ("-b", "--includebundles"):
                bset = True
            elif opt in ("-k", "--includekey"):
                kset = True
            elif opt in ("-c", "--only-client"):
                only_client = arg
            elif opt in ("-o", "--outfile"):
                outputfile = arg
        data = self.Visualize(self.get_repo_path(), hset, bset,
                              kset, only_client, outputfile)
        print(data)
        raise SystemExit(0)

    def Visualize(self, repopath, hosts=False,
                  bundles=False, key=False, only_client=None, output=False):
        """Build visualization of groups file."""
        # Output format is taken from the requested file extension.
        if output:
            format = output.split('.')[-1]
        else:
            format = 'png'
        cmd = ["dot", "-T", format]
        if output:
            cmd.extend(["-o", output])
        # Stream the digraph source into graphviz' dot over a pipe.
        dotpipe = Popen(cmd, stdin=PIPE, stdout=PIPE, close_fds=True)
        try:
            dotpipe.stdin.write("digraph groups {\n")
        except:
            print("write to dot process failed. Is graphviz installed?")
            raise SystemExit(1)
        dotpipe.stdin.write('\trankdir="LR";\n')
        dotpipe.stdin.write(self.metadata.viz(hosts, bundles,
                                              key, only_client, self.colors))
        if key:
            # Legend explaining the node shapes used above.
            dotpipe.stdin.write("\tsubgraph cluster_key {\n")
            dotpipe.stdin.write('''\tstyle="filled";\n''')
            dotpipe.stdin.write('''\tcolor="lightblue";\n''')
            dotpipe.stdin.write('''\tBundle [ shape="septagon" ];\n''')
            dotpipe.stdin.write('''\tGroup [shape="ellipse"];\n''')
            dotpipe.stdin.write('''\tProfile [style="bold", shape="ellipse"];\n''')
            dotpipe.stdin.write('''\tHblock [label="Host1|Host2|Host3", shape="record"];\n''')
            dotpipe.stdin.write('''\tlabel="Key";\n''')
            dotpipe.stdin.write("\t}\n")
        dotpipe.stdin.write("}\n")
        dotpipe.stdin.close()
        return dotpipe.stdout.read()
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3500_0 |
crossvul-python_data_bad_3500_3 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3500_3 |
crossvul-python_data_good_2078_3 | #!/usr/bin/env python
"""
f2py2e - Fortran to Python C/API generator. 2nd Edition.
See __usage__ below.
Copyright 1999--2011 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@cens.ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/05/06 08:31:19 $
Pearu Peterson
"""
from __future__ import division, absolute_import, print_function
import sys
import os
import pprint
import re
from . import crackfortran
from . import rules
from . import cb_rules
from . import auxfuncs
from . import cfuncs
from . import f90mod_rules
from . import __version__
f2py_version = __version__.version
errmess = sys.stderr.write
#outmess=sys.stdout.write
show = pprint.pprint
outmess = auxfuncs.outmess
try:
from numpy import __version__ as numpy_version
except ImportError:
numpy_version = 'N/A'
__usage__ = """\
Usage:
1) To construct extension module sources:
f2py [<options>] <fortran files> [[[only:]||[skip:]] \\
<fortran functions> ] \\
[: <fortran files> ...]
2) To compile fortran files and build extension modules:
f2py -c [<options>, <build_flib options>, <extra options>] <fortran files>
3) To generate signature files:
f2py -h <filename.pyf> ...< same options as in (1) >
Description: This program generates a Python C/API file (<modulename>module.c)
that contains wrappers for given fortran functions so that they
can be called from Python. With the -c option the corresponding
extension modules are built.
Options:
--2d-numpy Use numpy.f2py tool with NumPy support. [DEFAULT]
--2d-numeric Use f2py2e tool with Numeric support.
--2d-numarray Use f2py2e tool with Numarray support.
--g3-numpy Use 3rd generation f2py from the separate f2py package.
[NOT AVAILABLE YET]
-h <filename> Write signatures of the fortran routines to file <filename>
and exit. You can then edit <filename> and use it instead
of <fortran files>. If <filename>==stdout then the
signatures are printed to stdout.
<fortran functions> Names of fortran routines for which Python C/API
functions will be generated. Default is all that are found
in <fortran files>.
<fortran files> Paths to fortran/signature files that will be scanned for
<fortran functions> in order to determine their signatures.
skip: Ignore fortran functions that follow until `:'.
only: Use only fortran functions that follow until `:'.
: Get back to <fortran files> mode.
-m <modulename> Name of the module; f2py generates a Python/C API
file <modulename>module.c or extension module <modulename>.
Default is 'untitled'.
--[no-]lower Do [not] lower the cases in <fortran files>. By default,
--lower is assumed with -h key, and --no-lower without -h key.
--build-dir <dirname> All f2py generated files are created in <dirname>.
Default is tempfile.mkdtemp().
--overwrite-signature Overwrite existing signature file.
--[no-]latex-doc Create (or not) <modulename>module.tex.
Default is --no-latex-doc.
--short-latex Create 'incomplete' LaTeX document (without commands
\\documentclass, \\tableofcontents, and \\begin{document},
\\end{document}).
--[no-]rest-doc Create (or not) <modulename>module.rst.
Default is --no-rest-doc.
--debug-capi Create C/API code that reports the state of the wrappers
during runtime. Useful for debugging.
--[no-]wrap-functions Create Fortran subroutine wrappers to Fortran 77
functions. --wrap-functions is default because it ensures
maximum portability/compiler independence.
--include-paths <path1>:<path2>:... Search include files from the given
directories.
--help-link [..] List system resources found by system_info.py. See also
--link-<resource> switch below. [..] is optional list
of resources names. E.g. try 'f2py --help-link lapack_opt'.
--quiet Run quietly.
--verbose Run with extra verbosity.
-v Print f2py version ID and exit.
numpy.distutils options (only effective with -c):
--fcompiler= Specify Fortran compiler type by vendor
--compiler= Specify C compiler type (as defined by distutils)
--help-fcompiler List available Fortran compilers and exit
--f77exec= Specify the path to F77 compiler
--f90exec= Specify the path to F90 compiler
--f77flags= Specify F77 compiler flags
--f90flags= Specify F90 compiler flags
--opt= Specify optimization flags
--arch= Specify architecture specific optimization flags
--noopt Compile without optimization
--noarch Compile without arch-dependent optimization
--debug Compile with debugging information
Extra options (only effective with -c):
--link-<resource> Link extension module with <resource> as defined
by numpy.distutils/system_info.py. E.g. to link
with optimized LAPACK libraries (vecLib on MacOSX,
ATLAS elsewhere), use --link-lapack_opt.
See also --help-link switch.
-L/path/to/lib/ -l<libname>
-D<define> -U<name>
-I/path/to/include/
<filename>.o <filename>.so <filename>.a
Using the following macros may be required with non-gcc Fortran
compilers:
-DPREPEND_FORTRAN -DNO_APPEND_FORTRAN -DUPPERCASE_FORTRAN
-DUNDERSCORE_G77
When using -DF2PY_REPORT_ATEXIT, a performance report of F2PY
interface is printed out at exit (platforms: Linux).
When using -DF2PY_REPORT_ON_ARRAY_COPY=<int>, a message is
sent to stderr whenever F2PY interface makes a copy of an
array. Integer <int> sets the threshold for array sizes when
a message should be shown.
Version: %s
numpy Version: %s
Requires: Python 2.3 or higher.
License: NumPy license (see LICENSE.txt in the NumPy source code)
Copyright 1999 - 2011 Pearu Peterson all rights reserved.
http://cens.ioc.ee/projects/f2py2e/"""%(f2py_version, numpy_version)
def scaninputline(inputline):
    """Parse the f2py command-line token list into ``(files, options)``.

    ``f`` is the collection mode: 1 = collect file names, 0 = collect
    'only:' function names, -1 = collect 'skip:' function names.
    ``f2``..``f9`` are one-shot flags meaning "the next token is the
    argument of -h / -m / --build-dir / --include-paths / --coutput /
    --f2py-wrapper-output respectively".  Exits the process on usage
    errors or when a signature file would be overwritten.
    """
    files, funcs, skipfuncs, onlyfuncs, debug=[], [], [], [], []
    f, f2, f3, f4, f5, f6, f7, f8, f9=1, 0, 0, 0, 0, 0, 0, 0, 0
    verbose = 1
    dolc=-1          # -1 = decide lowercasing from presence of -h later
    dolatexdoc = 0
    dorestdoc = 0
    wrapfuncs = 1
    buildpath = '.'
    include_paths = []
    signsfile, modulename=None, None
    options = {'buildpath':buildpath,
               'coutput': None,
               'f2py_wrapper_output': None}
    for l in inputline:
        if l=='': pass
        elif l=='only:': f=0
        elif l=='skip:': f=-1
        elif l==':': f=1;f4=0
        elif l[:8]=='--debug-': debug.append(l[8:])
        elif l=='--lower': dolc=1
        elif l=='--build-dir': f6=1
        elif l=='--no-lower': dolc=0
        elif l=='--quiet': verbose = 0
        elif l=='--verbose': verbose += 1
        elif l=='--latex-doc': dolatexdoc=1
        elif l=='--no-latex-doc': dolatexdoc=0
        elif l=='--rest-doc': dorestdoc=1
        elif l=='--no-rest-doc': dorestdoc=0
        elif l=='--wrap-functions': wrapfuncs=1
        elif l=='--no-wrap-functions': wrapfuncs=0
        elif l=='--short-latex': options['shortlatex']=1
        elif l=='--coutput': f8=1
        elif l=='--f2py-wrapper-output': f9=1
        elif l=='--overwrite-signature': options['h-overwrite']=1
        elif l=='-h': f2=1
        elif l=='-m': f3=1
        elif l[:2]=='-v':
            print(f2py_version)
            sys.exit()
        elif l=='--show-compilers':
            f5=1
        elif l[:8]=='-include':
            # register a user-supplied C include for the generated module
            cfuncs.outneeds['userincludes'].append(l[9:-1])
            cfuncs.userincludes[l[9:-1]]='#include '+l[8:]
        elif l[:15] in '--include_paths':
            outmess('f2py option --include_paths is deprecated, use --include-paths instead.\n')
            f7=1
        elif l[:15] in '--include-paths':
            f7=1
        elif l[0]=='-':
            errmess('Unknown option %s\n'%repr(l))
            sys.exit()
        # Below: the token is the pending argument of an earlier option.
        elif f2: f2=0;signsfile=l
        elif f3: f3=0;modulename=l
        elif f6: f6=0;buildpath=l
        elif f7: f7=0;include_paths.extend(l.split(os.pathsep))
        elif f8: f8=0;options["coutput"]=l
        elif f9: f9=0;options["f2py_wrapper_output"]=l
        elif f==1:
            # Plain token in file mode: keep it only if it is readable.
            try:
                open(l).close()
                files.append(l)
            except IOError as detail:
                errmess('IOError: %s. Skipping file "%s".\n'%(str(detail), l))
        elif f==-1: skipfuncs.append(l)
        elif f==0: onlyfuncs.append(l)
    if not f5 and not files and not modulename:
        print(__usage__)
        sys.exit()
    if not os.path.isdir(buildpath):
        if not verbose:
            outmess('Creating build directory %s'%(buildpath))
        os.mkdir(buildpath)
    if signsfile:
        signsfile = os.path.join(buildpath, signsfile)
    if signsfile and os.path.isfile(signsfile) and 'h-overwrite' not in options:
        errmess('Signature file "%s" exists!!! Use --overwrite-signature to overwrite.\n'%(signsfile))
        sys.exit()
    options['debug']=debug
    options['verbose']=verbose
    # Default: lowercase symbol names only when writing a signature file.
    if dolc==-1 and not signsfile: options['do-lower']=0
    else: options['do-lower']=dolc
    if modulename: options['module']=modulename
    if signsfile: options['signsfile']=signsfile
    if onlyfuncs: options['onlyfuncs']=onlyfuncs
    if skipfuncs: options['skipfuncs']=skipfuncs
    options['dolatexdoc'] = dolatexdoc
    options['dorestdoc'] = dorestdoc
    options['wrapfuncs'] = wrapfuncs
    options['buildpath']=buildpath
    options['include_paths']=include_paths
    return files, options
def callcrackfortran(files, options):
    """Run crackfortran over *files* configured from *options*.

    Pushes the relevant options into the ``crackfortran`` module globals,
    optionally writes the signature (.pyf) file, fills in per-module
    output filenames, and returns the parsed block list.
    """
    rules.options=options
    funcs=[]  # NOTE(review): unused here; kept for historical reasons
    # Configure crackfortran through its module-level globals.
    crackfortran.debug=options['debug']
    crackfortran.verbose=options['verbose']
    if 'module' in options:
        crackfortran.f77modulename=options['module']
    if 'skipfuncs' in options:
        crackfortran.skipfuncs=options['skipfuncs']
    if 'onlyfuncs' in options:
        crackfortran.onlyfuncs=options['onlyfuncs']
    crackfortran.include_paths[:]=options['include_paths']
    crackfortran.dolowercase=options['do-lower']
    postlist=crackfortran.crackfortran(files)
    if 'signsfile' in options:
        outmess('Saving signatures to file "%s"\n'%(options['signsfile']))
        pyf=crackfortran.crack2fortran(postlist)
        if options['signsfile'][-6:]=='stdout':
            sys.stdout.write(pyf)
        else:
            f=open(options['signsfile'], 'w')
            f.write(pyf)
            f.close()
    # Per-module C output filename: default '<name>module.c'.
    if options["coutput"] is None:
        for mod in postlist:
            mod["coutput"] = "%smodule.c" % mod["name"]
    else:
        for mod in postlist:
            mod["coutput"] = options["coutput"]
    # Per-module Fortran wrapper filename: default '<name>-f2pywrappers.f'.
    if options["f2py_wrapper_output"] is None:
        for mod in postlist:
            mod["f2py_wrapper_output"] = "%s-f2pywrappers.f" % mod["name"]
    else:
        for mod in postlist:
            mod["f2py_wrapper_output"] = options["f2py_wrapper_output"]
    return postlist
def buildmodules(lst):
    """Generate wrapper sources for every module block in *lst*.

    ``__user__`` blocks become callback wrappers; regular modules are
    built via ``rules.buildmodule``, skipping any module that is itself
    'use'd by another one.  Returns {module_name: build results dict}.
    """
    cfuncs.buildcfuncs()
    outmess('Building modules...\n')
    modules, mnames, isusedby=[], [], {}
    for i in range(len(lst)):
        if '__user__' in lst[i]['name']:
            # Callback (user routine) blocks are handled separately.
            cb_rules.buildcallbacks(lst[i])
        else:
            if 'use' in lst[i]:
                # Record reverse 'use' dependencies: used -> [users].
                for u in lst[i]['use'].keys():
                    if u not in isusedby:
                        isusedby[u]=[]
                    isusedby[u].append(lst[i]['name'])
            modules.append(lst[i])
            mnames.append(lst[i]['name'])
    ret = {}
    for i in range(len(mnames)):
        if mnames[i] in isusedby:
            # Used modules get built as part of their users, not standalone.
            outmess('\tSkipping module "%s" which is used by %s.\n'%(mnames[i], ','.join(['"%s"'%s for s in isusedby[mnames[i]]])))
        else:
            um=[]
            if 'use' in modules[i]:
                for u in modules[i]['use'].keys():
                    if u in isusedby and u in mnames:
                        um.append(modules[mnames.index(u)])
                    else:
                        outmess('\tModule "%s" uses nonexisting "%s" which will be ignored.\n'%(mnames[i], u))
            ret[mnames[i]] = {}
            dict_append(ret[mnames[i]], rules.buildmodule(modules[i], um))
    return ret
def dict_append(d_out, d_in):
    """Merge *d_in* into *d_out*, accumulating each key's values in a list.

    A list value extends the accumulated list (rebinding the key to a new
    list); any other value is appended in place.  Missing keys start as [].
    """
    for key, value in d_in.items():
        bucket = d_out.setdefault(key, [])
        if isinstance(value, list):
            # Rebind to a freshly concatenated list, as the original did.
            d_out[key] = bucket + value
        else:
            bucket.append(value)
def run_main(comline_list):
    """Run f2py as if string.join(comline_list,' ') is used as a command line.

    In case of using -h flag, return None.  Otherwise returns the dict
    produced by ``buildmodules`` with fortranobject.c/.h paths appended
    for each module.
    """
    crackfortran.reset_global_f2py_vars()
    # Locate the bundled fortranobject sources next to this package.
    f2pydir=os.path.dirname(os.path.abspath(cfuncs.__file__))
    fobjhsrc = os.path.join(f2pydir, 'src', 'fortranobject.h')
    fobjcsrc = os.path.join(f2pydir, 'src', 'fortranobject.c')
    files, options=scaninputline(comline_list)
    auxfuncs.options=options
    postlist=callcrackfortran(files, options)
    # Reverse 'use' map: module name -> names of modules that use it.
    isusedby={}
    for i in range(len(postlist)):
        if 'use' in postlist[i]:
            for u in postlist[i]['use'].keys():
                if u not in isusedby:
                    isusedby[u]=[]
                isusedby[u].append(postlist[i]['name'])
    for i in range(len(postlist)):
        if postlist[i]['block']=='python module' and '__user__' in postlist[i]['name']:
            if postlist[i]['name'] in isusedby:
                #if not quiet:
                outmess('Skipping Makefile build for module "%s" which is used by %s\n'%(postlist[i]['name'], ','.join(['"%s"'%s for s in isusedby[postlist[i]['name']]])))
    if 'signsfile' in options:
        # -h mode: only the signature file was requested; stop here.
        if options['verbose']>1:
            outmess('Stopping. Edit the signature file and then run f2py on the signature file: ')
            outmess('%s %s\n'%(os.path.basename(sys.argv[0]), options['signsfile']))
        return
    for i in range(len(postlist)):
        if postlist[i]['block']!='python module':
            if 'python module' not in options:
                errmess('Tip: If your original code is Fortran source then you must use -m option.\n')
            raise TypeError('All blocks must be python module blocks but got %s'%(repr(postlist[i]['block'])))
    auxfuncs.debugoptions=options['debug']
    f90mod_rules.options=options
    auxfuncs.wrapfuncs=options['wrapfuncs']
    ret=buildmodules(postlist)
    for mn in ret.keys():
        dict_append(ret[mn], {'csrc':fobjcsrc,'h':fobjhsrc})
    return ret
def filter_files(prefix, suffix, files, remove_prefix=None):
    """Split *files* into ``(filtered, rest)`` by a prefix/suffix regex.

    An entry (stripped of surrounding whitespace) matches when it fits
    ``prefix.*suffix`` anchored at the end; when *remove_prefix* is
    truthy the literal prefix length is cut off each matching entry.
    """
    pattern = re.compile(prefix + r'.*' + suffix + r'\Z')
    cut = len(prefix) if remove_prefix else 0
    filtered = []
    rest = []
    for name in (entry.strip() for entry in files):
        if pattern.match(name):
            filtered.append(name[cut:])
        else:
            rest.append(name)
    return filtered, rest
def get_prefix(module):
    """Return the grandparent directory of *module*'s source file."""
    parent = os.path.dirname(module.__file__)
    return os.path.dirname(parent)
def run_compile():
    """
    Do it all in one call!

    Implements ``f2py -c``: partitions sys.argv into f2py flags,
    numpy.distutils compiler flags, linker resources and source files,
    then drives ``numpy.distutils.core.setup`` to build the extension
    in (and optionally clean up) a build directory.
    """
    import tempfile

    # '-c' itself is consumed here; the rest of argv is re-partitioned.
    i = sys.argv.index('-c')
    del sys.argv[i]

    remove_build_dir = 0
    try: i = sys.argv.index('--build-dir')
    except ValueError: i=None
    if i is not None:
        build_dir = sys.argv[i+1]
        del sys.argv[i+1]
        del sys.argv[i]
    else:
        # No explicit build dir: use a temp dir and remove it afterwards.
        remove_build_dir = 1
        build_dir = tempfile.mkdtemp()

    # --link-<resource> flags name numpy.distutils system_info resources.
    _reg1 = re.compile(r'[-][-]link[-]')
    sysinfo_flags = [_m for _m in sys.argv[1:] if _reg1.match(_m)]
    sys.argv = [_m for _m in sys.argv if _m not in sysinfo_flags]
    if sysinfo_flags:
        sysinfo_flags = [f[7:] for f in sysinfo_flags]

    # Flags consumed by f2py itself (not passed to distutils).
    _reg2 = re.compile(r'[-][-]((no[-]|)(wrap[-]functions|lower)|debug[-]capi|quiet)|[-]include')
    f2py_flags = [_m for _m in sys.argv[1:] if _reg2.match(_m)]
    sys.argv = [_m for _m in sys.argv if _m not in f2py_flags]
    f2py_flags2 = []
    fl = 0
    for a in sys.argv[1:]:
        # Collect only:/skip: name lists (terminated by ':') for f2py.
        if a in ['only:', 'skip:']:
            fl = 1
        elif a==':':
            fl = 0
        if fl or a==':':
            f2py_flags2.append(a)
    if f2py_flags2 and f2py_flags2[-1]!=':':
        f2py_flags2.append(':')
    f2py_flags.extend(f2py_flags2)
    sys.argv = [_m for _m in sys.argv if _m not in f2py_flags2]

    # Flags destined for the distutils build_ext / config_fc commands.
    _reg3 = re.compile(r'[-][-]((f(90)?compiler([-]exec|)|compiler)=|help[-]compiler)')
    flib_flags = [_m for _m in sys.argv[1:] if _reg3.match(_m)]
    sys.argv = [_m for _m in sys.argv if _m not in flib_flags]
    _reg4 = re.compile(r'[-][-]((f(77|90)(flags|exec)|opt|arch)=|(debug|noopt|noarch|help[-]fcompiler))')
    fc_flags = [_m for _m in sys.argv[1:] if _reg4.match(_m)]
    sys.argv = [_m for _m in sys.argv if _m not in fc_flags]

    if 1:
        # Normalize --fcompiler= vendor names against the known classes.
        del_list = []
        for s in flib_flags:
            v = '--fcompiler='
            if s[:len(v)]==v:
                from numpy.distutils import fcompiler
                fcompiler.load_all_fcompiler_classes()
                allowed_keys = list(fcompiler.fcompiler_class.keys())
                nv = ov = s[len(v):].lower()
                if ov not in allowed_keys:
                    vmap = {} # XXX
                    try:
                        nv = vmap[ov]
                    except KeyError:
                        if ov not in vmap.values():
                            print('Unknown vendor: "%s"' % (s[len(v):]))
                    nv = ov
                i = flib_flags.index(s)
                flib_flags[i] = '--fcompiler=' + nv
                continue
        for s in del_list:
            i = flib_flags.index(s)
            del flib_flags[i]
    assert len(flib_flags)<=2, repr(flib_flags)

    _reg5 = re.compile(r'[-][-](verbose)')
    setup_flags = [_m for _m in sys.argv[1:] if _reg5.match(_m)]
    sys.argv = [_m for _m in sys.argv if _m not in setup_flags]
    if '--quiet' in f2py_flags:
        setup_flags.append('--quiet')

    modulename = 'untitled'
    sources = sys.argv[1:]

    for optname in ['--include_paths', '--include-paths']:
        if optname in sys.argv:
            i = sys.argv.index (optname)
            f2py_flags.extend (sys.argv[i:i+2])
            del sys.argv[i+1], sys.argv[i]
            sources = sys.argv[1:]

    if '-m' in sys.argv:
        i = sys.argv.index('-m')
        modulename = sys.argv[i+1]
        del sys.argv[i+1], sys.argv[i]
        sources = sys.argv[1:]
    else:
        # No -m: derive the module name from the first .pyf file found.
        from numpy.distutils.command.build_src import get_f2py_modulename
        pyf_files, sources = filter_files('', '[.]pyf([.]src|)', sources)
        sources = pyf_files + sources
        for f in pyf_files:
            modulename = get_f2py_modulename(f)
            if modulename:
                break

    # Peel compiler/linker style arguments off the source list.
    extra_objects, sources = filter_files('', '[.](o|a|so)', sources)
    include_dirs, sources = filter_files('-I', '', sources, remove_prefix=1)
    library_dirs, sources = filter_files('-L', '', sources, remove_prefix=1)
    libraries, sources = filter_files('-l', '', sources, remove_prefix=1)
    undef_macros, sources = filter_files('-U', '', sources, remove_prefix=1)
    define_macros, sources = filter_files('-D', '', sources, remove_prefix=1)

    using_numarray = 0
    using_numeric = 0

    # Convert '-DNAME=VALUE' / '-DNAME' strings into (name, value) tuples.
    for i in range(len(define_macros)):
        name_value = define_macros[i].split('=', 1)
        if len(name_value)==1:
            name_value.append(None)
        if len(name_value)==2:
            define_macros[i] = tuple(name_value)
        else:
            print('Invalid use of -D:', name_value)

    from numpy.distutils.system_info import get_info

    num_include_dir = None
    num_info = {}
    #import numpy
    #n = 'numpy'
    #p = get_prefix(numpy)
    #from numpy.distutils.misc_util import get_numpy_include_dirs
    #num_info = {'include_dirs': get_numpy_include_dirs()}

    if num_info:
        include_dirs.extend(num_info.get('include_dirs', []))

    from numpy.distutils.core import setup, Extension
    ext_args = {'name': modulename, 'sources': sources,
                'include_dirs': include_dirs,
                'library_dirs': library_dirs,
                'libraries': libraries,
                'define_macros': define_macros,
                'undef_macros': undef_macros,
                'extra_objects': extra_objects,
                'f2py_options': f2py_flags,
                }

    if sysinfo_flags:
        # Fold in libraries/dirs discovered for each --link-<resource>.
        from numpy.distutils.misc_util import dict_append
        for n in sysinfo_flags:
            i = get_info(n)
            if not i:
                outmess('No %s resources found in system'\
                        ' (try `f2py --help-link`)\n' % (repr(n)))
            dict_append(ext_args,**i)

    ext = Extension(**ext_args)
    # Rebuild argv as a distutils 'build' command line.
    sys.argv = [sys.argv[0]] + setup_flags
    sys.argv.extend(['build',
                     '--build-temp', build_dir,
                     '--build-base', build_dir,
                     '--build-platlib', '.'])
    if fc_flags:
        sys.argv.extend(['config_fc']+fc_flags)
    if flib_flags:
        sys.argv.extend(['build_ext']+flib_flags)

    setup(ext_modules = [ext])

    if remove_build_dir and os.path.exists(build_dir):
        import shutil
        outmess('Removing build directory %s\n'%(build_dir))
        shutil.rmtree(build_dir)
def main():
    """Command-line entry point: dispatch to link help, compile, or generate."""
    if '--help-link' in sys.argv[1:]:
        sys.argv.remove('--help-link')
        from numpy.distutils.system_info import show_all
        show_all()
        return
    if '-c' in sys.argv[1:]:
        # -c: compile sources and build the extension module.
        run_compile()
    else:
        # Default: only generate the C/API wrapper sources.
        run_main(sys.argv[1:])
#if __name__ == "__main__":
# main()
# EOF
# --- end of previous file fragment (dataset separator artifact removed) ---
"""Header value parser implementing various email-related RFC parsing rules.
The parsing methods defined in this module implement various email-related
parsing rules. Principal among them is RFC 5322, which is the follow-on
to RFC 2822 and primarily a clarification of the former. It also implements
RFC 2047 encoded word decoding.
RFC 5322 goes to considerable trouble to maintain backward compatibility with
RFC 822 in the parse phase, while cleaning up the structure on the generation
phase. This parser supports correct RFC 5322 generation by tagging white space
as folding white space only when folding is allowed in the non-obsolete rule
sets. Actually, the parser is even more generous when accepting input than RFC
5322 mandates, following the spirit of Postel's Law, which RFC 5322 encourages.
Where possible deviations from the standard are annotated on the 'defects'
attribute of tokens that deviate.
The general structure of the parser follows RFC 5322, and uses its terminology
where there is a direct correspondence. Where the implementation requires a
somewhat different structure than that used by the formal grammar, new terms
that mimic the closest existing terms are used. Thus, it really helps to have
a copy of RFC 5322 handy when studying this code.
Input to the parser is a string that has already been unfolded according to
RFC 5322 rules. According to the RFC this unfolding is the very first step, and
this parser leaves the unfolding step to a higher level message parser, which
will have already detected the line breaks that need unfolding while
determining the beginning and end of each header.
The output of the parser is a TokenList object, which is a list subclass. A
TokenList is a recursive data structure. The terminal nodes of the structure
are Terminal objects, which are subclasses of str. These do not correspond
directly to terminal objects in the formal grammar, but are instead more
practical higher level combinations of true terminals.
All TokenList and Terminal objects have a 'value' attribute, which produces the
semantically meaningful value of that part of the parse subtree. The value of
all whitespace tokens (no matter how many sub-tokens they may contain) is a
single space, as per the RFC rules. This includes 'CFWS', which is herein
included in the general class of whitespace tokens. There is one exception to
the rule that whitespace tokens are collapsed into single spaces in values: in
the value of a 'bare-quoted-string' (a quoted-string with no leading or
trailing whitespace), any whitespace that appeared between the quotation marks
is preserved in the returned value. Note that in all Terminal strings quoted
pairs are turned into their unquoted values.
All TokenList and Terminal objects also have a string value, which attempts to
be a "canonical" representation of the RFC-compliant form of the substring that
produced the parsed subtree, including minimal use of quoted pair quoting.
Whitespace runs are not collapsed.
Comment tokens also have a 'content' attribute providing the string found
between the parens (including any nested comments) with whitespace preserved.
All TokenList and Terminal objects have a 'defects' attribute which is a
possibly empty list all of the defects found while creating the token. Defects
may appear on any token in the tree, and a composite list of all defects in the
subtree is available through the 'all_defects' attribute of any node. (For
Terminal notes x.defects == x.all_defects.)
Each object in a parse tree is called a 'token', and each has a 'token_type'
attribute that gives the name from the RFC 5322 grammar that it represents.
Not all RFC 5322 nodes are produced, and there is one non-RFC 5322 node that
may be produced: 'ptext'. A 'ptext' is a string of printable ascii characters.
It is returned in place of lists of (ctext/quoted-pair) and
(qtext/quoted-pair).
XXX: provide complete list of token types.
"""
import re
import sys
import urllib # For urllib.parse.unquote
from string import hexdigits
from operator import itemgetter
from email import _encoded_words as _ew
from email import errors
from email import utils
#
# Useful constants and functions
#
# Character classes used by the parsers below.  Each *_ENDS set names the
# characters that terminate a run of the corresponding token type.
WSP = set(' \t')
CFWS_LEADER = WSP | set('(')
SPECIALS = set(r'()<>@,:;.\"[]')
ATOM_ENDS = SPECIALS | WSP
DOT_ATOM_ENDS = ATOM_ENDS - set('.')
# '.', '"', and '(' do not end phrases in order to support obs-phrase
PHRASE_ENDS = SPECIALS - set('."(')
# Terminators for MIME parameter (attribute/value) parsing.
TSPECIALS = (SPECIALS | set('/?=')) - set('.')
TOKEN_ENDS = TSPECIALS | WSP
ASPECIALS = TSPECIALS | set("*'%")
ATTRIBUTE_ENDS = ASPECIALS | WSP
# '%' is valid inside extended attribute values (%XX escapes).
EXTENDED_ATTRIBUTE_ENDS = ATTRIBUTE_ENDS - set('%')
def quote_string(value):
    """Return *value* rendered as a quoted-string.

    Backslashes and double quotes are escaped as quoted-pairs and the
    result is wrapped in DQUOTEs.
    """
    escaped = str(value).replace('\\', '\\\\').replace('"', r'\"')
    return '"{}"'.format(escaped)
# Match a RFC 2047 word, looks like =?utf-8?q?someword?=
rfc2047_matcher = re.compile(r'''
=\? # literal =?
[^?]* # charset
\? # literal ?
[qQbB] # literal 'q' or 'b', case insensitive
\? # literal ?
.*? # encoded word
\?= # literal ?=
''', re.VERBOSE | re.MULTILINE)
#
# TokenList and its subclasses
#
class TokenList(list):
    """Base class for all parse-tree nodes: a list of child tokens.

    Children are either other TokenLists or Terminal instances.  The
    semantic value of the node is the concatenation of its children's
    values; str() of the node reproduces the (canonicalized) source text.
    """
    # Grammar production name; every concrete subclass overrides this.
    token_type = None
    # False when a fold may not break at this token's boundary.
    syntactic_break = True
    # False when adjacent encoded words may not be combined across this node.
    ew_combine_allowed = True
    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        self.defects = []
    def __str__(self):
        return ''.join(str(x) for x in self)
    def __repr__(self):
        return '{}({})'.format(self.__class__.__name__,
                               super().__repr__())
    @property
    def value(self):
        # Semantic value: concatenation of children's non-empty values.
        return ''.join(x.value for x in self if x.value)
    @property
    def all_defects(self):
        # Defects on this node plus, recursively, those of all descendants.
        return sum((x.all_defects for x in self), self.defects)
    def startswith_fws(self):
        return self[0].startswith_fws()
    @property
    def as_ew_allowed(self):
        """True if all top level tokens of this part may be RFC2047 encoded."""
        return all(part.as_ew_allowed for part in self)
    @property
    def comments(self):
        # All comment strings found anywhere in the subtree.
        comments = []
        for token in self:
            comments.extend(token.comments)
        return comments
    def fold(self, *, policy):
        return _refold_parse_tree(self, policy=policy)
    def pprint(self, indent=''):
        # Debugging helper: print the tree structure.
        print(self.ppstr(indent=indent))
    def ppstr(self, indent=''):
        return '\n'.join(self._pp(indent=indent))
    def _pp(self, indent=''):
        # Yield the pretty-printed representation one line at a time.
        yield '{}{}/{}('.format(
            indent,
            self.__class__.__name__,
            self.token_type)
        for token in self:
            if not hasattr(token, '_pp'):
                yield (indent + '    !! invalid element in token '
                                        'list: {!r}'.format(token))
            else:
                yield from token._pp(indent+'    ')
        if self.defects:
            extra = ' Defects: {}'.format(self.defects)
        else:
            extra = ''
        yield '{}){}'.format(indent, extra)
class WhiteSpaceTokenList(TokenList):
    """A token list that is semantically just white space."""
    @property
    def value(self):
        # Runs of folding white space collapse to a single space.
        return ' '
    @property
    def comments(self):
        out = []
        for tok in self:
            if tok.token_type == 'comment':
                out.append(tok.content)
        return out
class UnstructuredTokenList(TokenList):
    """An unstructured header value (arbitrary text runs and white space)."""
    token_type = 'unstructured'
class Phrase(TokenList):
    """A run of words (grammar 'phrase')."""
    token_type = 'phrase'
class Word(TokenList):
    """A single word: an atom or a quoted-string (grammar 'word')."""
    token_type = 'word'
class CFWSList(WhiteSpaceTokenList):
    """Comments and/or folding white space."""
    token_type = 'cfws'
class Atom(TokenList):
    """An atom, possibly with attached CFWS."""
    token_type = 'atom'
class Token(TokenList):
    """Grammar 'token' production."""
    token_type = 'token'
    # Tokens are never rendered as RFC 2047 encoded words.
    encode_as_ew = False
class EncodedWord(TokenList):
    """A decoded RFC 2047 encoded word."""
    token_type = 'encoded-word'
    # Attributes filled in by get_encoded_word:
    cte = None      # raw source text the word was parsed from
    charset = None
    lang = None
class QuotedString(TokenList):
    """A quoted-string together with any surrounding CFWS."""
    token_type = 'quoted-string'
    @property
    def content(self):
        # Value of the embedded bare-quoted-string, ignoring CFWS children.
        for tok in self:
            if tok.token_type == 'bare-quoted-string':
                return tok.value
    @property
    def quoted_value(self):
        # Like value, but the quoted part keeps its DQUOTEs.
        pieces = [str(tok) if tok.token_type == 'bare-quoted-string'
                  else tok.value
                  for tok in self]
        return ''.join(pieces)
    @property
    def stripped_value(self):
        # The quoted content with surrounding CFWS dropped.
        for tok in self:
            if tok.token_type == 'bare-quoted-string':
                return tok.value
class BareQuotedString(QuotedString):
    """The part of a quoted-string between the DQUOTEs."""
    token_type = 'bare-quoted-string'
    def __str__(self):
        # Re-quote (and re-escape) the content when rendering source text.
        return quote_string(''.join(map(str, self)))
    @property
    def value(self):
        # Unlike other tokens, internal white space is preserved here.
        return ''.join(map(str, self))
class Comment(WhiteSpaceTokenList):
    """A comment: "(" ... ")", possibly containing nested comments."""
    token_type = 'comment'
    def __str__(self):
        parts = ['(']
        parts.extend(self.quote(tok) for tok in self)
        parts.append(')')
        return ''.join(parts)
    def quote(self, value):
        # Nested comments render themselves (already escaped); anything
        # else gets its backslashes and parens turned into quoted-pairs.
        if value.token_type == 'comment':
            return str(value)
        text = str(value).replace('\\', '\\\\')
        text = text.replace('(', r'\(')
        return text.replace(')', r'\)')
    @property
    def content(self):
        # The text between the parens, white space preserved.
        return ''.join(str(tok) for tok in self)
    @property
    def comments(self):
        return [self.content]
class AddressList(TokenList):
    """A comma separated list of addresses."""
    token_type = 'address-list'
    @property
    def addresses(self):
        # Only direct 'address' children; separators/CFWS are skipped.
        return [x for x in self if x.token_type=='address']
    @property
    def mailboxes(self):
        # Flattened list of the *valid* mailboxes of every address.
        return sum((x.mailboxes
                    for x in self if x.token_type=='address'), [])
    @property
    def all_mailboxes(self):
        # Like mailboxes, but invalid-mailbox tokens are included too.
        return sum((x.all_mailboxes
                    for x in self if x.token_type=='address'), [])
class Address(TokenList):
    """A single address: either a mailbox or a group."""
    token_type = 'address'
    @property
    def display_name(self):
        # Only a group child provides a display name at this level.
        if self[0].token_type == 'group':
            return self[0].display_name
    @property
    def mailboxes(self):
        if self[0].token_type == 'mailbox':
            return [self[0]]
        elif self[0].token_type == 'invalid-mailbox':
            return []
        # Otherwise the child is a group; delegate to it.
        return self[0].mailboxes
    @property
    def all_mailboxes(self):
        if self[0].token_type == 'mailbox':
            return [self[0]]
        elif self[0].token_type == 'invalid-mailbox':
            # all_mailboxes keeps the invalid entry, unlike mailboxes.
            return [self[0]]
        return self[0].all_mailboxes
class MailboxList(TokenList):
    """A comma separated list of mailboxes."""
    token_type = 'mailbox-list'
    @property
    def mailboxes(self):
        return [x for x in self if x.token_type=='mailbox']
    @property
    def all_mailboxes(self):
        # Valid and invalid mailboxes alike.
        return [x for x in self
                if x.token_type in ('mailbox', 'invalid-mailbox')]
class GroupList(TokenList):
    """The (possibly empty) mailbox list inside a group."""
    token_type = 'group-list'
    @property
    def mailboxes(self):
        # A group-list may contain only CFWS; then there are no mailboxes.
        if not self or self[0].token_type != 'mailbox-list':
            return []
        return self[0].mailboxes
    @property
    def all_mailboxes(self):
        if not self or self[0].token_type != 'mailbox-list':
            return []
        return self[0].all_mailboxes
class Group(TokenList):
    """A group address: display-name ":" [group-list] ";"."""
    token_type = "group"
    @property
    def mailboxes(self):
        # The optional group-list, when present, is the third child
        # (after the display-name and the ':').
        if self[2].token_type != 'group-list':
            return []
        return self[2].mailboxes
    @property
    def all_mailboxes(self):
        if self[2].token_type != 'group-list':
            return []
        return self[2].all_mailboxes
    @property
    def display_name(self):
        return self[0].display_name
class NameAddr(TokenList):
    """[display-name] angle-addr; address attributes come from the angle-addr."""
    token_type = 'name-addr'
    @property
    def display_name(self):
        # A single child means there is no display name, just the angle-addr.
        if len(self) == 1:
            return None
        return self[0].display_name
    @property
    def local_part(self):
        return self[-1].local_part
    @property
    def domain(self):
        return self[-1].domain
    @property
    def route(self):
        return self[-1].route
    @property
    def addr_spec(self):
        return self[-1].addr_spec
class AngleAddr(TokenList):
    """An addr-spec wrapped in angle brackets, with optional obs-route."""
    token_type = 'angle-addr'
    @property
    def local_part(self):
        for x in self:
            if x.token_type == 'addr-spec':
                return x.local_part
    @property
    def domain(self):
        for x in self:
            if x.token_type == 'addr-spec':
                return x.domain
    @property
    def route(self):
        for x in self:
            if x.token_type == 'obs-route':
                return x.domains
    @property
    def addr_spec(self):
        for x in self:
            if x.token_type == 'addr-spec':
                if x.local_part:
                    return x.addr_spec
                else:
                    # Render an empty local part as a quoted empty string.
                    return quote_string(x.local_part) + x.addr_spec
        else:
            # for/else: no addr-spec child at all -> null address.
            return '<>'
class ObsRoute(TokenList):
    """Obsolete route syntax: a series of '@domain' components."""
    token_type = 'obs-route'
    @property
    def domains(self):
        return [x.domain for x in self if x.token_type == 'domain']
class Mailbox(TokenList):
    """A mailbox: a name-addr or a bare addr-spec."""
    token_type = 'mailbox'
    @property
    def display_name(self):
        # Only the name-addr form can carry a display name.
        if self[0].token_type == 'name-addr':
            return self[0].display_name
    @property
    def local_part(self):
        return self[0].local_part
    @property
    def domain(self):
        return self[0].domain
    @property
    def route(self):
        # Only the name-addr form can carry an (obsolete) route.
        if self[0].token_type == 'name-addr':
            return self[0].route
    @property
    def addr_spec(self):
        return self[0].addr_spec
class InvalidMailbox(TokenList):
    """Placeholder for a mailbox that could not be parsed."""
    token_type = 'invalid-mailbox'
    @property
    def display_name(self):
        return None
    # Every address attribute of an invalid mailbox reads as None.
    local_part = domain = route = addr_spec = display_name
class Domain(TokenList):
    """The domain portion of an addr-spec."""
    token_type = 'domain'
    # Never fold a domain as an RFC 2047 encoded word.
    as_ew_allowed = False
    @property
    def domain(self):
        # White space inside the domain is semantically insignificant.
        return ''.join(super().value.split())
class DotAtom(TokenList):
    """dot-atom: dot-atom-text with optional surrounding CFWS."""
    token_type = 'dot-atom'
class DotAtomText(TokenList):
    """1*atext *("." 1*atext)."""
    token_type = 'dot-atom-text'
    as_ew_allowed = True
class NoFoldLiteral(TokenList):
    """A bracketed literal that may not be folded or encoded."""
    token_type = 'no-fold-literal'
    as_ew_allowed = False
class AddrSpec(TokenList):
    """addr-spec = local-part "@" domain."""
    token_type = 'addr-spec'
    as_ew_allowed = False
    @property
    def local_part(self):
        return self[0].local_part
    @property
    def domain(self):
        # Fewer than three children means the '@ domain' part is absent.
        if len(self) < 3:
            return None
        return self[-1].domain
    @property
    def value(self):
        if len(self) < 3:
            return self[0].value
        # Drop white space adjacent to the '@'.
        return self[0].value.rstrip()+self[1].value+self[2].value.lstrip()
    @property
    def addr_spec(self):
        # Quote the local part only if it contains characters that would
        # otherwise terminate a dot-atom.
        nameset = set(self.local_part)
        if len(nameset) > len(nameset-DOT_ATOM_ENDS):
            lp = quote_string(self.local_part)
        else:
            lp = self.local_part
        if self.domain is not None:
            return lp + '@' + self.domain
        return lp
class ObsLocalPart(TokenList):
    """Obsolete local-part syntax (words joined by dots, with CFWS)."""
    token_type = 'obs-local-part'
    as_ew_allowed = False
class DisplayName(Phrase):
    """The display name of an address; a phrase with special rendering."""
    token_type = 'display-name'
    ew_combine_allowed = False
    @property
    def display_name(self):
        # Semantic name: the phrase value with CFWS trimmed from both ends,
        # whether the CFWS is a direct child or nested in the edge child.
        res = TokenList(self)
        if res[0].token_type == 'cfws':
            res.pop(0)
        else:
            if res[0][0].token_type == 'cfws':
                res[0] = TokenList(res[0][1:])
        if res[-1].token_type == 'cfws':
            res.pop()
        else:
            if res[-1][-1].token_type == 'cfws':
                res[-1] = TokenList(res[-1][:-1])
        return res.value
    @property
    def value(self):
        # Render as a single quoted-string when any part was quoted or the
        # phrase has defects; keep a single space where CFWS touched an edge.
        quote = False
        if self.defects:
            quote = True
        else:
            for x in self:
                if x.token_type == 'quoted-string':
                    quote = True
        if quote:
            pre = post = ''
            if self[0].token_type=='cfws' or self[0][0].token_type=='cfws':
                pre = ' '
            if self[-1].token_type=='cfws' or self[-1][-1].token_type=='cfws':
                post = ' '
            return pre+quote_string(self.display_name)+post
        else:
            return super().value
class LocalPart(TokenList):
    """The local-part of an addr-spec."""
    token_type = 'local-part'
    as_ew_allowed = False
    @property
    def value(self):
        if self[0].token_type == "quoted-string":
            return self[0].quoted_value
        else:
            return self[0].value
    @property
    def local_part(self):
        # Strip whitespace from front, back, and around dots.
        # Sentinel DOTs at both ends let every real token be examined as
        # part of a (previous, current) pair; they are sliced off at the end.
        res = [DOT]
        last = DOT
        last_is_tl = False
        for tok in self[0] + [DOT]:
            if tok.token_type == 'cfws':
                continue
            if (last_is_tl and tok.token_type == 'dot' and
                    last[-1].token_type == 'cfws'):
                # Drop CFWS that trailed the previous token, before a dot.
                res[-1] = TokenList(last[:-1])
            is_tl = isinstance(tok, TokenList)
            if (is_tl and last.token_type == 'dot' and
                    tok[0].token_type == 'cfws'):
                # Drop CFWS that leads a token right after a dot.
                res.append(TokenList(tok[1:]))
            else:
                res.append(tok)
            last = res[-1]
            last_is_tl = is_tl
        res = TokenList(res[1:-1])
        return res.value
class DomainLiteral(TokenList):
    """A domain-literal: "[" dtext "]"."""
    token_type = 'domain-literal'
    as_ew_allowed = False
    @property
    def domain(self):
        # White space is semantically insignificant inside the literal.
        return ''.join(super().value.split())
    @property
    def ip(self):
        # The text between the brackets.
        for x in self:
            if x.token_type == 'ptext':
                return x.value
class MIMEVersion(TokenList):
    """A parsed MIME-Version header value."""
    token_type = 'mime-version'
    # Set by the MIME-Version parser (defined elsewhere in this module);
    # they remain None when the value is malformed.
    major = None
    minor = None
class Parameter(TokenList):
    """A single MIME parameter (attribute[*N][*]=value)."""
    token_type = 'parameter'
    # Flags set by the parameter parser (defined elsewhere in this module):
    # sectioned: the attribute carried a *N section number;
    # extended: the value uses the extended (%XX-escaped) form.
    sectioned = False
    extended = False
    charset = 'us-ascii'
    @property
    def section_number(self):
        # Because the first token, the attribute (name) eats CFWS, the second
        # token is always the section if there is one.
        return self[1].number if self.sectioned else 0
    @property
    def param_value(self):
        # This is part of the "handle quoted extended parameters" hack.
        # Dig through quoted-string wrapping, if any, to reach the value.
        for token in self:
            if token.token_type == 'value':
                return token.stripped_value
            if token.token_type == 'quoted-string':
                for token in token:
                    if token.token_type == 'bare-quoted-string':
                        for token in token:
                            if token.token_type == 'value':
                                return token.stripped_value
        return ''
class InvalidParameter(Parameter):
    """A parameter that could not be parsed."""
    token_type = 'invalid-parameter'
class Attribute(TokenList):
    """The attribute (name) part of a MIME parameter."""
    token_type = 'attribute'
    @property
    def stripped_value(self):
        # The attribute text without surrounding CFWS.
        for token in self:
            if token.token_type.endswith('attrtext'):
                return token.value
class Section(TokenList):
    """A *N section marker on a sectioned parameter."""
    token_type = 'section'
    # Numeric section value; filled in by the parser.
    number = None
class Value(TokenList):
    """The value part of a MIME parameter."""
    token_type = 'value'
    @property
    def stripped_value(self):
        token = self[0]
        # Skip leading CFWS, then delegate to the real value token.
        if token.token_type == 'cfws':
            token = self[1]
        if token.token_type.endswith(
                ('quoted-string', 'attribute', 'extended-attribute')):
            return token.stripped_value
        return self.value
class MimeParameters(TokenList):
    """The full parameter section of a parameterized header value.

    The params property reassembles RFC 2231 sectioned/extended
    parameters into complete (name, value) pairs.
    """
    token_type = 'mime-parameters'
    syntactic_break = False
    @property
    def params(self):
        # The RFC specifically states that the ordering of parameters is not
        # guaranteed and may be reordered by the transport layer.  So we have
        # to assume the RFC 2231 pieces can come in any order.  However, we
        # output them in the order that we first see a given name, which gives
        # us a stable __str__.
        params = {}  # Using order preserving dict from Python 3.7+
        for token in self:
            if not token.token_type.endswith('parameter'):
                continue
            if token[0].token_type != 'attribute':
                continue
            name = token[0].value.strip()
            if name not in params:
                params[name] = []
            params[name].append((token.section_number, token))
        for name, parts in params.items():
            # Reassemble each name's sections in numeric order.
            parts = sorted(parts, key=itemgetter(0))
            first_param = parts[0][1]
            charset = first_param.charset
            # Our arbitrary error recovery is to ignore duplicate parameters,
            # to use appearance order if there are duplicate rfc 2231 parts,
            # and to ignore gaps.  This mimics the error recovery of get_param.
            if not first_param.extended and len(parts) > 1:
                if parts[1][0] == 0:
                    parts[1][1].defects.append(errors.InvalidHeaderDefect(
                        'duplicate parameter name; duplicate(s) ignored'))
                    parts = parts[:1]
                # Else assume the *0* was missing...note that this is different
                # from get_param, but we registered a defect for this earlier.
            value_parts = []
            i = 0
            for section_number, param in parts:
                if section_number != i:
                    # We could get fancier here and look for a complete
                    # duplicate extended parameter and ignore the second one
                    # seen.  But we're not doing that.  The old code didn't.
                    if not param.extended:
                        param.defects.append(errors.InvalidHeaderDefect(
                            'duplicate parameter name; duplicate ignored'))
                        continue
                    else:
                        param.defects.append(errors.InvalidHeaderDefect(
                            "inconsistent RFC2231 parameter numbering"))
                i += 1
                value = param.param_value
                if param.extended:
                    # Undo the %XX escaping, then decode with the declared
                    # charset.
                    try:
                        value = urllib.parse.unquote_to_bytes(value)
                    except UnicodeEncodeError:
                        # source had surrogate escaped bytes.  What we do now
                        # is a bit of an open question.  I'm not sure this is
                        # the best choice, but it is what the old algorithm did
                        value = urllib.parse.unquote(value, encoding='latin-1')
                    else:
                        try:
                            value = value.decode(charset, 'surrogateescape')
                        except LookupError:
                            # XXX: there should really be a custom defect for
                            # unknown character set to make it easy to find,
                            # because otherwise unknown charset is a silent
                            # failure.
                            value = value.decode('us-ascii', 'surrogateescape')
                        if utils._has_surrogates(value):
                            param.defects.append(errors.UndecodableBytesDefect())
                value_parts.append(value)
            value = ''.join(value_parts)
            yield name, value
    def __str__(self):
        params = []
        for name, value in self.params:
            if value:
                params.append('{}={}'.format(name, quote_string(value)))
            else:
                params.append(name)
        params = '; '.join(params)
        return ' ' + params if params else ''
class ParameterizedHeaderValue(TokenList):
    """Base for header values that carry a mime-parameters section."""
    # Set this false so that the value doesn't wind up on a new line even
    # if it and the parameters would fit there but not on the first line.
    syntactic_break = False
    @property
    def params(self):
        # Search from the end; the mime-parameters token trails the value.
        for token in reversed(self):
            if token.token_type == 'mime-parameters':
                return token.params
        return {}
class ContentType(ParameterizedHeaderValue):
    """A parsed Content-Type value."""
    token_type = 'content-type'
    as_ew_allowed = False
    # Class-level defaults; the content-type parser (defined elsewhere in
    # this module) overwrites them on a successful parse.
    maintype = 'text'
    subtype = 'plain'
class ContentDisposition(ParameterizedHeaderValue):
    """A parsed Content-Disposition value."""
    token_type = 'content-disposition'
    as_ew_allowed = False
    content_disposition = None
class ContentTransferEncoding(TokenList):
    """A parsed Content-Transfer-Encoding value."""
    token_type = 'content-transfer-encoding'
    as_ew_allowed = False
    cte = '7bit'
class HeaderLabel(TokenList):
    """The 'Name:' portion of a header line."""
    token_type = 'header-label'
    as_ew_allowed = False
class MsgID(TokenList):
    """A message identifier token (angle-bracketed id)."""
    token_type = 'msg-id'
    as_ew_allowed = False
    # NOTE(review): signature differs from TokenList.fold(*, policy=...);
    # confirm callers invoke this positionally.
    def fold(self, policy):
        # message-id tokens may not be folded.
        return str(self) + policy.linesep
class MessageID(MsgID):
    """A complete Message-ID header value."""
    token_type = 'message-id'
class Header(TokenList):
    """A complete header: label plus value."""
    token_type = 'header'
#
# Terminal classes and instances
#
class Terminal(str):
    """Base class for leaf tokens: a str carrying token metadata."""
    as_ew_allowed = True
    ew_combine_allowed = True
    syntactic_break = True
    def __new__(cls, value, token_type):
        self = super().__new__(cls, value)
        self.token_type = token_type
        self.defects = []
        return self
    def __repr__(self):
        return "{}({})".format(self.__class__.__name__, super().__repr__())
    def pprint(self):
        print(self.__class__.__name__ + '/' + self.token_type)
    @property
    def all_defects(self):
        # Terminals have no children, so all_defects is just defects.
        return list(self.defects)
    def _pp(self, indent=''):
        return ["{}{}/{}({}){}".format(
            indent,
            self.__class__.__name__,
            self.token_type,
            super().__repr__(),
            '' if not self.defects else ' {}'.format(self.defects),
            )]
    def pop_trailing_ws(self):
        # This terminates the recursion.
        return None
    @property
    def comments(self):
        return []
    def __getnewargs__(self):
        # Ensure pickling/copying re-creates the token_type attribute.
        return(str(self), self.token_type)
class WhiteSpaceTerminal(Terminal):
    """A leaf token consisting of folding white space."""
    @property
    def value(self):
        # Any run of white space is semantically a single space.
        return ' '
    def startswith_fws(self):
        return True
class ValueTerminal(Terminal):
    """A leaf token whose semantic value is its literal text."""
    @property
    def value(self):
        return self
    def startswith_fws(self):
        return False
class EWWhiteSpaceTerminal(WhiteSpaceTerminal):
    """White space between two encoded words; semantically empty."""
    @property
    def value(self):
        return ''
    def __str__(self):
        return ''
# Shared singleton terminals used throughout the parsers below.
# XXX these need to become classes and used as instances so
# that a program can't change them in a parse tree and screw
# up other parse trees.  Maybe should have tests for that, too.
DOT = ValueTerminal('.', 'dot')
ListSeparator = ValueTerminal(',', 'list-separator')
RouteComponentMarker = ValueTerminal('@', 'route-component-marker')
#
# Parser
#
# Parse strings according to RFC822/2047/2822/5322 rules.
#
# This is a stateless parser. Each get_XXX function accepts a string and
# returns either a Terminal or a TokenList representing the RFC object named
# by the method and a string containing the remaining unparsed characters
# from the input. Thus a parser method consumes the next syntactic construct
# of a given type and returns a token representing the construct plus the
# unparsed remainder of the input string.
#
# For example, if the first element of a structured header is a 'phrase',
# then:
#
# phrase, value = get_phrase(value)
#
# returns the complete phrase from the start of the string value, plus any
# characters left in the string after the phrase is removed.
# Matchers for runs of characters, built from the *_ENDS sets above.
_wsp_splitter = re.compile(r'([{}]+)'.format(''.join(WSP))).split
_non_atom_end_matcher = re.compile(r"[^{}]+".format(
    re.escape(''.join(ATOM_ENDS)))).match
# Finds ASCII characters outside the printable range 0x21-0x7E.
_non_printable_finder = re.compile(r"[\x00-\x20\x7F]").findall
_non_token_end_matcher = re.compile(r"[^{}]+".format(
    re.escape(''.join(TOKEN_ENDS)))).match
_non_attribute_end_matcher = re.compile(r"[^{}]+".format(
    re.escape(''.join(ATTRIBUTE_ENDS)))).match
_non_extended_attribute_end_matcher = re.compile(r"[^{}]+".format(
    re.escape(''.join(EXTENDED_ATTRIBUTE_ENDS)))).match
def _validate_xtext(xtext):
    """If input token contains ASCII non-printables, register a defect."""
    bad_chars = _non_printable_finder(xtext)
    if bad_chars:
        xtext.defects.append(errors.NonPrintableDefect(bad_chars))
    # Surrogates mean the source bytes could not be decoded as ASCII.
    if utils._has_surrogates(xtext):
        xtext.defects.append(errors.UndecodableBytesDefect(
            "Non-ASCII characters found in header token"))
def _get_ptext_to_endchars(value, endchars):
    """Scan printables/quoted-pairs until endchars and return unquoted ptext.
    This function turns a run of qcontent, ccontent-without-comments, or
    dtext-with-quoted-printables into a single string by unquoting any
    quoted printables.  It returns the string, the remaining value, and
    a flag that is True iff there were any quoted printables decoded.
    """
    # Only scan up to the first run of white space; WSP is handled by the
    # callers via get_fws.
    # NOTE(review): assumes fragment is non-empty (pos would otherwise be
    # unbound below); callers guarantee at least one non-WSP character.
    fragment, *remainder = _wsp_splitter(value, 1)
    vchars = []
    escape = False
    had_qp = False
    for pos in range(len(fragment)):
        if fragment[pos] == '\\':
            if escape:
                # Second backslash of a quoted pair: emit it literally.
                escape = False
                had_qp = True
            else:
                escape = True
                continue
        if escape:
            escape = False
        elif fragment[pos] in endchars:
            break
        vchars.append(fragment[pos])
    else:
        # Loop ran off the end of fragment: consume it entirely.
        pos = pos + 1
    return ''.join(vchars), ''.join([fragment[pos:]] + remainder), had_qp
def get_fws(value):
    """FWS = 1*WSP

    This isn't the RFC definition.  We're using fws to represent tokens
    where folding can be done, but when we are parsing the *un*folding has
    already been done so we don't need to watch out for CRLF.
    """
    stripped = value.lstrip()
    ws_len = len(value) - len(stripped)
    fws = WhiteSpaceTerminal(value[:ws_len], 'fws')
    return fws, stripped
def get_encoded_word(value):
    """ encoded-word = "=?" charset "?" encoding "?" encoded-text "?="

    Returns an EncodedWord token holding the decoded text, plus the
    unparsed remainder of value.  Raises HeaderParseError if value does
    not start with a decodable encoded word.
    """
    ew = EncodedWord()
    if not value.startswith('=?'):
        raise errors.HeaderParseError(
            "expected encoded word but found {}".format(value))
    tok, *remainder = value[2:].split('?=', 1)
    if tok == value[2:]:
        # No '?=' terminator anywhere in the string.
        raise errors.HeaderParseError(
            "expected encoded word but found {}".format(value))
    remstr = ''.join(remainder)
    if len(remstr) > 1 and remstr[0] in hexdigits and remstr[1] in hexdigits:
        # The ? after the CTE was followed by an encoded word escape (=XX).
        rest, *remainder = remstr.split('?=', 1)
        tok = tok + '?=' + rest
    if len(tok.split()) > 1:
        ew.defects.append(errors.InvalidHeaderDefect(
            "whitespace inside encoded word"))
    ew.cte = value
    value = ''.join(remainder)
    try:
        text, charset, lang, defects = _ew.decode('=?' + tok + '?=')
    except ValueError:
        raise errors.HeaderParseError(
            "encoded word format invalid: '{}'".format(ew.cte))
    ew.charset = charset
    ew.lang = lang
    ew.defects.extend(defects)
    # Re-tokenize the decoded text into fws/vtext terminals.
    while text:
        if text[0] in WSP:
            token, text = get_fws(text)
            ew.append(token)
            continue
        chars, *remainder = _wsp_splitter(text, 1)
        vtext = ValueTerminal(chars, 'vtext')
        _validate_xtext(vtext)
        ew.append(vtext)
        text = ''.join(remainder)
    # Encoded words should be followed by a WS
    if value and value[0] not in WSP:
        ew.defects.append(errors.InvalidHeaderDefect(
            "missing trailing whitespace after encoded-word"))
    return ew, value
def get_unstructured(value):
    """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct
    obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS)
    obs-utext = %d0 / obs-NO-WS-CTL / LF / CR
    obs-NO-WS-CTL is control characters except WSP/CR/LF.
    So, basically, we have printable runs, plus control characters or nulls in
    the obsolete syntax, separated by whitespace.  Since RFC 2047 uses the
    obsolete syntax in its specification, but requires whitespace on either
    side of the encoded words, I can see no reason to need to separate the
    non-printable-non-whitespace from the printable runs if they occur, so we
    parse this into xtext tokens separated by WSP tokens.
    Because an 'unstructured' value must by definition constitute the entire
    value, this 'get' routine does not return a remaining value, only the
    parsed TokenList.
    """
    # XXX: but what about bare CR and LF?  They might signal the start or
    # end of an encoded word.  YAGNI for now, since our current parsers
    # will never send us strings with bare CR or LF.
    unstructured = UnstructuredTokenList()
    while value:
        if value[0] in WSP:
            token, value = get_fws(value)
            unstructured.append(token)
            continue
        if value.startswith('=?'):
            try:
                token, value = get_encoded_word(value)
            except errors.HeaderParseError:
                # XXX: Need to figure out how to register defects when
                # appropriate here.
                pass
            else:
                have_ws = True
                if len(unstructured) > 0:
                    if unstructured[-1].token_type != 'fws':
                        unstructured.defects.append(errors.InvalidHeaderDefect(
                            "missing whitespace before encoded word"))
                        have_ws = False
                # WS between two encoded words is semantically invisible
                # (EWWhiteSpaceTerminal renders as '').
                if have_ws and len(unstructured) > 1:
                    if unstructured[-2].token_type == 'encoded-word':
                        unstructured[-1] = EWWhiteSpaceTerminal(
                            unstructured[-1], 'fws')
                unstructured.append(token)
                continue
        tok, *remainder = _wsp_splitter(value, 1)
        # Split in the middle of an atom if there is a rfc2047 encoded word
        # which does not have WSP on both sides. The defect will be registered
        # the next time through the loop.
        if rfc2047_matcher.search(tok):
            tok, *remainder = value.partition('=?')
        vtext = ValueTerminal(tok, 'vtext')
        _validate_xtext(vtext)
        unstructured.append(vtext)
        value = ''.join(remainder)
    return unstructured
def get_qp_ctext(value):
    r"""ctext = <printable ascii except \ ( )>

    This is not the RFC ctext, since we are handling nested comments in
    comment and unquoting quoted-pairs here.  We allow anything except the
    '()' characters, but if we find any ASCII other than the RFC defined
    printable ASCII, a NonPrintableDefect is added to the token's defects
    list.  Since quoted pairs are converted to their unquoted values, what
    is returned is a 'ptext' token.  In this case it is a
    WhiteSpaceTerminal, so its value is ' '.
    """
    ptext, value, _ = _get_ptext_to_endchars(value, '()')
    # Comment content is semantically white space, hence WhiteSpaceTerminal.
    ptext = WhiteSpaceTerminal(ptext, 'ptext')
    _validate_xtext(ptext)
    return ptext, value
def get_qcontent(value):
    """qcontent = qtext / quoted-pair

    We allow anything except the DQUOTE character, but if we find any ASCII
    other than the RFC defined printable ASCII, a NonPrintableDefect is
    added to the token's defects list.  Any quoted pairs are converted to
    their unquoted values, so what is returned is a 'ptext' token.  In this
    case it is a ValueTerminal.
    """
    ptext, value, _ = _get_ptext_to_endchars(value, '"')
    ptext = ValueTerminal(ptext, 'ptext')
    _validate_xtext(ptext)
    return ptext, value
def get_atext(value):
    """atext = <matches _atext_matcher>

    We allow any non-ATOM_ENDS in atext, but add an InvalidATextDefect to
    the token's defects list if we find non-atext characters.
    """
    match = _non_atom_end_matcher(value)
    if not match:
        raise errors.HeaderParseError(
            "expected atext but found '{}'".format(value))
    run = match.group()
    atext = ValueTerminal(run, 'atext')
    _validate_xtext(atext)
    return atext, value[len(run):]
def get_bare_quoted_string(value):
    """bare-quoted-string = DQUOTE *([FWS] qcontent) [FWS] DQUOTE

    A quoted-string without the leading or trailing white space.  Its
    value is the text between the quote marks, with whitespace
    preserved and quoted pairs decoded.
    """
    if value[0] != '"':
        raise errors.HeaderParseError(
            "expected '\"' but found '{}'".format(value))
    bare_quoted_string = BareQuotedString()
    value = value[1:]
    if value and value[0] == '"':
        # Empty quoted string ("" right away): record a zero-length ptext
        # child (get_qcontent stops immediately at the DQUOTE).
        token, value = get_qcontent(value)
        bare_quoted_string.append(token)
    while value and value[0] != '"':
        if value[0] in WSP:
            token, value = get_fws(value)
        elif value[:2] == '=?':
            # Encoded words are not valid inside quoted strings, but decode
            # them anyway and note the defect.
            try:
                token, value = get_encoded_word(value)
                bare_quoted_string.defects.append(errors.InvalidHeaderDefect(
                    "encoded word inside quoted string"))
            except errors.HeaderParseError:
                token, value = get_qcontent(value)
        else:
            token, value = get_qcontent(value)
        bare_quoted_string.append(token)
    if not value:
        # Ran out of input before the closing DQUOTE.
        bare_quoted_string.defects.append(errors.InvalidHeaderDefect(
            "end of header inside quoted string"))
        return bare_quoted_string, value
    return bare_quoted_string, value[1:]
def get_comment(value):
    """comment = "(" *([FWS] ccontent) [FWS] ")"
    ccontent = ctext / quoted-pair / comment

    We handle nested comments here, and quoted-pair in our qp-ctext routine.
    """
    if value and value[0] != '(':
        raise errors.HeaderParseError(
            "expected '(' but found '{}'".format(value))
    comment = Comment()
    value = value[1:]
    while value and value[0] != ")":
        if value[0] in WSP:
            token, value = get_fws(value)
        elif value[0] == '(':
            # Nested comment: recurse.
            token, value = get_comment(value)
        else:
            token, value = get_qp_ctext(value)
        comment.append(token)
    if not value:
        # Ran out of input before the closing paren.
        comment.defects.append(errors.InvalidHeaderDefect(
            "end of header inside comment"))
        return comment, value
    return comment, value[1:]
def get_cfws(value):
    """CFWS = (1*([FWS] comment) [FWS]) / FWS
    """
    cfws = CFWSList()
    # Alternate between white-space and comment tokens until neither fits.
    while value and value[0] in CFWS_LEADER:
        getter = get_fws if value[0] in WSP else get_comment
        token, value = getter(value)
        cfws.append(token)
    return cfws, value
def get_quoted_string(value):
    """quoted-string = [CFWS] <bare-quoted-string> [CFWS]

    'bare-quoted-string' is an intermediate class defined by this
    parser and not by the RFC grammar.  It is the quoted string
    without any attached CFWS.
    """
    quoted_string = QuotedString()
    def eat_cfws(rest):
        # Consume optional CFWS and record it on the quoted_string token.
        if rest and rest[0] in CFWS_LEADER:
            tok, rest = get_cfws(rest)
            quoted_string.append(tok)
        return rest
    value = eat_cfws(value)
    token, value = get_bare_quoted_string(value)
    quoted_string.append(token)
    value = eat_cfws(value)
    return quoted_string, value
def get_atom(value):
    """atom = [CFWS] 1*atext [CFWS]

    An atom could be an rfc2047 encoded word.
    """
    atom = Atom()
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        atom.append(token)
    if value and value[0] in ATOM_ENDS:
        raise errors.HeaderParseError(
            "expected atom but found '{}'".format(value))
    if value.startswith('=?'):
        # Prefer decoding an encoded word; fall back to plain atext.
        try:
            token, value = get_encoded_word(value)
        except errors.HeaderParseError:
            # XXX: need to figure out how to register defects when
            # appropriate here.
            token, value = get_atext(value)
    else:
        token, value = get_atext(value)
    atom.append(token)
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        atom.append(token)
    return atom, value
def get_dot_atom_text(value):
    """ dot-text = 1*atext *("." 1*atext)
    """
    dot_atom_text = DotAtomText()
    if not value or value[0] in ATOM_ENDS:
        raise errors.HeaderParseError("expected atom at a start of "
            "dot-atom-text but found '{}'".format(value))
    # Alternate runs of atext separated by single dots.  A dot must be
    # followed by more atext; the trailing-DOT check below enforces this.
    while value and value[0] not in ATOM_ENDS:
        token, value = get_atext(value)
        dot_atom_text.append(token)
        if value and value[0] == '.':
            dot_atom_text.append(DOT)
            value = value[1:]
    if dot_atom_text[-1] is DOT:
        raise errors.HeaderParseError("expected atom at end of dot-atom-text "
            "but found '{}'".format('.'+value))
    return dot_atom_text, value
def get_dot_atom(value):
    """ dot-atom = [CFWS] dot-atom-text [CFWS]

    Any place we can have a dot atom, we could instead have an rfc2047 encoded
    word.
    """
    dot_atom = DotAtom()
    # Optional leading CFWS.
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        dot_atom.append(token)
    # '=?' introduces an RFC 2047 encoded word; fall back to
    # dot-atom-text if it does not parse as one.
    if value.startswith('=?'):
        try:
            token, value = get_encoded_word(value)
        except errors.HeaderParseError:
            # XXX: need to figure out how to register defects when
            # appropriate here.
            token, value = get_dot_atom_text(value)
    else:
        token, value = get_dot_atom_text(value)
    dot_atom.append(token)
    # Optional trailing CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        dot_atom.append(token)
    return dot_atom, value
def get_word(value):
    """word = atom / quoted-string

    Either atom or quoted-string may start with CFWS.  We have to peel off this
    CFWS first to determine which type of word to parse.  Afterward we splice
    the leading CFWS, if any, into the parsed sub-token.

    If neither an atom or a quoted-string is found before the next special, a
    HeaderParseError is raised.

    The token returned is either an Atom or a QuotedString, as appropriate.
    This means the 'word' level of the formal grammar is not represented in the
    parse tree; this is because having that extra layer when manipulating the
    parse tree is more confusing than it is helpful.
    """
    # Peel off any leading CFWS so we can see the first real character.
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    else:
        leader = None
    if not value:
        raise errors.HeaderParseError(
            "Expected 'atom' or 'quoted-string' but found nothing.")
    if value[0]=='"':
        token, value = get_quoted_string(value)
    elif value[0] in SPECIALS:
        raise errors.HeaderParseError("Expected 'atom' or 'quoted-string' "
                                      "but found '{}'".format(value))
    else:
        token, value = get_atom(value)
    if leader is not None:
        # Splice the leading CFWS into the parsed token instead of
        # keeping a separate 'word' level in the parse tree.
        token[:0] = [leader]
    return token, value
def get_phrase(value):
    """ phrase = 1*word / obs-phrase
        obs-phrase = word *(word / "." / CFWS)

    This means a phrase can be a sequence of words, periods, and CFWS in any
    order as long as it starts with at least one word.  If anything other than
    words is detected, an ObsoleteHeaderDefect is added to the token's defect
    list.  We also accept a phrase that starts with CFWS followed by a dot;
    this is registered as an InvalidHeaderDefect, since it is not supported by
    even the obsolete grammar.
    """
    phrase = Phrase()
    try:
        # A phrase must start with at least one word.
        token, value = get_word(value)
        phrase.append(token)
    except errors.HeaderParseError:
        phrase.defects.append(errors.InvalidHeaderDefect(
            "phrase does not start with word"))
    # Keep consuming words, periods (obsolete), and bare CFWS (obsolete)
    # until a phrase-terminating special is reached.
    while value and value[0] not in PHRASE_ENDS:
        if value[0]=='.':
            phrase.append(DOT)
            phrase.defects.append(errors.ObsoleteHeaderDefect(
                "period in 'phrase'"))
            value = value[1:]
        else:
            try:
                token, value = get_word(value)
            except errors.HeaderParseError:
                if value[0] in CFWS_LEADER:
                    token, value = get_cfws(value)
                    phrase.defects.append(errors.ObsoleteHeaderDefect(
                        "comment found without atom"))
                else:
                    raise
            phrase.append(token)
    return phrase, value
def get_local_part(value):
    """ local-part = dot-atom / quoted-string / obs-local-part
    """
    local_part = LocalPart()
    leader = None
    # Peel off leading CFWS; it is spliced back below.
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    if not value:
        raise errors.HeaderParseError(
            "expected local-part but found '{}'".format(value))
    try:
        token, value = get_dot_atom(value)
    except errors.HeaderParseError:
        try:
            token, value = get_word(value)
        except errors.HeaderParseError:
            if value[0] != '\\' and value[0] in PHRASE_ENDS:
                raise
            token = TokenList()
    if leader is not None:
        token[:0] = [leader]
    local_part.append(token)
    # If more local-part-ish text follows (e.g. further words or quoted
    # pairs), reparse the whole thing as an obs-local-part.
    if value and (value[0]=='\\' or value[0] not in PHRASE_ENDS):
        obs_local_part, value = get_obs_local_part(str(local_part) + value)
        if obs_local_part.token_type == 'invalid-obs-local-part':
            local_part.defects.append(errors.InvalidHeaderDefect(
                "local-part is not dot-atom, quoted-string, or obs-local-part"))
        else:
            local_part.defects.append(errors.ObsoleteHeaderDefect(
                "local-part is not a dot-atom (contains CFWS)"))
        local_part[0] = obs_local_part
    # Non-ASCII is only legal with SMTPUTF8; flag it as a defect.
    try:
        local_part.value.encode('ascii')
    except UnicodeEncodeError:
        local_part.defects.append(errors.NonASCIILocalPartDefect(
                "local-part contains non-ASCII characters)"))
    return local_part, value
def get_obs_local_part(value):
    """ obs-local-part = word *("." word)
    """
    obs_local_part = ObsLocalPart()
    last_non_ws_was_dot = False
    while value and (value[0]=='\\' or value[0] not in PHRASE_ENDS):
        if value[0] == '.':
            # Two dots in a row (ignoring whitespace) is invalid.
            if last_non_ws_was_dot:
                obs_local_part.defects.append(errors.InvalidHeaderDefect(
                    "invalid repeated '.'"))
            obs_local_part.append(DOT)
            last_non_ws_was_dot = True
            value = value[1:]
            continue
        elif value[0]=='\\':
            # A backslash outside a quoted string is misplaced but kept.
            obs_local_part.append(ValueTerminal(value[0],
                                                'misplaced-special'))
            value = value[1:]
            obs_local_part.defects.append(errors.InvalidHeaderDefect(
                "'\\' character outside of quoted-string/ccontent"))
            last_non_ws_was_dot = False
            continue
        if obs_local_part and obs_local_part[-1].token_type != 'dot':
            obs_local_part.defects.append(errors.InvalidHeaderDefect(
                "missing '.' between words"))
        try:
            token, value = get_word(value)
            last_non_ws_was_dot = False
        except errors.HeaderParseError:
            if value[0] not in CFWS_LEADER:
                raise
            token, value = get_cfws(value)
        obs_local_part.append(token)
    # A dot (possibly behind CFWS) may not start or end the local part.
    if (obs_local_part[0].token_type == 'dot' or
            obs_local_part[0].token_type=='cfws' and
            obs_local_part[1].token_type=='dot'):
        obs_local_part.defects.append(errors.InvalidHeaderDefect(
            "Invalid leading '.' in local part"))
    if (obs_local_part[-1].token_type == 'dot' or
            obs_local_part[-1].token_type=='cfws' and
            obs_local_part[-2].token_type=='dot'):
        obs_local_part.defects.append(errors.InvalidHeaderDefect(
            "Invalid trailing '.' in local part"))
    if obs_local_part.defects:
        obs_local_part.token_type = 'invalid-obs-local-part'
    return obs_local_part, value
def get_dtext(value):
    r""" dtext = <printable ascii except \ [ ]> / obs-dtext
        obs-dtext = obs-NO-WS-CTL / quoted-pair

    We allow anything except the excluded characters, but if we find any
    ASCII other than the RFC defined printable ASCII, a NonPrintableDefect is
    added to the token's defects list.  Quoted pairs are converted to their
    unquoted values, so what is returned is a ptext token, in this case a
    ValueTerminal.  If there were quoted-printables, an ObsoleteHeaderDefect is
    added to the returned token's defect list.
    """
    # '[' and ']' delimit the domain-literal; stop at either.
    ptext, value, had_qp = _get_ptext_to_endchars(value, '[]')
    ptext = ValueTerminal(ptext, 'ptext')
    if had_qp:
        ptext.defects.append(errors.ObsoleteHeaderDefect(
            "quoted printable found in domain-literal"))
    _validate_xtext(ptext)
    return ptext, value
def _check_for_early_dl_end(value, domain_literal):
if value:
return False
domain_literal.append(errors.InvalidHeaderDefect(
"end of input inside domain-literal"))
domain_literal.append(ValueTerminal(']', 'domain-literal-end'))
return True
def get_domain_literal(value):
    """ domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS]
    """
    domain_literal = DomainLiteral()
    # Optional leading CFWS.
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        domain_literal.append(token)
    if not value:
        raise errors.HeaderParseError("expected domain-literal")
    if value[0] != '[':
        raise errors.HeaderParseError("expected '[' at start of domain-literal "
                                      "but found '{}'".format(value))
    value = value[1:]
    # Each early-end check closes the literal and bails out if the
    # input is exhausted mid-literal.
    if _check_for_early_dl_end(value, domain_literal):
        return domain_literal, value
    domain_literal.append(ValueTerminal('[', 'domain-literal-start'))
    if value[0] in WSP:
        token, value = get_fws(value)
        domain_literal.append(token)
    token, value = get_dtext(value)
    domain_literal.append(token)
    if _check_for_early_dl_end(value, domain_literal):
        return domain_literal, value
    if value[0] in WSP:
        token, value = get_fws(value)
        domain_literal.append(token)
    if _check_for_early_dl_end(value, domain_literal):
        return domain_literal, value
    if value[0] != ']':
        raise errors.HeaderParseError("expected ']' at end of domain-literal "
                                      "but found '{}'".format(value))
    domain_literal.append(ValueTerminal(']', 'domain-literal-end'))
    value = value[1:]
    # Optional trailing CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        domain_literal.append(token)
    return domain_literal, value
def get_domain(value):
    """ domain = dot-atom / domain-literal / obs-domain
        obs-domain = atom *("." atom))
    """
    domain = Domain()
    leader = None
    # Peel off leading CFWS; it is spliced into the parsed token below.
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    if not value:
        raise errors.HeaderParseError(
            "expected domain but found '{}'".format(value))
    # '[' starts a domain-literal.
    if value[0] == '[':
        token, value = get_domain_literal(value)
        if leader is not None:
            token[:0] = [leader]
        domain.append(token)
        return domain, value
    try:
        token, value = get_dot_atom(value)
    except errors.HeaderParseError:
        token, value = get_atom(value)
    if leader is not None:
        token[:0] = [leader]
    domain.append(token)
    # A trailing '.' means this is the obsolete atom *("." atom) form.
    if value and value[0] == '.':
        domain.defects.append(errors.ObsoleteHeaderDefect(
            "domain is not a dot-atom (contains CFWS)"))
        if domain[0].token_type == 'dot-atom':
            # Flatten the dot-atom so further atoms can be appended.
            domain[:] = domain[0]
        while value and value[0] == '.':
            domain.append(DOT)
            token, value = get_atom(value[1:])
            domain.append(token)
    return domain, value
def get_addr_spec(value):
    """addr-spec = local-part "@" domain

    If no '@' follows the local part, an InvalidHeaderDefect is
    registered and the domain-less token is returned as-is.
    """
    addr_spec = AddrSpec()
    local, value = get_local_part(value)
    addr_spec.append(local)
    # Guard: without an '@' there can be no domain.
    if not value or value[0] != '@':
        addr_spec.defects.append(errors.InvalidHeaderDefect(
            "addr-spec local part with no domain"))
        return addr_spec, value
    addr_spec.append(ValueTerminal('@', 'address-at-symbol'))
    dom, value = get_domain(value[1:])
    addr_spec.append(dom)
    return addr_spec, value
def get_obs_route(value):
    """ obs-route = obs-domain-list ":"
        obs-domain-list = *(CFWS / ",") "@" domain *("," [CFWS] ["@" domain])

    Returns an obs-route token with the appropriate sub-tokens (that is,
    there is no obs-domain-list in the parse tree).
    """
    obs_route = ObsRoute()
    # Skip any leading commas and CFWS before the first '@'.
    while value and (value[0]==',' or value[0] in CFWS_LEADER):
        if value[0] in CFWS_LEADER:
            token, value = get_cfws(value)
            obs_route.append(token)
        elif value[0] == ',':
            obs_route.append(ListSeparator)
            value = value[1:]
    if not value or value[0] != '@':
        raise errors.HeaderParseError(
            "expected obs-route domain but found '{}'".format(value))
    obs_route.append(RouteComponentMarker)
    token, value = get_domain(value[1:])
    obs_route.append(token)
    # Remaining comma-separated entries; each is optionally another
    # '@' domain and may be empty.
    while value and value[0]==',':
        obs_route.append(ListSeparator)
        value = value[1:]
        if not value:
            break
        if value[0] in CFWS_LEADER:
            token, value = get_cfws(value)
            obs_route.append(token)
        if value[0] == '@':
            obs_route.append(RouteComponentMarker)
            token, value = get_domain(value[1:])
            obs_route.append(token)
    if not value:
        raise errors.HeaderParseError("end of header while parsing obs-route")
    if value[0] != ':':
        raise errors.HeaderParseError( "expected ':' marking end of "
                                      "obs-route but found '{}'".format(value))
    obs_route.append(ValueTerminal(':', 'end-of-obs-route-marker'))
    return obs_route, value[1:]
def get_angle_addr(value):
    """ angle-addr = [CFWS] "<" addr-spec ">" [CFWS] / obs-angle-addr
        obs-angle-addr = [CFWS] "<" obs-route addr-spec ">" [CFWS]
    """
    angle_addr = AngleAddr()
    # Optional leading CFWS.
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        angle_addr.append(token)
    if not value or value[0] != '<':
        raise errors.HeaderParseError(
            "expected angle-addr but found '{}'".format(value))
    angle_addr.append(ValueTerminal('<', 'angle-addr-start'))
    value = value[1:]
    # Although it is not legal per RFC5322, SMTP uses '<>' in certain
    # circumstances.
    if value[0] == '>':
        angle_addr.append(ValueTerminal('>', 'angle-addr-end'))
        angle_addr.defects.append(errors.InvalidHeaderDefect(
            "null addr-spec in angle-addr"))
        value = value[1:]
        return angle_addr, value
    try:
        token, value = get_addr_spec(value)
    except errors.HeaderParseError:
        # Failing addr-spec may mean an obsolete route precedes it.
        try:
            token, value = get_obs_route(value)
            angle_addr.defects.append(errors.ObsoleteHeaderDefect(
                "obsolete route specification in angle-addr"))
        except errors.HeaderParseError:
            raise errors.HeaderParseError(
                "expected addr-spec or obs-route but found '{}'".format(value))
        angle_addr.append(token)
        token, value = get_addr_spec(value)
    angle_addr.append(token)
    if value and value[0] == '>':
        value = value[1:]
    else:
        angle_addr.defects.append(errors.InvalidHeaderDefect(
            "missing trailing '>' on angle-addr"))
    # The '>' terminal is appended even when missing from the input.
    angle_addr.append(ValueTerminal('>', 'angle-addr-end'))
    # Optional trailing CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        angle_addr.append(token)
    return angle_addr, value
def get_display_name(value):
    """display-name = phrase

    Because this is simply a name-rule, we don't return a display-name
    token containing a phrase, but rather a display-name token with
    the content of the phrase.
    """
    phrase, value = get_phrase(value)
    # Flatten: copy the phrase's children and defects into the
    # DisplayName instead of nesting the phrase token itself.
    name = DisplayName()
    name.extend(list(phrase))
    name.defects = list(phrase.defects)
    return name, value
def get_name_addr(value):
    """ name-addr = [display-name] angle-addr
    """
    name_addr = NameAddr()
    # Both the optional display name and the angle-addr can start with cfws.
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
        if not value:
            raise errors.HeaderParseError(
                "expected name-addr but found '{}'".format(leader))
    if value[0] != '<':
        if value[0] in PHRASE_ENDS:
            raise errors.HeaderParseError(
                "expected name-addr but found '{}'".format(value))
        token, value = get_display_name(value)
        if not value:
            raise errors.HeaderParseError(
                "expected name-addr but found '{}'".format(token))
        if leader is not None:
            # Splice the CFWS into the display name's first sub-token.
            token[0][:0] = [leader]
            leader = None
        name_addr.append(token)
    token, value = get_angle_addr(value)
    if leader is not None:
        # No display name: the CFWS belongs to the angle-addr instead.
        token[:0] = [leader]
    name_addr.append(token)
    return name_addr, value
def get_mailbox(value):
    """mailbox = name-addr / addr-spec

    The only way to tell which alternative we have is to attempt each
    parse in turn, name-addr first.  If the parsed token carries any
    InvalidHeaderDefect, the mailbox is marked 'invalid-mailbox'.
    """
    mailbox = Mailbox()
    token = None
    for parse in (get_name_addr, get_addr_spec):
        try:
            token, value = parse(value)
        except errors.HeaderParseError:
            continue
        break
    if token is None:
        raise errors.HeaderParseError(
            "expected mailbox but found '{}'".format(value))
    has_invalid = any(isinstance(x, errors.InvalidHeaderDefect)
                      for x in token.all_defects)
    if has_invalid:
        mailbox.token_type = 'invalid-mailbox'
    mailbox.append(token)
    return mailbox, value
def get_invalid_mailbox(value, endchars):
    """Read everything up to one of the chars in endchars.

    This is outside the formal grammar.  The InvalidMailbox TokenList
    that is returned acts like a Mailbox, but the data attributes are
    None.
    """
    bad = InvalidMailbox()
    while value:
        ch = value[0]
        if ch in endchars:
            break
        if ch in PHRASE_ENDS:
            # Specials cannot appear in a phrase; keep them as
            # misplaced-special terminals.
            bad.append(ValueTerminal(ch, 'misplaced-special'))
            value = value[1:]
        else:
            phrase, value = get_phrase(value)
            bad.append(phrase)
    return bad, value
def get_mailbox_list(value):
    """ mailbox-list = (mailbox *("," mailbox)) / obs-mbox-list
        obs-mbox-list = *([CFWS] ",") mailbox *("," [mailbox / CFWS])

    For this routine we go outside the formal grammar in order to improve error
    handling.  We recognize the end of the mailbox list only at the end of the
    value or at a ';' (the group terminator).  This is so that we can turn
    invalid mailboxes into InvalidMailbox tokens and continue parsing any
    remaining valid mailboxes.  We also allow all mailbox entries to be null,
    and this condition is handled appropriately at a higher level.
    """
    mailbox_list = MailboxList()
    while value and value[0] != ';':
        try:
            token, value = get_mailbox(value)
            mailbox_list.append(token)
        except errors.HeaderParseError:
            # Not a mailbox: could be an empty (obsolete) entry, or
            # garbage to be captured as an InvalidMailbox.
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
                if not value or value[0] in ',;':
                    mailbox_list.append(leader)
                    mailbox_list.defects.append(errors.ObsoleteHeaderDefect(
                        "empty element in mailbox-list"))
                else:
                    token, value = get_invalid_mailbox(value, ',;')
                    if leader is not None:
                        token[:0] = [leader]
                    mailbox_list.append(token)
                    mailbox_list.defects.append(errors.InvalidHeaderDefect(
                        "invalid mailbox in mailbox-list"))
            elif value[0] == ',':
                mailbox_list.defects.append(errors.ObsoleteHeaderDefect(
                    "empty element in mailbox-list"))
            else:
                token, value = get_invalid_mailbox(value, ',;')
                if leader is not None:
                    token[:0] = [leader]
                mailbox_list.append(token)
                mailbox_list.defects.append(errors.InvalidHeaderDefect(
                    "invalid mailbox in mailbox-list"))
        if value and value[0] not in ',;':
            # Crap after mailbox; treat it as an invalid mailbox.
            # The mailbox info will still be available.
            mailbox = mailbox_list[-1]
            mailbox.token_type = 'invalid-mailbox'
            token, value = get_invalid_mailbox(value, ',;')
            mailbox.extend(token)
            mailbox_list.defects.append(errors.InvalidHeaderDefect(
                "invalid mailbox in mailbox-list"))
        if value and value[0] == ',':
            mailbox_list.append(ListSeparator)
            value = value[1:]
    return mailbox_list, value
def get_group_list(value):
    """ group-list = mailbox-list / CFWS / obs-group-list
        obs-group-list = 1*([CFWS] ",") [CFWS]
    """
    group_list = GroupList()
    if not value:
        group_list.defects.append(errors.InvalidHeaderDefect(
            "end of header before group-list"))
        return group_list, value
    leader = None
    if value and value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
        if not value:
            # This should never happen in email parsing, since CFWS-only is a
            # legal alternative to group-list in a group, which is the only
            # place group-list appears.
            group_list.defects.append(errors.InvalidHeaderDefect(
                "end of header in group-list"))
            group_list.append(leader)
            return group_list, value
        if value[0] == ';':
            # CFWS-only group-list (terminated by the group's ';').
            group_list.append(leader)
            return group_list, value
    token, value = get_mailbox_list(value)
    if len(token.all_mailboxes)==0:
        # Only empty entries were found: the obsolete 1*([CFWS] ",") form.
        if leader is not None:
            group_list.append(leader)
        group_list.extend(token)
        group_list.defects.append(errors.ObsoleteHeaderDefect(
            "group-list with empty entries"))
        return group_list, value
    if leader is not None:
        token[:0] = [leader]
    group_list.append(token)
    return group_list, value
def get_group(value):
    """ group = display-name ":" [group-list] ";" [CFWS]
    """
    group = Group()
    token, value = get_display_name(value)
    if not value or value[0] != ':':
        raise errors.HeaderParseError("expected ':' at end of group "
                                      "display name but found '{}'".format(value))
    group.append(token)
    group.append(ValueTerminal(':', 'group-display-name-terminator'))
    value = value[1:]
    # Empty group: ';' immediately follows the ':'.
    if value and value[0] == ';':
        group.append(ValueTerminal(';', 'group-terminator'))
        return group, value[1:]
    token, value = get_group_list(value)
    group.append(token)
    if not value:
        group.defects.append(errors.InvalidHeaderDefect(
            "end of header in group"))
    elif value[0] != ';':
        raise errors.HeaderParseError(
            "expected ';' at end of group but found {}".format(value))
    # The ';' terminal is appended even when missing from the input.
    group.append(ValueTerminal(';', 'group-terminator'))
    value = value[1:]
    # Optional trailing CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        group.append(token)
    return group, value
def get_address(value):
    """address = mailbox / group

    Note that counter-intuitively, an address can be either a single
    address or a list of addresses (a group).  This is why the returned
    Address object has a 'mailboxes' attribute which treats a single
    address as a list of length one.  When you need to differentiate
    between the two cases, extract the single element, which is either
    a mailbox or a group token.
    """
    # mailbox and group start off very similarly (especially allowing
    # for obsolete forms); only a later '@', '<', or ':' disambiguates.
    # So simply attempt each parse in turn, the likelier one first.
    address = Address()
    token = None
    for parse in (get_group, get_mailbox):
        try:
            token, value = parse(value)
        except errors.HeaderParseError:
            continue
        break
    if token is None:
        raise errors.HeaderParseError(
            "expected address but found '{}'".format(value))
    address.append(token)
    return address, value
def get_address_list(value):
    """ address_list = (address *("," address)) / obs-addr-list
        obs-addr-list = *([CFWS] ",") address *("," [address / CFWS])

    We depart from the formal grammar here by continuing to parse until the end
    of the input, assuming the input to be entirely composed of an
    address-list.  This is always true in email parsing, and allows us
    to skip invalid addresses to parse additional valid ones.
    """
    address_list = AddressList()
    while value:
        try:
            token, value = get_address(value)
            address_list.append(token)
        except errors.HeaderParseError as err:
            # NOTE(review): 'err' is never used below.
            # Not an address: could be an empty (obsolete) entry, or
            # garbage to be captured as an invalid mailbox.
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
                if not value or value[0] == ',':
                    address_list.append(leader)
                    address_list.defects.append(errors.ObsoleteHeaderDefect(
                        "address-list entry with no content"))
                else:
                    token, value = get_invalid_mailbox(value, ',')
                    if leader is not None:
                        token[:0] = [leader]
                    address_list.append(Address([token]))
                    address_list.defects.append(errors.InvalidHeaderDefect(
                        "invalid address in address-list"))
            elif value[0] == ',':
                address_list.defects.append(errors.ObsoleteHeaderDefect(
                    "empty element in address-list"))
            else:
                token, value = get_invalid_mailbox(value, ',')
                if leader is not None:
                    token[:0] = [leader]
                address_list.append(Address([token]))
                address_list.defects.append(errors.InvalidHeaderDefect(
                    "invalid address in address-list"))
        if value and value[0] != ',':
            # Crap after address; treat it as an invalid mailbox.
            # The mailbox info will still be available.
            mailbox = address_list[-1][0]
            mailbox.token_type = 'invalid-mailbox'
            token, value = get_invalid_mailbox(value, ',')
            mailbox.extend(token)
            address_list.defects.append(errors.InvalidHeaderDefect(
                "invalid address in address-list"))
        if value:  # Must be a , at this point.
            address_list.append(ValueTerminal(',', 'list-separator'))
            value = value[1:]
    return address_list, value
def get_no_fold_literal(value):
    """ no-fold-literal = "[" *dtext "]"
    """
    no_fold_literal = NoFoldLiteral()
    if not value:
        raise errors.HeaderParseError(
            "expected no-fold-literal but found '{}'".format(value))
    if value[0] != '[':
        raise errors.HeaderParseError(
            "expected '[' at the start of no-fold-literal "
            "but found '{}'".format(value))
    no_fold_literal.append(ValueTerminal('[', 'no-fold-literal-start'))
    value = value[1:]
    # dtext stops at '[' or ']', so everything up to the closing
    # bracket is consumed here.
    token, value = get_dtext(value)
    no_fold_literal.append(token)
    if not value or value[0] != ']':
        raise errors.HeaderParseError(
            "expected ']' at the end of no-fold-literal "
            "but found '{}'".format(value))
    no_fold_literal.append(ValueTerminal(']', 'no-fold-literal-end'))
    return no_fold_literal, value[1:]
def get_msg_id(value):
    """msg-id = [CFWS] "<" id-left '@' id-right ">" [CFWS]

    id-left = dot-atom-text / obs-id-left
    id-right = dot-atom-text / no-fold-literal / obs-id-right
    no-fold-literal = "[" *dtext "]"

    Returns a MsgID token.  Obsolete or malformed forms are accepted
    where possible and registered as defects on the token.
    """
    msg_id = MsgID()
    # Optional leading CFWS.
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        msg_id.append(token)
    if not value or value[0] != '<':
        raise errors.HeaderParseError(
            "expected msg-id but found '{}'".format(value))
    msg_id.append(ValueTerminal('<', 'msg-id-start'))
    value = value[1:]
    # Parse id-left.
    try:
        token, value = get_dot_atom_text(value)
    except errors.HeaderParseError:
        try:
            # obs-id-left is same as local-part of add-spec.
            token, value = get_obs_local_part(value)
            msg_id.defects.append(errors.ObsoleteHeaderDefect(
                "obsolete id-left in msg-id"))
        except errors.HeaderParseError:
            raise errors.HeaderParseError(
                "expected dot-atom-text or obs-id-left"
                " but found '{}'".format(value))
    msg_id.append(token)
    if not value or value[0] != '@':
        msg_id.defects.append(errors.InvalidHeaderDefect(
            "msg-id with no id-right"))
        # Even though there is no id-right, if the local part
        # ends with `>` let's just parse it too and return
        # along with the defect.
        if value and value[0] == '>':
            msg_id.append(ValueTerminal('>', 'msg-id-end'))
            value = value[1:]
        return msg_id, value
    msg_id.append(ValueTerminal('@', 'address-at-symbol'))
    value = value[1:]
    # Parse id-right.
    try:
        token, value = get_dot_atom_text(value)
    except errors.HeaderParseError:
        try:
            token, value = get_no_fold_literal(value)
        except errors.HeaderParseError:
            # Fix: the caught exception was previously bound to an
            # unused name 'e'; the binding has been removed.
            try:
                token, value = get_domain(value)
                msg_id.defects.append(errors.ObsoleteHeaderDefect(
                    "obsolete id-right in msg-id"))
            except errors.HeaderParseError:
                raise errors.HeaderParseError(
                    "expected dot-atom-text, no-fold-literal or obs-id-right"
                    " but found '{}'".format(value))
    msg_id.append(token)
    if value and value[0] == '>':
        value = value[1:]
    else:
        msg_id.defects.append(errors.InvalidHeaderDefect(
            "missing trailing '>' on msg-id"))
    # The '>' terminal is appended even when missing from the input.
    msg_id.append(ValueTerminal('>', 'msg-id-end'))
    # Optional trailing CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        msg_id.append(token)
    return msg_id, value
def parse_message_id(value):
    """message-id = "Message-ID:" msg-id CRLF

    Returns a MessageID token.  If the value cannot be parsed as a
    msg-id, an InvalidHeaderDefect is registered on the (empty) token
    instead of propagating the parse error.
    """
    message_id = MessageID()
    try:
        token, value = get_msg_id(value)
    except errors.HeaderParseError:
        # Bug fix: the append below previously ran unconditionally, so a
        # parse failure left 'token' unbound and raised NameError here.
        message_id.defects.append(errors.InvalidHeaderDefect(
            "Expected msg-id but found {!r}".format(value)))
    else:
        message_id.append(token)
    return message_id
#
# XXX: As I begin to add additional header parsers, I'm realizing we probably
# have two level of parser routines: the get_XXX methods that get a token in
# the grammar, and parse_XXX methods that parse an entire field value. So
# get_address_list above should really be a parse_ method, as probably should
# be get_unstructured.
#
def parse_mime_version(value):
    """ mime-version = [CFWS] 1*digit [CFWS] "." [CFWS] 1*digit [CFWS]
    """
    # The [CFWS] is implicit in the RFC 2045 BNF.
    # XXX: This routine is a bit verbose, should factor out a get_int method.
    mime_version = MIMEVersion()
    if not value:
        mime_version.defects.append(errors.HeaderMissingRequiredValue(
            "Missing MIME version number (eg: 1.0)"))
        return mime_version
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
        if not value:
            mime_version.defects.append(errors.HeaderMissingRequiredValue(
                "Expected MIME version number but found only CFWS"))
    # Collect the major version digits (anything up to '.' or CFWS).
    digits = ''
    while value and value[0] != '.' and value[0] not in CFWS_LEADER:
        digits += value[0]
        value = value[1:]
    if not digits.isdigit():
        mime_version.defects.append(errors.InvalidHeaderDefect(
            "Expected MIME major version number but found {!r}".format(digits)))
        # Non-numeric text is preserved as an xtext terminal.
        mime_version.append(ValueTerminal(digits, 'xtext'))
    else:
        mime_version.major = int(digits)
        mime_version.append(ValueTerminal(digits, 'digits'))
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
    if not value or value[0] != '.':
        if mime_version.major is not None:
            mime_version.defects.append(errors.InvalidHeaderDefect(
                "Incomplete MIME version; found only major number"))
        if value:
            mime_version.append(ValueTerminal(value, 'xtext'))
        return mime_version
    mime_version.append(ValueTerminal('.', 'version-separator'))
    value = value[1:]
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
    if not value:
        if mime_version.major is not None:
            mime_version.defects.append(errors.InvalidHeaderDefect(
                "Incomplete MIME version; found only major number"))
        return mime_version
    # Collect the minor version digits (anything up to CFWS).
    digits = ''
    while value and value[0] not in CFWS_LEADER:
        digits += value[0]
        value = value[1:]
    if not digits.isdigit():
        mime_version.defects.append(errors.InvalidHeaderDefect(
            "Expected MIME minor version number but found {!r}".format(digits)))
        mime_version.append(ValueTerminal(digits, 'xtext'))
    else:
        mime_version.minor = int(digits)
        mime_version.append(ValueTerminal(digits, 'digits'))
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
    if value:
        mime_version.defects.append(errors.InvalidHeaderDefect(
            "Excess non-CFWS text after MIME version"))
        mime_version.append(ValueTerminal(value, 'xtext'))
    return mime_version
def get_invalid_parameter(value):
    """Read everything up to the next ';'.

    This is outside the formal grammar.  The InvalidParameter TokenList
    that is returned acts like a Parameter, but the data attributes are
    None.
    """
    bad = InvalidParameter()
    while value:
        ch = value[0]
        if ch == ';':
            break
        if ch in PHRASE_ENDS:
            # Specials cannot appear in a phrase; keep them as
            # misplaced-special terminals.
            bad.append(ValueTerminal(ch, 'misplaced-special'))
            value = value[1:]
        else:
            phrase, value = get_phrase(value)
            bad.append(phrase)
    return bad, value
def get_ttext(value):
    """ttext = <matches _ttext_matcher>

    We allow any non-TOKEN_ENDS in ttext, but add defects to the token's
    defects list if we find non-ttext characters.  We also register defects
    for *any* non-printables even though the RFC doesn't exclude all of them,
    because we follow the spirit of RFC 5322.
    """
    match = _non_token_end_matcher(value)
    if match is None:
        raise errors.HeaderParseError(
            "expected ttext but found '{}'".format(value))
    text = match.group()
    token = ValueTerminal(text, 'ttext')
    # Register defects for any non-printable characters.
    _validate_xtext(token)
    return token, value[len(text):]
def get_token(value):
    """token = [CFWS] 1*ttext [CFWS]

    The RFC equivalent of ttext is any US-ASCII chars except space, ctls, or
    tspecials.  We also exclude tabs even though the RFC doesn't.

    The RFC implies the CFWS but is not explicit about it in the BNF.
    """
    mtoken = Token()
    # Optional leading CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mtoken.append(token)
    if value and value[0] in TOKEN_ENDS:
        raise errors.HeaderParseError(
            "expected token but found '{}'".format(value))
    token, value = get_ttext(value)
    mtoken.append(token)
    # Optional trailing CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mtoken.append(token)
    return mtoken, value
def get_attrtext(value):
    """attrtext = 1*(any non-ATTRIBUTE_ENDS character)

    We allow any non-ATTRIBUTE_ENDS in attrtext, but add defects to the
    token's defects list if we find non-attrtext characters.  We also register
    defects for *any* non-printables even though the RFC doesn't exclude all of
    them, because we follow the spirit of RFC 5322.
    """
    match = _non_attribute_end_matcher(value)
    if match is None:
        raise errors.HeaderParseError(
            "expected attrtext but found {!r}".format(value))
    text = match.group()
    token = ValueTerminal(text, 'attrtext')
    # Register defects for any non-printable characters.
    _validate_xtext(token)
    return token, value[len(text):]
def get_attribute(value):
    """ [CFWS] 1*attrtext [CFWS]

    This version of the BNF makes the CFWS explicit, and as usual we use a
    value terminal for the actual run of characters.  The RFC equivalent of
    attrtext is the token characters, with the subtraction of '*', "'", and '%'.
    We include tab in the excluded set just as we do for token.
    """
    attribute = Attribute()
    # Optional leading CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        attribute.append(token)
    if value and value[0] in ATTRIBUTE_ENDS:
        raise errors.HeaderParseError(
            "expected token but found '{}'".format(value))
    token, value = get_attrtext(value)
    attribute.append(token)
    # Optional trailing CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        attribute.append(token)
    return attribute, value
def get_extended_attrtext(value):
    """attrtext = 1*(any non-ATTRIBUTE_ENDS character plus '%')

    This is a special parsing routine so that we get a value that
    includes % escapes as a single string (which we decode as a single
    string later).
    """
    m = _non_extended_attribute_end_matcher(value)
    if not m:
        raise errors.HeaderParseError(
            "expected extended attrtext but found {!r}".format(value))
    attrtext = m.group()
    value = value[len(attrtext):]
    attrtext = ValueTerminal(attrtext, 'extended-attrtext')
    # Register defects for any non-printable characters.
    _validate_xtext(attrtext)
    return attrtext, value
def get_extended_attribute(value):
    """ [CFWS] 1*extended_attrtext [CFWS]

    This is like the non-extended version except we allow % characters, so that
    we can pick up an encoded value as a single string.
    """
    # XXX: should we have an ExtendedAttribute TokenList?
    attribute = Attribute()
    # Optional leading CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        attribute.append(token)
    if value and value[0] in EXTENDED_ATTRIBUTE_ENDS:
        raise errors.HeaderParseError(
            "expected token but found '{}'".format(value))
    token, value = get_extended_attrtext(value)
    attribute.append(token)
    # Optional trailing CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        attribute.append(token)
    return attribute, value
def get_section(value):
    """ '*' digits

    Parse an RFC 2231 continuation-section marker ('*' followed by digits).
    The formal BNF is more complicated because leading 0s are not allowed.  We
    check for that and add a defect.  We also assume no CFWS is allowed between
    the '*' and the digits, though the RFC is not crystal clear on that.
    The caller should already have dealt with leading CFWS.

    Raises HeaderParseError if value does not start with '*' + digit.
    """
    section = Section()
    if not value or value[0] != '*':
        raise errors.HeaderParseError("Expected section but found {}".format(
            value))
    section.append(ValueTerminal('*', 'section-marker'))
    value = value[1:]
    if not value or not value[0].isdigit():
        raise errors.HeaderParseError("Expected section number but "
                                      "found {}".format(value))
    digits = ''
    while value and value[0].isdigit():
        digits += value[0]
        value = value[1:]
    if digits[0] == '0' and digits != '0':
        # Leading zeros are forbidden; record a defect but keep parsing.
        # Fix: this previously referenced errors.InvalidHeaderError, which
        # does not exist in email.errors and would have raised
        # AttributeError when reached; InvalidHeaderDefect is the class used
        # for every other recoverable syntax problem in this module.
        section.defects.append(errors.InvalidHeaderDefect(
            "section number has an invalid leading 0"))
    section.number = int(digits)
    section.append(ValueTerminal(digits, 'digits'))
    return section, value
def get_value(value):
    """ quoted-string / attribute

    A value is either a quoted string or a run of (extended) attribute
    characters; any leading CFWS is folded into the returned token.
    """
    result = Value()
    if not value:
        raise errors.HeaderParseError("Expected value but found end of string")
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    if not value:
        raise errors.HeaderParseError("Expected value but found "
                                      "only {}".format(leader))
    if value[0] == '"':
        token, value = get_quoted_string(value)
    else:
        token, value = get_extended_attribute(value)
    if leader is not None:
        # Prepend the collected whitespace so it is preserved on rendering.
        token[:0] = [leader]
    result.append(token)
    return result, value
def get_parameter(value):
    """ attribute [section] ["*"] [CFWS] "=" value

    Parse a single MIME parameter (RFC 2045 / RFC 2231), returning a
    Parameter token and the unparsed remainder of value.  The CFWS is implied
    by the RFC but not made explicit in the BNF.  This simplified form of the
    BNF from the RFC is made to conform with the RFC BNF through some extra
    checks.  We do it this way because it makes both error recovery and
    working with the resulting parse tree easier.
    """
    # It is possible CFWS would also be implicitly allowed between the section
    # and the 'extended-attribute' marker (the '*') , but we've never seen that
    # in the wild and we will therefore ignore the possibility.
    param = Parameter()
    token, value = get_attribute(value)
    param.append(token)
    if not value or value[0] == ';':
        # Name with no '=value' part: record a defect and return what we have.
        param.defects.append(errors.InvalidHeaderDefect("Parameter contains "
            "name ({}) but no value".format(token)))
        return param, value
    if value[0] == '*':
        try:
            # '*digits' marks an RFC 2231 continuation section.
            token, value = get_section(value)
            param.sectioned = True
            param.append(token)
        except errors.HeaderParseError:
            pass
        if not value:
            raise errors.HeaderParseError("Incomplete parameter")
        if value[0] == '*':
            # A second '*' marks the value as RFC 2231 extended (encoded).
            param.append(ValueTerminal('*', 'extended-parameter-marker'))
            value = value[1:]
            param.extended = True
    if value[0] != '=':
        raise errors.HeaderParseError("Parameter not followed by '='")
    param.append(ValueTerminal('=', 'parameter-separator'))
    value = value[1:]
    leader = None
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        param.append(token)
    remainder = None
    appendto = param
    if param.extended and value and value[0] == '"':
        # Now for some serious hackery to handle the common invalid case of
        # double quotes around an extended value.  We also accept (with defect)
        # a value marked as encoded that isn't really.
        qstring, remainder = get_quoted_string(value)
        inner_value = qstring.stripped_value
        semi_valid = False
        if param.section_number == 0:
            if inner_value and inner_value[0] == "'":
                semi_valid = True
            else:
                token, rest = get_attrtext(inner_value)
                if rest and rest[0] == "'":
                    semi_valid = True
        else:
            # Fix: this used a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit; get_extended_attrtext only
            # raises HeaderParseError, so catch exactly that.
            try:
                token, rest = get_extended_attrtext(inner_value)
            except errors.HeaderParseError:
                pass
            else:
                if not rest:
                    semi_valid = True
        if semi_valid:
            param.defects.append(errors.InvalidHeaderDefect(
                "Quoted string value for extended parameter is invalid"))
            param.append(qstring)
            for t in qstring:
                if t.token_type == 'bare-quoted-string':
                    # Re-parse the quoted string's content in place:
                    # subsequent tokens get appended inside this token.
                    t[:] = []
                    appendto = t
                    break
            value = inner_value
        else:
            remainder = None
            param.defects.append(errors.InvalidHeaderDefect(
                "Parameter marked as extended but appears to have a "
                "quoted string value that is non-encoded"))
    if value and value[0] == "'":
        # Empty charset section; handled by the charset/lang logic below.
        token = None
    else:
        token, value = get_value(value)
    if not param.extended or param.section_number > 0:
        if not value or value[0] != "'":
            # Ordinary (or continuation-section) value: we are done.
            appendto.append(token)
            if remainder is not None:
                assert not value, value
                value = remainder
            return param, value
        param.defects.append(errors.InvalidHeaderDefect(
            "Apparent initial-extended-value but attribute "
            "was not marked as extended or was not initial section"))
    if not value:
        # Assume the charset/lang is missing and the token is the value.
        param.defects.append(errors.InvalidHeaderDefect(
            "Missing required charset/lang delimiters"))
        appendto.append(token)
        if remainder is None:
            return param, value
    else:
        if token is not None:
            for t in token:
                if t.token_type == 'extended-attrtext':
                    break
            # NOTE(review): the next line is a comparison, not an assignment,
            # so it has no effect.  Upstream CPython carries the identical
            # line; the apparent intent is to relabel t as 'attrtext'.  Left
            # unchanged here to avoid altering parse-tree labels -- confirm
            # against upstream before changing.
            t.token_type == 'attrtext'
            appendto.append(t)
            param.charset = t.value
        if value[0] != "'":
            raise errors.HeaderParseError("Expected RFC2231 char/lang encoding "
                                          "delimiter, but found {!r}".format(value))
        appendto.append(ValueTerminal("'", 'RFC2231-delimiter'))
        value = value[1:]
        if value and value[0] != "'":
            token, value = get_attrtext(value)
            appendto.append(token)
            param.lang = token.value
            if not value or value[0] != "'":
                raise errors.HeaderParseError("Expected RFC2231 char/lang encoding "
                                  "delimiter, but found {}".format(value))
        appendto.append(ValueTerminal("'", 'RFC2231-delimiter'))
        value = value[1:]
    if remainder is not None:
        # Treat the rest of value as bare quoted string content.
        v = Value()
        while value:
            if value[0] in WSP:
                token, value = get_fws(value)
            elif value[0] == '"':
                token = ValueTerminal('"', 'DQUOTE')
                value = value[1:]
            else:
                token, value = get_qcontent(value)
            v.append(token)
        token = v
    else:
        token, value = get_value(value)
    appendto.append(token)
    if remainder is not None:
        assert not value, value
        value = remainder
    return param, value
def parse_mime_parameters(value):
    """ parameter *( ";" parameter )

    That BNF is meant to indicate this routine should only be called after
    finding and handling the leading ';'.  There is no corresponding rule in
    the formal RFC grammar, but it is more convenient for us for the set of
    parameters to be treated as its own TokenList.

    This is 'parse' routine because it consumes the remaining value, but it
    would never be called to parse a full header.  Instead it is called to
    parse everything after the non-parameter value of a specific MIME header.
    """
    mime_parameters = MimeParameters()
    while value:
        try:
            token, value = get_parameter(value)
            mime_parameters.append(token)
        except errors.HeaderParseError as err:
            # Error recovery: skip any CFWS, then classify the rest as either
            # an empty parameter entry (next char is ';') or a run of junk to
            # be swallowed as an invalid-parameter token.
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
            if not value:
                # Trailing whitespace only; keep it and stop.
                mime_parameters.append(leader)
                return mime_parameters
            if value[0] == ';':
                if leader is not None:
                    mime_parameters.append(leader)
                mime_parameters.defects.append(errors.InvalidHeaderDefect(
                    "parameter entry with no content"))
            else:
                token, value = get_invalid_parameter(value)
                if leader:
                    token[:0] = [leader]
                mime_parameters.append(token)
                mime_parameters.defects.append(errors.InvalidHeaderDefect(
                    "invalid parameter {!r}".format(token)))
        if value and value[0] != ';':
            # Junk after the otherwise valid parameter.  Mark it as
            # invalid, but it will have a value.
            param = mime_parameters[-1]
            param.token_type = 'invalid-parameter'
            token, value = get_invalid_parameter(value)
            param.extend(token)
            mime_parameters.defects.append(errors.InvalidHeaderDefect(
                "parameter with invalid trailing text {!r}".format(token)))
        if value:
            # Must be a ';' at this point.
            mime_parameters.append(ValueTerminal(';', 'parameter-separator'))
            value = value[1:]
    return mime_parameters
def _find_mime_parameters(tokenlist, value):
    """Do our best to find the parameters in an invalid MIME header

    Scan forward to the first ';', collecting everything before it as phrase
    tokens or misplaced specials, then hand the remainder (after the ';') to
    parse_mime_parameters.  Everything is appended onto *tokenlist*.
    """
    while value and value[0] != ';':
        if value[0] in PHRASE_ENDS:
            tokenlist.append(ValueTerminal(value[0], 'misplaced-special'))
            value = value[1:]
        else:
            phrase, value = get_phrase(value)
            tokenlist.append(phrase)
    if not value:
        return
    tokenlist.append(ValueTerminal(';', 'parameter-separator'))
    tokenlist.append(parse_mime_parameters(value[1:]))
def parse_content_type_header(value):
    """ maintype "/" subtype *( ";" parameter )

    Parse a Content-Type header value into a ContentType token list.  The
    maintype and subtype are tokens.  Theoretically they could be checked
    against the official IANA list + x-token, but we don't do that.  Syntax
    problems are recorded as defects on the returned token rather than
    raised.  (An unused 'recover' flag that used to be set here was dead
    code and has been removed.)
    """
    ctype = ContentType()
    if not value:
        ctype.defects.append(errors.HeaderMissingRequiredValue(
            "Missing content type specification"))
        return ctype
    try:
        token, value = get_token(value)
    except errors.HeaderParseError:
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Expected content maintype but found {!r}".format(value)))
        _find_mime_parameters(ctype, value)
        return ctype
    ctype.append(token)
    # XXX: If we really want to follow the formal grammar we should make
    # mantype and subtype specialized TokenLists here.  Probably not worth it.
    if not value or value[0] != '/':
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Invalid content type"))
        if value:
            _find_mime_parameters(ctype, value)
        return ctype
    ctype.maintype = token.value.strip().lower()
    ctype.append(ValueTerminal('/', 'content-type-separator'))
    value = value[1:]
    try:
        token, value = get_token(value)
    except errors.HeaderParseError:
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Expected content subtype but found {!r}".format(value)))
        _find_mime_parameters(ctype, value)
        return ctype
    ctype.append(token)
    ctype.subtype = token.value.strip().lower()
    if not value:
        return ctype
    if value[0] != ';':
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Only parameters are valid after content type, but "
            "found {!r}".format(value)))
        # The RFC requires that a syntactically invalid content-type be treated
        # as text/plain.  Perhaps we should postel this, but we should probably
        # only do that if we were checking the subtype value against IANA.
        del ctype.maintype, ctype.subtype
        _find_mime_parameters(ctype, value)
        return ctype
    ctype.append(ValueTerminal(';', 'parameter-separator'))
    ctype.append(parse_mime_parameters(value[1:]))
    return ctype
def parse_content_disposition_header(value):
    """ disposition-type *( ";" parameter )

    Parse a Content-Disposition value; syntax problems are recorded as
    defects on the returned ContentDisposition token rather than raised.
    """
    header = ContentDisposition()
    if not value:
        header.defects.append(errors.HeaderMissingRequiredValue(
            "Missing content disposition"))
        return header
    try:
        token, value = get_token(value)
    except errors.HeaderParseError:
        header.defects.append(errors.InvalidHeaderDefect(
            "Expected content disposition but found {!r}".format(value)))
        _find_mime_parameters(header, value)
        return header
    header.append(token)
    header.content_disposition = token.value.strip().lower()
    if not value:
        return header
    if value[0] != ';':
        header.defects.append(errors.InvalidHeaderDefect(
            "Only parameters are valid after content disposition, but "
            "found {!r}".format(value)))
        _find_mime_parameters(header, value)
        return header
    header.append(ValueTerminal(';', 'parameter-separator'))
    header.append(parse_mime_parameters(value[1:]))
    return header
def parse_content_transfer_encoding_header(value):
    """ mechanism

    Parse a Content-Transfer-Encoding value; problems are recorded as
    defects on the returned token rather than raised.
    """
    # We should probably validate the values, since the list is fixed.
    header = ContentTransferEncoding()
    if not value:
        header.defects.append(errors.HeaderMissingRequiredValue(
            "Missing content transfer encoding"))
        return header
    try:
        token, value = get_token(value)
    except errors.HeaderParseError:
        header.defects.append(errors.InvalidHeaderDefect(
            "Expected content transfer encoding but found {!r}".format(value)))
    else:
        header.append(token)
        header.cte = token.value.strip().lower()
    if not value:
        return header
    while value:
        # Anything after the mechanism token is bogus; record a defect per
        # extra token but keep everything so the original text survives.
        header.defects.append(errors.InvalidHeaderDefect(
            "Extra text after content transfer encoding"))
        if value[0] in PHRASE_ENDS:
            header.append(ValueTerminal(value[0], 'misplaced-special'))
            value = value[1:]
        else:
            token, value = get_phrase(value)
            header.append(token)
    return header
#
# Header folding
#
# Header folding is complex, with lots of rules and corner cases. The
# following code does its best to obey the rules and handle the corner
# cases, but you can be sure there are few bugs:)
#
# This folder generally canonicalizes as it goes, preferring the stringified
# version of each token. The tokens contain information that supports the
# folder, including which tokens can be encoded in which ways.
#
# Folded text is accumulated in a simple list of strings ('lines'), each
# one of which should be less than policy.max_line_length ('maxlen').
#
def _steal_trailing_WSP_if_exists(lines):
    """Remove and return the trailing WSP char of the last line, if any.

    Returns '' when there is nothing to steal; otherwise the stolen blank is
    stripped from lines[-1] in place so it can be re-used as the leading
    whitespace of the next folded line.
    """
    if not lines:
        return ''
    last = lines[-1]
    if not last or last[-1] not in WSP:
        return ''
    lines[-1] = last[:-1]
    return last[-1]
def _refold_parse_tree(parse_tree, *, policy):
    """Return string of contents of parse_tree folded according to RFC rules.

    Walks the token tree front-to-back, accumulating output in 'lines' (each
    entry one folded line, without the line separator).  Tokens that do not
    fit are either moved whole to a new line, unpacked into their sub-tokens,
    or wrapped as RFC 2047 encoded words, depending on the token's folding
    properties (as_ew_allowed, syntactic_break, ew_combine_allowed).
    """
    # max_line_length 0/None means no limit, ie: infinitely long.
    maxlen = policy.max_line_length or sys.maxsize
    encoding = 'utf-8' if policy.utf8 else 'us-ascii'
    lines = ['']
    # Offset into lines[-1] of the last encoded word, for possible combining.
    last_ew = None
    # Depth counter: >0 while inside a subtree that forbids ew wrapping.
    wrap_as_ew_blocked = 0
    want_encoding = False
    # Sentinel pushed after a no-ew subtree's parts to restore the counter.
    end_ew_not_allowed = Terminal('', 'wrap_as_ew_blocked')
    parts = list(parse_tree)
    while parts:
        part = parts.pop(0)
        if part is end_ew_not_allowed:
            wrap_as_ew_blocked -= 1
            continue
        tstr = str(part)
        try:
            tstr.encode(encoding)
            charset = encoding
        except UnicodeEncodeError:
            if any(isinstance(x, errors.UndecodableBytesDefect)
                   for x in part.all_defects):
                charset = 'unknown-8bit'
            else:
                # If policy.utf8 is false this should really be taken from a
                # 'charset' property on the policy.
                charset = 'utf-8'
            want_encoding = True
        if part.token_type == 'mime-parameters':
            # Mime parameter folding (using RFC2231) is extra special.
            _fold_mime_parameters(part, lines, maxlen, encoding)
            continue
        if want_encoding and not wrap_as_ew_blocked:
            if not part.as_ew_allowed:
                want_encoding = False
                last_ew = None
                if part.syntactic_break:
                    # Let the token fold itself; strip its trailing linesep.
                    encoded_part = part.fold(policy=policy)[:-len(policy.linesep)]
                    if policy.linesep not in encoded_part:
                        # It fits on a single line
                        if len(encoded_part) > maxlen - len(lines[-1]):
                            # But not on this one, so start a new one.
                            newline = _steal_trailing_WSP_if_exists(lines)
                            # XXX what if encoded_part has no leading FWS?
                            lines.append(newline)
                        lines[-1] += encoded_part
                        continue
                # Either this is not a major syntactic break, so we don't
                # want it on a line by itself even if it fits, or it
                # doesn't fit on a line by itself.  Either way, fall through
                # to unpacking the subparts and wrapping them.
            if not hasattr(part, 'encode'):
                # It's not a Terminal, do each piece individually.
                parts = list(part) + parts
            else:
                # It's a terminal, wrap it as an encoded word, possibly
                # combining it with previously encoded words if allowed.
                last_ew = _fold_as_ew(tstr, lines, maxlen, last_ew,
                                      part.ew_combine_allowed, charset)
            want_encoding = False
            continue
        if len(tstr) <= maxlen - len(lines[-1]):
            # Fits on the current line as-is.
            lines[-1] += tstr
            continue
        # This part is too long to fit.  The RFC wants us to break at
        # "major syntactic breaks", so unless we don't consider this
        # to be one, check if it will fit on the next line by itself.
        if (part.syntactic_break and
                len(tstr) + 1 <= maxlen):
            newline = _steal_trailing_WSP_if_exists(lines)
            if newline or part.startswith_fws():
                lines.append(newline + tstr)
                last_ew = None
                continue
        if not hasattr(part, 'encode'):
            # It's not a terminal, try folding the subparts.
            newparts = list(part)
            if not part.as_ew_allowed:
                wrap_as_ew_blocked += 1
                newparts.append(end_ew_not_allowed)
            parts = newparts + parts
            continue
        if part.as_ew_allowed and not wrap_as_ew_blocked:
            # It doesn't need CTE encoding, but encode it anyway so we can
            # wrap it.
            parts.insert(0, part)
            want_encoding = True
            continue
        # We can't figure out how to wrap, it, so give up.
        newline = _steal_trailing_WSP_if_exists(lines)
        if newline or part.startswith_fws():
            lines.append(newline + tstr)
        else:
            # We can't fold it onto the next line either...
            lines[-1] += tstr
    return policy.linesep.join(lines) + policy.linesep
def _fold_as_ew(to_encode, lines, maxlen, last_ew, ew_combine_allowed, charset):
    """Fold string to_encode into lines as encoded word, combining if allowed.
    Return the new value for last_ew, or None if ew_combine_allowed is False.

    If there is already an encoded word in the last line of lines (indicated by
    a non-None value for last_ew) and ew_combine_allowed is true, decode the
    existing ew, combine it with to_encode, and re-encode.  Otherwise, encode
    to_encode.  In either case, split to_encode as necessary so that the
    encoded segments fit within maxlen.
    """
    if last_ew is not None and ew_combine_allowed:
        # Re-decode the tail of the last line from last_ew on, glue the new
        # text onto it, and re-encode the whole thing below.
        to_encode = str(
            get_unstructured(lines[-1][last_ew:] + to_encode))
        lines[-1] = lines[-1][:last_ew]
        if to_encode[0] in WSP:
            # We're joining this to non-encoded text, so don't encode
            # the leading blank.
            leading_wsp = to_encode[0]
            to_encode = to_encode[1:]
            if (len(lines[-1]) == maxlen):
                lines.append(_steal_trailing_WSP_if_exists(lines))
            lines[-1] += leading_wsp
    trailing_wsp = ''
    # NOTE(review): to_encode could be empty here (e.g. a single stolen
    # blank), in which case to_encode[-1] would raise IndexError -- confirm
    # callers never pass an empty terminal.
    if to_encode[-1] in WSP:
        # Likewise for the trailing space.
        trailing_wsp = to_encode[-1]
        to_encode = to_encode[:-1]
    new_last_ew = len(lines[-1]) if last_ew is None else last_ew
    encode_as = 'utf-8' if charset == 'us-ascii' else charset
    # The RFC2047 chrome takes up 7 characters plus the length
    # of the charset name.
    chrome_len = len(encode_as) + 7
    if (chrome_len + 1) >= maxlen:
        raise errors.HeaderParseError(
            "max_line_length is too small to fit an encoded word")
    while to_encode:
        remaining_space = maxlen - len(lines[-1])
        text_space = remaining_space - chrome_len
        if text_space <= 0:
            # No room on this line at all; start a fresh continuation line.
            lines.append(' ')
            continue
        to_encode_word = to_encode[:text_space]
        encoded_word = _ew.encode(to_encode_word, charset=encode_as)
        excess = len(encoded_word) - remaining_space
        while excess > 0:
            # Since the chunk to encode is guaranteed to fit into less than 100 characters,
            # shrinking it by one at a time shouldn't take long.
            to_encode_word = to_encode_word[:-1]
            encoded_word = _ew.encode(to_encode_word, charset=encode_as)
            excess = len(encoded_word) - remaining_space
        lines[-1] += encoded_word
        to_encode = to_encode[len(to_encode_word):]
        if to_encode:
            lines.append(' ')
            new_last_ew = len(lines[-1])
    lines[-1] += trailing_wsp
    return new_last_ew if ew_combine_allowed else None
def _fold_mime_parameters(part, lines, maxlen, encoding):
    """Fold TokenList 'part' into the 'lines' list as mime parameters.

    Using the decoded list of parameters and values, format them according to
    the RFC rules, including using RFC2231 encoding if the value cannot be
    expressed in 'encoding' and/or the parameter+value is too long to fit
    within 'maxlen'.
    """
    # Special case for RFC2231 encoding: start from decoded values and use
    # RFC2231 encoding iff needed.
    #
    # Note that the 1 and 2s being added to the length calculations are
    # accounting for the possibly-needed spaces and semicolons we'll be adding.
    #
    for name, value in part.params:
        # XXX What if this ';' puts us over maxlen the first time through the
        # loop?  We should split the header value onto a newline in that case,
        # but to do that we need to recognize the need earlier or reparse the
        # header, so I'm going to ignore that bug for now.  It'll only put us
        # one character over.
        if not lines[-1].rstrip().endswith(';'):
            lines[-1] += ';'
        charset = encoding
        error_handler = 'strict'
        try:
            value.encode(encoding)
            encoding_required = False
        except UnicodeEncodeError:
            encoding_required = True
            if utils._has_surrogates(value):
                # Unknown raw bytes survive as surrogates; pass them through.
                charset = 'unknown-8bit'
                error_handler = 'surrogateescape'
            else:
                charset = 'utf-8'
        if encoding_required:
            # RFC 2231 extended syntax: name*=charset''percent-escaped-value.
            encoded_value = urllib.parse.quote(
                value, safe='', errors=error_handler)
            tstr = "{}*={}''{}".format(name, charset, encoded_value)
        else:
            tstr = '{}={}'.format(name, quote_string(value))
        if len(lines[-1]) + len(tstr) + 1 < maxlen:
            # Fits on the current line after a separating blank.
            lines[-1] = lines[-1] + ' ' + tstr
            continue
        elif len(tstr) + 2 <= maxlen:
            # Fits on a (continuation) line by itself.
            lines.append(' ' + tstr)
            continue
        # We need multiple sections.  We are allowed to mix encoded and
        # non-encoded sections, but we aren't going to.  We'll encode them all.
        section = 0
        extra_chrome = charset + "''"
        while value:
            chrome_len = len(name) + len(str(section)) + 3 + len(extra_chrome)
            if maxlen <= chrome_len + 3:
                # We need room for the leading blank, the trailing semicolon,
                # and at least one character of the value.  If we don't
                # have that, we'd be stuck, so in that case fall back to
                # the RFC standard width.
                maxlen = 78
            splitpoint = maxchars = maxlen - chrome_len - 2
            while True:
                # Shrink the slice until its escaped form fits the line.
                partial = value[:splitpoint]
                encoded_value = urllib.parse.quote(
                    partial, safe='', errors=error_handler)
                if len(encoded_value) <= maxchars:
                    break
                splitpoint -= 1
            lines.append(" {}*{}*={}{}".format(
                name, section, extra_chrome, encoded_value))
            extra_chrome = ''
            section += 1
            value = value[splitpoint:]
            if value:
                # Another section follows, so terminate this one with ';'.
                lines[-1] += ';'
"""Tornado handlers for the contents web service."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import json
from tornado import gen, web
from IPython.html.utils import url_path_join, url_escape
from IPython.utils.jsonutil import date_default
from IPython.html.base.handlers import (
IPythonHandler, APIHandler, json_errors, path_regex,
)
def sort_key(model):
    """key function for case-insensitive sort by name and type"""
    # Directories sort first, then notebooks, then files; anything
    # unrecognized sorts last ('9').
    order = {
        'directory': '0',
        'notebook': '1',
        'file': '2',
    }
    rank = order.get(model['type'], '9')
    return u'%s%s' % (rank, model['name'].lower())
def validate_model(model, expect_content):
    """
    Validate a model returned by a ContentsManager method.

    If expect_content is True, then we expect non-null entries for 'content'
    and 'format'.  Raises a 500 HTTPError on any violation; returns None on
    success.
    """
    required_keys = {
        "name", "path", "type", "writable", "created",
        "last_modified", "mimetype", "content", "format",
    }
    missing = required_keys - set(model.keys())
    if missing:
        raise web.HTTPError(
            500,
            u"Missing Model Keys: {missing}".format(missing=missing),
        )
    maybe_none_keys = ['content', 'format']
    if expect_content:
        # Content was requested: both keys must be populated.
        nulls = [key for key in maybe_none_keys if model[key] is None]
        if nulls:
            raise web.HTTPError(
                500,
                u"Keys unexpectedly None: {keys}".format(keys=nulls),
            )
    else:
        # Content was not requested: both keys must be None.
        present = {}
        for key in maybe_none_keys:
            if model[key] is not None:
                present[key] = model[key]
        if present:
            raise web.HTTPError(
                500,
                u"Keys unexpectedly not None: {keys}".format(keys=present),
            )
class ContentsHandler(APIHandler):
    # REST handler for /api/contents: CRUD operations on files, notebooks,
    # and directories, delegated to the configured ContentsManager.

    SUPPORTED_METHODS = (u'GET', u'PUT', u'PATCH', u'POST', u'DELETE')

    def location_url(self, path):
        """Return the full URL location of a file.
        Parameters
        ----------
        path : unicode
            The API path of the file, such as "foo/bar.txt".
        """
        return url_escape(url_path_join(
            self.base_url, 'api', 'contents', path
        ))

    def _finish_model(self, model, location=True):
        """Finish a JSON request with a model, setting relevant headers, etc."""
        # `location` doubles as a flag and, once computed, the URL string.
        if location:
            location = self.location_url(model['path'])
            self.set_header('Location', location)
        self.set_header('Last-Modified', model['last_modified'])
        self.set_header('Content-Type', 'application/json')
        self.finish(json.dumps(model, default=date_default))

    @web.authenticated
    @json_errors
    @gen.coroutine
    def get(self, path=''):
        """Return a model for a file or directory.

        A directory model contains a list of models (without content)
        of the files and directories it contains.
        """
        path = path or ''
        # Validate the three optional query arguments before hitting the
        # contents manager; bad values are client errors (400).
        type = self.get_query_argument('type', default=None)
        if type not in {None, 'directory', 'file', 'notebook'}:
            raise web.HTTPError(400, u'Type %r is invalid' % type)
        format = self.get_query_argument('format', default=None)
        if format not in {None, 'text', 'base64'}:
            raise web.HTTPError(400, u'Format %r is invalid' % format)
        content = self.get_query_argument('content', default='1')
        if content not in {'0', '1'}:
            raise web.HTTPError(400, u'Content %r is invalid' % content)
        content = int(content)
        model = yield gen.maybe_future(self.contents_manager.get(
            path=path, type=type, format=format, content=content,
        ))
        if model['type'] == 'directory' and content:
            # group listing by type, then by name (case-insensitive)
            # FIXME: sorting should be done in the frontends
            model['content'].sort(key=sort_key)
        validate_model(model, expect_content=content)
        self._finish_model(model, location=False)

    @web.authenticated
    @json_errors
    @gen.coroutine
    def patch(self, path=''):
        """PATCH renames a file or directory without re-uploading content."""
        cm = self.contents_manager
        model = self.get_json_body()
        if model is None:
            raise web.HTTPError(400, u'JSON body missing')
        model = yield gen.maybe_future(cm.update(model, path))
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @gen.coroutine
    def _copy(self, copy_from, copy_to=None):
        """Copy a file, optionally specifying a target directory."""
        self.log.info(u"Copying {copy_from} to {copy_to}".format(
            copy_from=copy_from,
            copy_to=copy_to or '',
        ))
        model = yield gen.maybe_future(self.contents_manager.copy(copy_from, copy_to))
        self.set_status(201)
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @gen.coroutine
    def _upload(self, model, path):
        """Handle upload of a new file to path"""
        self.log.info(u"Uploading file to %s", path)
        model = yield gen.maybe_future(self.contents_manager.new(model, path))
        self.set_status(201)
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @gen.coroutine
    def _new_untitled(self, path, type='', ext=''):
        """Create a new, empty untitled entity"""
        self.log.info(u"Creating new %s in %s", type or 'file', path)
        model = yield gen.maybe_future(self.contents_manager.new_untitled(path=path, type=type, ext=ext))
        self.set_status(201)
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @gen.coroutine
    def _save(self, model, path):
        """Save an existing file."""
        self.log.info(u"Saving file at %s", path)
        model = yield gen.maybe_future(self.contents_manager.save(model, path))
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @web.authenticated
    @json_errors
    @gen.coroutine
    def post(self, path=''):
        """Create a new file in the specified path.

        POST creates new files. The server always decides on the name.

        POST /api/contents/path
          New untitled, empty file or directory.
        POST /api/contents/path
          with body {"copy_from" : "/path/to/OtherNotebook.ipynb"}
          New copy of OtherNotebook in path
        """
        cm = self.contents_manager
        if cm.file_exists(path):
            raise web.HTTPError(400, "Cannot POST to files, use PUT instead.")
        if not cm.dir_exists(path):
            raise web.HTTPError(404, "No such directory: %s" % path)
        model = self.get_json_body()
        if model is not None:
            copy_from = model.get('copy_from')
            ext = model.get('ext', '')
            type = model.get('type', '')
            if copy_from:
                yield self._copy(copy_from, path)
            else:
                yield self._new_untitled(path, type=type, ext=ext)
        else:
            # No body at all: create an untitled file with defaults.
            yield self._new_untitled(path)

    @web.authenticated
    @json_errors
    @gen.coroutine
    def put(self, path=''):
        """Saves the file in the location specified by name and path.

        PUT is very similar to POST, but the requester specifies the name,
        whereas with POST, the server picks the name.

        PUT /api/contents/path/Name.ipynb
          Save notebook at ``path/Name.ipynb``. Notebook structure is specified
          in `content` key of JSON request body. If content is not specified,
          create a new empty notebook.
        """
        model = self.get_json_body()
        if model:
            if model.get('copy_from'):
                raise web.HTTPError(400, "Cannot copy with PUT, only POST")
            # Existing target -> save; new target -> upload.
            exists = yield gen.maybe_future(self.contents_manager.file_exists(path))
            if exists:
                yield gen.maybe_future(self._save(model, path))
            else:
                yield gen.maybe_future(self._upload(model, path))
        else:
            yield gen.maybe_future(self._new_untitled(path))

    @web.authenticated
    @json_errors
    @gen.coroutine
    def delete(self, path=''):
        """delete a file in the given path"""
        cm = self.contents_manager
        self.log.warn('delete %s', path)
        yield gen.maybe_future(cm.delete(path))
        self.set_status(204)
        self.finish()
class CheckpointsHandler(APIHandler):
    """List existing checkpoints for a file, or create a new one."""

    SUPPORTED_METHODS = ('GET', 'POST')

    @web.authenticated
    @json_errors
    @gen.coroutine
    def get(self, path=''):
        """get lists checkpoints for a file"""
        manager = self.contents_manager
        checkpoints = yield gen.maybe_future(manager.list_checkpoints(path))
        self.finish(json.dumps(checkpoints, default=date_default))

    @web.authenticated
    @json_errors
    @gen.coroutine
    def post(self, path=''):
        """post creates a new checkpoint"""
        manager = self.contents_manager
        checkpoint = yield gen.maybe_future(manager.create_checkpoint(path))
        body = json.dumps(checkpoint, default=date_default)
        # Point the client at the new checkpoint resource.
        location = url_path_join(self.base_url, 'api/contents',
                                 path, 'checkpoints', checkpoint['id'])
        self.set_header('Location', url_escape(location))
        self.set_status(201)
        self.finish(body)
class ModifyCheckpointsHandler(APIHandler):
    """Restore from, or delete, a single named checkpoint."""

    SUPPORTED_METHODS = ('POST', 'DELETE')

    @web.authenticated
    @json_errors
    @gen.coroutine
    def post(self, path, checkpoint_id):
        """post restores a file from a checkpoint"""
        manager = self.contents_manager
        yield gen.maybe_future(manager.restore_checkpoint(checkpoint_id, path))
        self.set_status(204)
        self.finish()

    @web.authenticated
    @json_errors
    @gen.coroutine
    def delete(self, path, checkpoint_id):
        """delete clears a checkpoint for a given file"""
        manager = self.contents_manager
        yield gen.maybe_future(manager.delete_checkpoint(checkpoint_id, path))
        self.set_status(204)
        self.finish()
class NotebooksRedirectHandler(IPythonHandler):
    """Redirect /api/notebooks to /api/contents"""

    SUPPORTED_METHODS = ('GET', 'PUT', 'PATCH', 'POST', 'DELETE')

    def get(self, path):
        self.log.warn("/api/notebooks is deprecated, use /api/contents")
        target = url_path_join(self.base_url, 'api/contents', path)
        self.redirect(target)

    # All verbs redirect identically.
    put = patch = post = delete = get
#-----------------------------------------------------------------------------
# URL to handler mappings
#-----------------------------------------------------------------------------
# Regex fragment matching a checkpoint id URL segment (word chars and dashes).
_checkpoint_id_regex = r"(?P<checkpoint_id>[\w-]+)"
# URL-to-handler routing table for the contents service; `path_regex`
# (from the base handlers module) captures the API file-path portion.
default_handlers = [
    (r"/api/contents%s/checkpoints" % path_regex, CheckpointsHandler),
    (r"/api/contents%s/checkpoints/%s" % (path_regex, _checkpoint_id_regex),
        ModifyCheckpointsHandler),
    (r"/api/contents%s" % path_regex, ContentsHandler),
    (r"/api/notebooks/?(.*)", NotebooksRedirectHandler),
]
# Copyright 2011 OpenStack LLC.
# Copyright 2012 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The security groups extension."""
import urllib
from xml.dom import minidom
from webob import exc
import webob
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import compute
from nova import db
from nova import exception
from nova import flags
from nova import log as logging
from nova import quota
from nova import utils
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
authorize = extensions.extension_authorizer('compute', 'security_groups')
def make_rule(elem):
    """Populate *elem* with the security-group-rule attributes/children."""
    elem.set('id')
    elem.set('parent_group_id')
    # Simple child elements whose text selector matches the tag name.
    for tag in ('ip_protocol', 'from_port', 'to_port'):
        child = xmlutil.SubTemplateElement(elem, tag)
        child.text = tag
    group = xmlutil.SubTemplateElement(elem, 'group', selector='group')
    for tag in ('name', 'tenant_id'):
        child = xmlutil.SubTemplateElement(group, tag)
        child.text = tag
    ip_range = xmlutil.SubTemplateElement(elem, 'ip_range',
                                          selector='ip_range')
    cidr = xmlutil.SubTemplateElement(ip_range, 'cidr')
    cidr.text = 'cidr'
def make_sg(elem):
    """Populate *elem* with the security-group attributes/children."""
    for attr in ('id', 'tenant_id', 'name'):
        elem.set(attr)
    desc = xmlutil.SubTemplateElement(elem, 'description')
    desc.text = 'description'
    # Nested list of rules, each rendered via make_rule.
    rules = xmlutil.SubTemplateElement(elem, 'rules')
    rule = xmlutil.SubTemplateElement(rules, 'rule', selector='rules')
    make_rule(rule)
# Default XML namespace map used by the security-group templates below.
sg_nsmap = {None: wsgi.XMLNS_V11}
class SecurityGroupRuleTemplate(xmlutil.TemplateBuilder):
    # Serializer template for a single security-group-rule response.
    def construct(self):
        """Build the master template rooted at <security_group_rule>."""
        root = xmlutil.TemplateElement('security_group_rule',
                                       selector='security_group_rule')
        make_rule(root)
        return xmlutil.MasterTemplate(root, 1, nsmap=sg_nsmap)
class SecurityGroupTemplate(xmlutil.TemplateBuilder):
    # Serializer template for a single security-group response.
    def construct(self):
        """Build the master template rooted at <security_group>."""
        root = xmlutil.TemplateElement('security_group',
                                       selector='security_group')
        make_sg(root)
        return xmlutil.MasterTemplate(root, 1, nsmap=sg_nsmap)
class SecurityGroupsTemplate(xmlutil.TemplateBuilder):
    # Serializer template for a list-of-security-groups response.
    def construct(self):
        """Build the master template rooted at <security_groups>."""
        root = xmlutil.TemplateElement('security_groups')
        elem = xmlutil.SubTemplateElement(root, 'security_group',
                                          selector='security_groups')
        make_sg(elem)
        return xmlutil.MasterTemplate(root, 1, nsmap=sg_nsmap)
class SecurityGroupXMLDeserializer(wsgi.MetadataXMLDeserializer):
    """
    Deserializer to handle xml-formatted security group requests.
    """
    def default(self, string):
        """Deserialize an xml-formatted security group create request.

        Returns {'body': {'security_group': {...}}} where the inner dict
        carries 'name'/'description' only when present in the document.
        """
        # NOTE(security): minidom.parseString runs on client-supplied XML
        # with no guard against entity-expansion attacks (billion laughs);
        # consider routing through a hardened parse helper.
        dom = minidom.parseString(string)
        security_group = {}
        sg_node = self.find_first_child_named(dom,
                                              'security_group')
        if sg_node is not None:
            if sg_node.hasAttribute('name'):
                security_group['name'] = sg_node.getAttribute('name')
            desc_node = self.find_first_child_named(sg_node,
                                                    "description")
            # Explicit None comparison for consistency with every other
            # DOM-node check in this module (implicit truthiness of DOM
            # nodes is not something to rely on).
            if desc_node is not None:
                security_group['description'] = self.extract_text(desc_node)
        return {'body': {'security_group': security_group}}
class SecurityGroupRulesXMLDeserializer(wsgi.MetadataXMLDeserializer):
    """
    Deserializer to handle xml-formatted security group requests.
    """
    # Child elements copied verbatim from the request, in lookup order.
    _RULE_FIELDS = ('ip_protocol', 'from_port', 'to_port',
                    'parent_group_id', 'group_id', 'cidr')

    def default(self, string):
        """Deserialize an xml-formatted security group create request"""
        dom = minidom.parseString(string)
        rule = self._extract_security_group_rule(dom)
        return {'body': {'security_group_rule': rule}}

    def _extract_security_group_rule(self, node):
        """Marshal the security group rule attribute of a parsed request"""
        sg_rule = {}
        sg_rule_node = self.find_first_child_named(node,
                                                   'security_group_rule')
        if sg_rule_node is not None:
            for field in self._RULE_FIELDS:
                child = self.find_first_child_named(sg_rule_node, field)
                if child is not None:
                    sg_rule[field] = self.extract_text(child)
        return sg_rule
class SecurityGroupControllerBase(object):
    """Base class for Security Group controllers.

    Provides the compute API / security-group-handler plumbing and the
    helpers that turn DB rows into the dicts returned by the REST layer.
    """
    def __init__(self):
        self.compute_api = compute.API()
        # Pluggable handler notified on group/rule lifecycle events.
        self.sgh = utils.import_object(FLAGS.security_group_handler)
    def _format_security_group_rule(self, context, rule):
        """Map a security-group-rule DB row to a response dict."""
        sg_rule = {}
        sg_rule['id'] = rule.id
        sg_rule['parent_group_id'] = rule.parent_group_id
        sg_rule['ip_protocol'] = rule.protocol
        sg_rule['from_port'] = rule.from_port
        sg_rule['to_port'] = rule.to_port
        sg_rule['group'] = {}
        sg_rule['ip_range'] = {}
        # A rule references either a source group or a CIDR, never both.
        if rule.group_id:
            source_group = db.security_group_get(context, rule.group_id)
            sg_rule['group'] = {'name': source_group.name,
                                'tenant_id': source_group.project_id}
        else:
            sg_rule['ip_range'] = {'cidr': rule.cidr}
        return sg_rule
    def _format_security_group(self, context, group):
        """Map a security-group DB row (and its rules) to a response dict."""
        security_group = {}
        security_group['id'] = group.id
        security_group['description'] = group.description
        security_group['name'] = group.name
        security_group['tenant_id'] = group.project_id
        security_group['rules'] = []
        for rule in group.rules:
            security_group['rules'] += [self._format_security_group_rule(
                context, rule)]
        return security_group
class SecurityGroupController(SecurityGroupControllerBase):
    """The Security group API controller for the OpenStack API."""
    def _get_security_group(self, context, id):
        """Fetch a group by integer id, translating errors to HTTP ones."""
        try:
            id = int(id)
            security_group = db.security_group_get(context, id)
        except ValueError:
            msg = _("Security group id should be integer")
            raise exc.HTTPBadRequest(explanation=msg)
        except exception.NotFound as exp:
            raise exc.HTTPNotFound(explanation=unicode(exp))
        return security_group
    @wsgi.serializers(xml=SecurityGroupTemplate)
    def show(self, req, id):
        """Return data about the given security group."""
        context = req.environ['nova.context']
        authorize(context)
        security_group = self._get_security_group(context, id)
        return {'security_group': self._format_security_group(context,
                                                              security_group)}
    def delete(self, req, id):
        """Delete a security group."""
        context = req.environ['nova.context']
        authorize(context)
        security_group = self._get_security_group(context, id)
        # Refuse to delete a group still referenced by an instance.
        if db.security_group_in_use(context, security_group.id):
            msg = _("Security group is still in use")
            raise exc.HTTPBadRequest(explanation=msg)
        LOG.audit(_("Delete security group %s"), id, context=context)
        db.security_group_destroy(context, security_group.id)
        self.sgh.trigger_security_group_destroy_refresh(
            context, security_group.id)
        return webob.Response(status_int=202)
    @wsgi.serializers(xml=SecurityGroupsTemplate)
    def index(self, req):
        """Returns a list of security groups"""
        context = req.environ['nova.context']
        authorize(context)
        self.compute_api.ensure_default_security_group(context)
        groups = db.security_group_get_by_project(context,
                                                  context.project_id)
        # Honor pagination/limit query parameters.
        limited_list = common.limited(groups, req)
        result = [self._format_security_group(context, group)
                  for group in limited_list]
        return {'security_groups':
                list(sorted(result,
                            key=lambda k: (k['tenant_id'], k['name'])))}
    @wsgi.serializers(xml=SecurityGroupTemplate)
    @wsgi.deserializers(xml=SecurityGroupXMLDeserializer)
    def create(self, req, body):
        """Creates a new security group."""
        context = req.environ['nova.context']
        authorize(context)
        if not body:
            raise exc.HTTPUnprocessableEntity()
        security_group = body.get('security_group', None)
        if security_group is None:
            raise exc.HTTPUnprocessableEntity()
        group_name = security_group.get('name', None)
        group_description = security_group.get('description', None)
        # Both fields must be non-empty strings of at most 255 characters.
        self._validate_security_group_property(group_name, "name")
        self._validate_security_group_property(group_description,
                                               "description")
        group_name = group_name.strip()
        group_description = group_description.strip()
        if quota.allowed_security_groups(context, 1) < 1:
            msg = _("Quota exceeded, too many security groups.")
            raise exc.HTTPBadRequest(explanation=msg)
        LOG.audit(_("Create Security Group %s"), group_name, context=context)
        self.compute_api.ensure_default_security_group(context)
        # Names must be unique per project.
        if db.security_group_exists(context, context.project_id, group_name):
            msg = _('Security group %s already exists') % group_name
            raise exc.HTTPBadRequest(explanation=msg)
        group = {'user_id': context.user_id,
                 'project_id': context.project_id,
                 'name': group_name,
                 'description': group_description}
        group_ref = db.security_group_create(context, group)
        self.sgh.trigger_security_group_create_refresh(context, group)
        return {'security_group': self._format_security_group(context,
                                                              group_ref)}
    def _validate_security_group_property(self, value, typ):
        """ typ will be either 'name' or 'description',
        depending on the caller
        """
        try:
            val = value.strip()
        except AttributeError:
            msg = _("Security group %s is not a string or unicode") % typ
            raise exc.HTTPBadRequest(explanation=msg)
        if not val:
            msg = _("Security group %s cannot be empty.") % typ
            raise exc.HTTPBadRequest(explanation=msg)
        if len(val) > 255:
            msg = _("Security group %s should not be greater "
                    "than 255 characters.") % typ
            raise exc.HTTPBadRequest(explanation=msg)
class SecurityGroupRulesController(SecurityGroupControllerBase):
    # Controller for creating and deleting individual security group rules.
    @wsgi.serializers(xml=SecurityGroupRuleTemplate)
    @wsgi.deserializers(xml=SecurityGroupRulesXMLDeserializer)
    def create(self, req, body):
        """Add an ingress rule to an existing security group."""
        context = req.environ['nova.context']
        authorize(context)
        if not body:
            raise exc.HTTPUnprocessableEntity()
        if not 'security_group_rule' in body:
            raise exc.HTTPUnprocessableEntity()
        self.compute_api.ensure_default_security_group(context)
        sg_rule = body['security_group_rule']
        parent_group_id = sg_rule.get('parent_group_id', None)
        try:
            parent_group_id = int(parent_group_id)
            security_group = db.security_group_get(context, parent_group_id)
        except ValueError:
            msg = _("Parent group id is not integer")
            raise exc.HTTPBadRequest(explanation=msg)
        except exception.NotFound as exp:
            msg = _("Security group (%s) not found") % parent_group_id
            raise exc.HTTPNotFound(explanation=msg)
        msg = _("Authorize security group ingress %s")
        LOG.audit(msg, security_group['name'], context=context)
        try:
            # Normalizes/validates the raw request fields; raises
            # exception.Invalid* subclasses on malformed input.
            values = self._rule_args_to_dict(context,
                              to_port=sg_rule.get('to_port'),
                              from_port=sg_rule.get('from_port'),
                              parent_group_id=sg_rule.get('parent_group_id'),
                              ip_protocol=sg_rule.get('ip_protocol'),
                              cidr=sg_rule.get('cidr'),
                              group_id=sg_rule.get('group_id'))
        except Exception as exp:
            raise exc.HTTPBadRequest(explanation=unicode(exp))
        if values is None:
            msg = _("Not enough parameters to build a "
                    "valid rule.")
            raise exc.HTTPBadRequest(explanation=msg)
        values['parent_group_id'] = security_group.id
        if self._security_group_rule_exists(security_group, values):
            msg = _('This rule already exists in group %s') % parent_group_id
            raise exc.HTTPBadRequest(explanation=msg)
        allowed = quota.allowed_security_group_rules(context,
                                                     parent_group_id,
                                                     1)
        if allowed < 1:
            msg = _("Quota exceeded, too many security group rules.")
            raise exc.HTTPBadRequest(explanation=msg)
        security_group_rule = db.security_group_rule_create(context, values)
        self.sgh.trigger_security_group_rule_create_refresh(
            context, [security_group_rule['id']])
        self.compute_api.trigger_security_group_rules_refresh(context,
                                    security_group_id=security_group['id'])
        return {"security_group_rule": self._format_security_group_rule(
                                                        context,
                                                        security_group_rule)}
    def _security_group_rule_exists(self, security_group, values):
        """Indicates whether the specified rule values are already
        defined in the given security group.
        """
        for rule in security_group.rules:
            is_duplicate = True
            keys = ('group_id', 'cidr', 'from_port', 'to_port', 'protocol')
            for key in keys:
                if rule.get(key) != values.get(key):
                    is_duplicate = False
                    break
            if is_duplicate:
                return True
        return False
    def _rule_args_to_dict(self, context, to_port=None, from_port=None,
                           parent_group_id=None, ip_protocol=None,
                           cidr=None, group_id=None):
        """Normalize and validate rule arguments into DB-ready values.

        Returns None when a CIDR-based rule lacks protocol/port info;
        raises exception.Invalid* subclasses for malformed input.
        """
        values = {}
        if group_id is not None:
            try:
                parent_group_id = int(parent_group_id)
                group_id = int(group_id)
            except ValueError:
                msg = _("Parent or group id is not integer")
                raise exception.InvalidInput(reason=msg)
            values['group_id'] = group_id
            #check if groupId exists
            db.security_group_get(context, group_id)
        elif cidr:
            # If this fails, it throws an exception. This is what we want.
            # NOTE(review): urllib.unquote/.decode() is Python 2 only —
            # confirm before any Python 3 migration.
            try:
                cidr = urllib.unquote(cidr).decode()
            except Exception:
                raise exception.InvalidCidr(cidr=cidr)
            if not utils.is_valid_cidr(cidr):
                # Raise exception for non-valid address
                raise exception.InvalidCidr(cidr=cidr)
            values['cidr'] = cidr
        else:
            # No source group and no CIDR: default to allow-from-anywhere.
            values['cidr'] = '0.0.0.0/0'
        if group_id:
            # Open everything if an explicit port range or type/code are not
            # specified, but only if a source group was specified.
            ip_proto_upper = ip_protocol.upper() if ip_protocol else ''
            if (ip_proto_upper == 'ICMP' and
                from_port is None and to_port is None):
                from_port = -1
                to_port = -1
            elif (ip_proto_upper in ['TCP', 'UDP'] and from_port is None
                  and to_port is None):
                from_port = 1
                to_port = 65535
        if ip_protocol and from_port is not None and to_port is not None:
            ip_protocol = str(ip_protocol)
            try:
                from_port = int(from_port)
                to_port = int(to_port)
            except ValueError:
                if ip_protocol.upper() == 'ICMP':
                    raise exception.InvalidInput(reason="Type and"
                           " Code must be integers for ICMP protocol type")
                else:
                    raise exception.InvalidInput(reason="To and From ports "
                          "must be integers")
            if ip_protocol.upper() not in ['TCP', 'UDP', 'ICMP']:
                raise exception.InvalidIpProtocol(protocol=ip_protocol)
            # Verify that from_port must always be less than
            # or equal to to_port
            if (ip_protocol.upper() in ['TCP', 'UDP'] and
                from_port > to_port):
                raise exception.InvalidPortRange(from_port=from_port,
                      to_port=to_port, msg="Former value cannot"
                                            " be greater than the later")
            # Verify valid TCP, UDP port ranges
            if (ip_protocol.upper() in ['TCP', 'UDP'] and
                (from_port < 1 or to_port > 65535)):
                raise exception.InvalidPortRange(from_port=from_port,
                      to_port=to_port, msg="Valid TCP ports should"
                                           " be between 1-65535")
            # Verify ICMP type and code
            if (ip_protocol.upper() == "ICMP" and
                (from_port < -1 or from_port > 255 or
                to_port < -1 or to_port > 255)):
                raise exception.InvalidPortRange(from_port=from_port,
                      to_port=to_port, msg="For ICMP, the"
                                           " type:code must be valid")
            values['protocol'] = ip_protocol
            values['from_port'] = from_port
            values['to_port'] = to_port
        else:
            # If cidr based filtering, protocol and ports are mandatory
            if 'cidr' in values:
                return None
        return values
    def delete(self, req, id):
        """Revoke (delete) a single security group rule by id."""
        context = req.environ['nova.context']
        authorize(context)
        self.compute_api.ensure_default_security_group(context)
        try:
            id = int(id)
            rule = db.security_group_rule_get(context, id)
        except ValueError:
            msg = _("Rule id is not integer")
            raise exc.HTTPBadRequest(explanation=msg)
        except exception.NotFound:
            msg = _("Rule (%s) not found") % id
            raise exc.HTTPNotFound(explanation=msg)
        group_id = rule.parent_group_id
        # NOTE(review): ensure_default_security_group was already called
        # above; this second call looks redundant.
        self.compute_api.ensure_default_security_group(context)
        security_group = db.security_group_get(context, group_id)
        msg = _("Revoke security group ingress %s")
        LOG.audit(msg, security_group['name'], context=context)
        db.security_group_rule_destroy(context, rule['id'])
        self.sgh.trigger_security_group_rule_destroy_refresh(
            context, [rule['id']])
        self.compute_api.trigger_security_group_rules_refresh(context,
                                    security_group_id=security_group['id'])
        return webob.Response(status_int=202)
class ServerSecurityGroupController(SecurityGroupControllerBase):
    """Lists the security groups attached to a single server."""
    @wsgi.serializers(xml=SecurityGroupsTemplate)
    def index(self, req, server_id):
        """Returns a list of security groups for the given instance."""
        context = req.environ['nova.context']
        authorize(context)
        self.compute_api.ensure_default_security_group(context)
        try:
            instance = self.compute_api.get(context, server_id)
            groups = db.security_group_get_by_instance(context,
                                                       instance['id'])
        # 'except X as e' instead of the deprecated comma form, for
        # consistency with the rest of this module.
        except exception.ApiError as e:
            raise webob.exc.HTTPBadRequest(explanation=e.message)
        except exception.NotAuthorized:
            raise webob.exc.HTTPUnauthorized()
        result = [self._format_security_group(context, group)
                  for group in groups]
        return {'security_groups':
                list(sorted(result,
                            key=lambda k: (k['tenant_id'], k['name'])))}
class SecurityGroupActionController(wsgi.Controller):
    """Server actions adding/removing a security group by name."""
    def __init__(self, *args, **kwargs):
        super(SecurityGroupActionController, self).__init__(*args, **kwargs)
        self.compute_api = compute.API()
        self.sgh = utils.import_object(FLAGS.security_group_handler)

    def _parse_group_name(self, body, action):
        """Extract and validate the security group name from *body*.

        :param body: the decoded action body (may be None/malformed)
        :param action: the action key, e.g. 'addSecurityGroup'
        :raises webob.exc.HTTPBadRequest: on a missing dict, missing key,
            or empty group name
        """
        try:
            group_name = body[action]['name']
        except TypeError:
            msg = _("Missing parameter dict")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        except KeyError:
            msg = _("Security group not specified")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        if not group_name or group_name.strip() == '':
            msg = _("Security group name cannot be empty")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        return group_name

    @wsgi.action('addSecurityGroup')
    def _addSecurityGroup(self, req, id, body):
        """Attach the named security group to server *id*."""
        context = req.environ['nova.context']
        authorize(context)
        group_name = self._parse_group_name(body, 'addSecurityGroup')
        try:
            instance = self.compute_api.get(context, id)
            self.compute_api.add_security_group(context, instance, group_name)
            self.sgh.trigger_instance_add_security_group_refresh(
                context, instance, group_name)
        except exception.SecurityGroupNotFound as exp:
            raise exc.HTTPNotFound(explanation=unicode(exp))
        except exception.InstanceNotFound as exp:
            raise exc.HTTPNotFound(explanation=unicode(exp))
        except exception.Invalid as exp:
            raise exc.HTTPBadRequest(explanation=unicode(exp))
        return webob.Response(status_int=202)

    @wsgi.action('removeSecurityGroup')
    def _removeSecurityGroup(self, req, id, body):
        """Detach the named security group from server *id*."""
        context = req.environ['nova.context']
        authorize(context)
        group_name = self._parse_group_name(body, 'removeSecurityGroup')
        try:
            instance = self.compute_api.get(context, id)
            self.compute_api.remove_security_group(context, instance,
                                                   group_name)
            self.sgh.trigger_instance_remove_security_group_refresh(
                context, instance, group_name)
        except exception.SecurityGroupNotFound as exp:
            raise exc.HTTPNotFound(explanation=unicode(exp))
        except exception.InstanceNotFound as exp:
            raise exc.HTTPNotFound(explanation=unicode(exp))
        except exception.Invalid as exp:
            raise exc.HTTPBadRequest(explanation=unicode(exp))
        return webob.Response(status_int=202)
class Security_groups(extensions.ExtensionDescriptor):
    """Security group support"""
    name = "SecurityGroups"
    alias = "security_groups"
    namespace = "http://docs.openstack.org/compute/ext/securitygroups/api/v1.1"
    updated = "2011-07-21T00:00:00+00:00"

    def get_controller_extensions(self):
        """Register the add/remove-security-group server actions."""
        return [extensions.ControllerExtension(
            self, 'servers', SecurityGroupActionController())]

    def get_resources(self):
        """Expose the group, rule, and per-server group resources."""
        return [
            extensions.ResourceExtension(
                'os-security-groups',
                controller=SecurityGroupController()),
            extensions.ResourceExtension(
                'os-security-group-rules',
                controller=SecurityGroupRulesController()),
            extensions.ResourceExtension(
                'os-security-groups',
                controller=ServerSecurityGroupController(),
                parent=dict(member_name='server',
                            collection_name='servers')),
        ]
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3659_1 |
crossvul-python_data_good_1656_0 | # Copyright (C) 2013, Red Hat, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import io
import logging
import select
import socket
import struct
import sys
import time
try: # Python 3.x
import http.client as httplib
import urllib.parse as urlparse
except ImportError: # Python 2.x
import httplib
import urlparse
import kdcproxy.codec as codec
from kdcproxy.config import MetaResolver
class HTTPException(Exception):
    """An HTTP response carrying status code, body, and headers.

    Also raised for the success path (code 200) so the WSGI entry point
    has a single place to build responses.
    """

    def __init__(self, code, msg, headers=()):
        # NOTE: default changed from a mutable [] to an immutable tuple;
        # the argument is copied into a fresh list below, so callers see
        # no behavioral difference.
        headers = [h for h in headers if h[0] != 'Content-Length']
        if 'Content-Type' not in dict(headers):
            headers.append(('Content-Type', 'text/plain; charset=utf-8'))
        if sys.version_info.major == 3 and isinstance(msg, str):
            # WSGI response bodies must be bytes on Python 3.
            msg = bytes(msg, "utf-8")
        headers.append(('Content-Length', str(len(msg))))
        super(HTTPException, self).__init__(code, msg, headers)
        self.code = code
        self.message = msg
        self.headers = headers

    def __str__(self):
        # e.g. "404 Not Found" — suitable for start_response().
        return "%d %s" % (self.code, httplib.responses[self.code])
class Application:
    """WSGI application proxying KDC requests to real KDC/kpasswd servers.

    Decodes the POSTed proxy message, resolves the realm to candidate
    servers, relays the request over TCP and/or UDP using non-blocking
    sockets, and returns the first complete reply.
    """
    # Reject request bodies larger than this many bytes.
    MAX_LENGTH = 128 * 1024
    SOCKTYPES = {
        "tcp": socket.SOCK_STREAM,
        "udp": socket.SOCK_DGRAM,
    }
    def __init__(self):
        self.__resolver = MetaResolver()
    def __await_reply(self, pr, rsocks, wsocks, timeout):
        """Pump the socket sets until some server produces a full reply.

        Returns the reply bytes (with 4-byte length prefix) or None on
        timeout/failure. Mutates rsocks/wsocks in place.
        """
        extra = 0
        read_buffers = {}
        while (timeout + extra) > time.time():
            if not wsocks and not rsocks:
                break
            r, w, x = select.select(rsocks, wsocks, rsocks + wsocks,
                                    (timeout + extra) - time.time())
            # Sockets in the exceptional set are dropped entirely.
            for sock in x:
                sock.close()
                try:
                    rsocks.remove(sock)
                except ValueError:
                    pass
                try:
                    wsocks.remove(sock)
                except ValueError:
                    pass
            for sock in w:
                try:
                    if self.sock_type(sock) == socket.SOCK_DGRAM:
                        # If we proxy over UDP, remove the 4-byte length
                        # prefix since it is TCP only.
                        sock.sendall(pr.request[4:])
                    else:
                        sock.sendall(pr.request)
                    extra = 10  # New connections get 10 extra seconds
                except Exception:
                    # NOTE(review): message says recv() but this is the
                    # send path.
                    logging.exception('Error in recv() of %s', sock)
                    continue
                # Request sent; now wait for this socket to become readable.
                rsocks.append(sock)
                wsocks.remove(sock)
            for sock in r:
                try:
                    reply = self.__handle_recv(sock, read_buffers)
                except Exception:
                    logging.exception('Error in recv() of %s', sock)
                    if self.sock_type(sock) == socket.SOCK_STREAM:
                        # Remove broken TCP socket from readers
                        rsocks.remove(sock)
                else:
                    if reply is not None:
                        return reply
        return None
    def __handle_recv(self, sock, read_buffers):
        """Read from *sock*; return a complete length-prefixed reply,
        or None if more data is still expected (TCP partial read)."""
        if self.sock_type(sock) == socket.SOCK_DGRAM:
            # For UDP sockets, recv() returns an entire datagram
            # package. KDC sends one datagram as reply.
            reply = sock.recv(1048576)
            # If we proxy over UDP, we will be missing the 4-byte
            # length prefix. So add it.
            reply = struct.pack("!I", len(reply)) + reply
            return reply
        else:
            # TCP is a different story. The reply must be buffered
            # until the full answer is accumulated.
            buf = read_buffers.get(sock)
            part = sock.recv(1048576)
            if buf is None:
                if len(part) > 4:
                    # got enough data in the initial package. Now check
                    # if we got the full package in the first run.
                    (length, ) = struct.unpack("!I", part[0:4])
                    if length + 4 == len(part):
                        return part
                read_buffers[sock] = buf = io.BytesIO()
            if part:
                # data received, accumulate it in a buffer
                buf.write(part)
                return None
            else:
                # EOF received
                read_buffers.pop(sock)
                reply = buf.getvalue()
                return reply
    def __filter_addr(self, addr):
        # Keep only IPv4/IPv6 TCP/UDP results from getaddrinfo().
        if addr[0] not in (socket.AF_INET, socket.AF_INET6):
            return False
        if addr[1] not in (socket.SOCK_STREAM, socket.SOCK_DGRAM):
            return False
        if addr[2] not in (socket.IPPROTO_TCP, socket.IPPROTO_UDP):
            return False
        return True
    def sock_type(self, sock):
        """Return the socket type with the non-blocking flag masked off."""
        try:
            return sock.type & ~socket.SOCK_NONBLOCK
        except AttributeError:
            # SOCK_NONBLOCK not available on this platform/Python.
            return sock.type
    def __call__(self, env, start_response):
        """WSGI entry point: proxy one request and build the response."""
        try:
            # Validate the method
            method = env["REQUEST_METHOD"].upper()
            if method != "POST":
                raise HTTPException(405, "Method not allowed (%s)." % method)
            # Parse the request
            try:
                length = int(env["CONTENT_LENGTH"])
            except AttributeError:
                # NOTE(review): a missing CONTENT_LENGTH key raises
                # KeyError and a malformed value raises ValueError;
                # neither is caught by this handler — confirm intent.
                raise HTTPException(411, "Length required.")
            if length < 0:
                raise HTTPException(411, "Length required.")
            if length > self.MAX_LENGTH:
                raise HTTPException(413, "Request entity too large.")
            try:
                pr = codec.decode(env["wsgi.input"].read(length))
            except codec.ParsingError as e:
                raise HTTPException(400, e.message)
            # Find the remote proxy
            servers = self.__resolver.lookup(
                pr.realm,
                kpasswd=isinstance(pr, codec.KPASSWDProxyRequest)
            )
            if not servers:
                raise HTTPException(503, "Can't find remote (%s)." % pr)
            # Contact the remote server
            reply = None
            wsocks = []
            rsocks = []
            for server in map(urlparse.urlparse, servers):
                # Enforce valid, supported URIs
                scheme = server.scheme.lower().split("+", 1)
                if scheme[0] not in ("kerberos", "kpasswd"):
                    continue
                if len(scheme) > 1 and scheme[1] not in ("tcp", "udp"):
                    continue
                # Do the DNS lookup
                try:
                    port = server.port
                    if port is None:
                        port = scheme[0]
                    addrs = socket.getaddrinfo(server.hostname, port)
                except socket.gaierror:
                    continue
                # Sort addresses so that we get TCP first.
                #
                # Stick a None address on the end so we can get one
                # more attempt after all servers have been contacted.
                addrs = tuple(sorted(filter(self.__filter_addr, addrs)))
                for addr in addrs + (None,):
                    if addr is not None:
                        # Bypass unspecified socktypes
                        if (len(scheme) > 1
                                and addr[1] != self.SOCKTYPES[scheme[1]]):
                            continue
                        # Create the socket
                        sock = socket.socket(*addr[:3])
                        sock.setblocking(0)
                        # Connect
                        try:
                            # In Python 2.x, non-blocking connect() throws
                            # socket.error() with errno == EINPROGRESS. In
                            # Python 3.x, it throws io.BlockingIOError().
                            sock.connect(addr[4])
                        except socket.error as e:
                            if e.errno != 115:  # errno != EINPROGRESS
                                sock.close()
                                continue
                        except io.BlockingIOError:
                            pass
                        wsocks.append(sock)
                    # Resend packets to UDP servers
                    for sock in tuple(rsocks):
                        if self.sock_type(sock) == socket.SOCK_DGRAM:
                            wsocks.append(sock)
                            rsocks.remove(sock)
                    # Call select()
                    timeout = time.time() + (15 if addr is None else 2)
                    reply = self.__await_reply(pr, rsocks, wsocks, timeout)
                    if reply is not None:
                        break
                if reply is not None:
                    break
            for sock in rsocks + wsocks:
                sock.close()
            if reply is None:
                raise HTTPException(503, "Remote unavailable (%s)." % pr)
            # Return the result to the client
            raise HTTPException(200, codec.encode(reply),
                                [("Content-Type", "application/kerberos")])
        except HTTPException as e:
            start_response(str(e), e.headers)
            return [e.message]
# Module-level WSGI entry point expected by the server (e.g. mod_wsgi).
application = Application()
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_1656_0 |
crossvul-python_data_good_3660_2 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2011 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import context
from nova import db
from nova import flags
from nova import log as logging
from nova.openstack.common import cfg
from nova import utils
from nova.virt import netutils
LOG = logging.getLogger(__name__)
# Flag controlling whether instances on the same network may exchange
# traffic without an explicit security group rule.
allow_same_net_traffic_opt = cfg.BoolOpt('allow_same_net_traffic',
        default=True,
        help='Whether to allow network traffic from same network')
FLAGS = flags.FLAGS
FLAGS.register_opt(allow_same_net_traffic_opt)
class FirewallDriver(object):
    """Firewall Driver base class.

    Defines methods that any driver providing security groups
    and provider firewall functionality should implement.
    """
    def prepare_instance_filter(self, instance, network_info):
        """Prepare filters for the instance.
        At this point, the instance isn't running yet."""
        raise NotImplementedError()
    def unfilter_instance(self, instance, network_info):
        """Stop filtering instance"""
        raise NotImplementedError()
    def apply_instance_filter(self, instance, network_info):
        """Apply instance filter.
        Once this method returns, the instance should be firewalled
        appropriately. This method should as far as possible be a
        no-op. It's vastly preferred to get everything set up in
        prepare_instance_filter.
        """
        raise NotImplementedError()
    def refresh_security_group_rules(self, security_group_id):
        """Refresh security group rules from data store
        Gets called when a rule has been added to or removed from
        the security group."""
        raise NotImplementedError()
    def refresh_security_group_members(self, security_group_id):
        """Refresh security group members from data store
        Gets called when an instance gets added to or removed from
        the security group."""
        raise NotImplementedError()
    def refresh_provider_fw_rules(self):
        """Refresh common rules for all hosts/instances from data store.
        Gets called when a rule has been added to or removed from
        the list of rules (via admin api).
        """
        raise NotImplementedError()
    def setup_basic_filtering(self, instance, network_info):
        """Create rules to block spoofing and allow dhcp.
        This gets called when spawning an instance, before
        :py:meth:`prepare_instance_filter`.
        """
        raise NotImplementedError()
    def instance_filter_exists(self, instance, network_info):
        """Check nova-instance-instance-xxx exists"""
        raise NotImplementedError()
    def _handle_network_info_model(self, network_info):
        # make sure this is legacy network_info
        try:
            return network_info.legacy()
        except AttributeError:
            # no "legacy" function means network_info is legacy
            return network_info
class IptablesFirewallDriver(FirewallDriver):
"""Driver which enforces security groups through iptables rules."""
    def __init__(self, **kwargs):
        # NOTE(review): local import — presumably avoids an import cycle
        # with nova.network; confirm before hoisting to module level.
        from nova.network import linux_net
        self.iptables = linux_net.iptables_manager
        # instance id -> instance / network_info caches used when rules
        # are rebuilt later.
        self.instances = {}
        self.network_infos = {}
        self.basicly_filtered = False
        # Fallback chains drop any traffic that misses an instance chain.
        self.iptables.ipv4['filter'].add_chain('sg-fallback')
        self.iptables.ipv4['filter'].add_rule('sg-fallback', '-j DROP')
        self.iptables.ipv6['filter'].add_chain('sg-fallback')
        self.iptables.ipv6['filter'].add_rule('sg-fallback', '-j DROP')
    def setup_basic_filtering(self, instance, network_info):
        """No-op for this driver."""
        pass
    def apply_instance_filter(self, instance, network_info):
        """No-op. Everything is done in prepare_instance_filter."""
        pass
    def unfilter_instance(self, instance, network_info):
        """Drop caches and iptables chains for *instance*, if filtered."""
        # make sure this is legacy nw_info
        network_info = self._handle_network_info_model(network_info)
        if self.instances.pop(instance['id'], None):
            # NOTE(vish): use the passed info instead of the stored info
            self.network_infos.pop(instance['id'])
            self.remove_filters_for_instance(instance)
            self.iptables.apply()
        else:
            LOG.info(_('Attempted to unfilter instance which is not '
                     'filtered'), instance=instance)
    def prepare_instance_filter(self, instance, network_info):
        """Cache instance info and install its iptables chains/rules."""
        # make sure this is legacy nw_info
        network_info = self._handle_network_info_model(network_info)
        self.instances[instance['id']] = instance
        self.network_infos[instance['id']] = network_info
        self.add_filters_for_instance(instance)
        LOG.debug(_('Filters added to instance'), instance=instance)
        self.refresh_provider_fw_rules()
        LOG.debug(_('Provider Firewall Rules refreshed'), instance=instance)
        self.iptables.apply()
def _create_filter(self, ips, chain_name):
return ['-d %s -j $%s' % (ip, chain_name) for ip in ips]
    def _filters_for_instance(self, chain_name, network_info):
        """Creates a rule corresponding to each ip that defines a
        jump to the corresponding instance - chain for all the traffic
        destined to that ip."""
        # make sure this is legacy nw_info
        network_info = self._handle_network_info_model(network_info)
        ips_v4 = [ip['ip'] for (_n, mapping) in network_info
                  for ip in mapping['ips']]
        ipv4_rules = self._create_filter(ips_v4, chain_name)
        ipv6_rules = []
        if FLAGS.use_ipv6:
            ips_v6 = [ip['ip'] for (_n, mapping) in network_info
                      for ip in mapping['ip6s']]
            ipv6_rules = self._create_filter(ips_v6, chain_name)
        return ipv4_rules, ipv6_rules
    def _add_filters(self, chain_name, ipv4_rules, ipv6_rules):
        """Install the given rules into the v4 (and optionally v6) chain."""
        for rule in ipv4_rules:
            self.iptables.ipv4['filter'].add_rule(chain_name, rule)
        if FLAGS.use_ipv6:
            for rule in ipv6_rules:
                self.iptables.ipv6['filter'].add_rule(chain_name, rule)
    def add_filters_for_instance(self, instance):
        """Create the instance chain and populate local + instance rules."""
        network_info = self.network_infos[instance['id']]
        chain_name = self._instance_chain_name(instance)
        if FLAGS.use_ipv6:
            self.iptables.ipv6['filter'].add_chain(chain_name)
        self.iptables.ipv4['filter'].add_chain(chain_name)
        # Jump rules in 'local' steer instance-bound traffic to its chain.
        ipv4_rules, ipv6_rules = self._filters_for_instance(chain_name,
                                                            network_info)
        self._add_filters('local', ipv4_rules, ipv6_rules)
        ipv4_rules, ipv6_rules = self.instance_rules(instance, network_info)
        self._add_filters(chain_name, ipv4_rules, ipv6_rules)
    def remove_filters_for_instance(self, instance):
        """Delete the per-instance iptables chain(s)."""
        chain_name = self._instance_chain_name(instance)
        self.iptables.ipv4['filter'].remove_chain(chain_name)
        if FLAGS.use_ipv6:
            self.iptables.ipv6['filter'].remove_chain(chain_name)
@staticmethod
def _security_group_chain_name(security_group_id):
return 'nova-sg-%s' % (security_group_id,)
def _instance_chain_name(self, instance):
return 'inst-%s' % (instance['id'],)
def _do_basic_rules(self, ipv4_rules, ipv6_rules, network_info):
# Always drop invalid packets
ipv4_rules += ['-m state --state ' 'INVALID -j DROP']
ipv6_rules += ['-m state --state ' 'INVALID -j DROP']
# Allow established connections
ipv4_rules += ['-m state --state ESTABLISHED,RELATED -j ACCEPT']
ipv6_rules += ['-m state --state ESTABLISHED,RELATED -j ACCEPT']
# Pass through provider-wide drops
ipv4_rules += ['-j $provider']
ipv6_rules += ['-j $provider']
def _do_dhcp_rules(self, ipv4_rules, network_info):
    """Allow DHCP replies (udp sport 67 -> dport 68) from each server."""
    # Normalize to the legacy network-info representation first.
    network_info = self._handle_network_info_model(network_info)
    for _net, mapping in network_info:
        server = mapping['dhcp_server']
        if server:
            ipv4_rules.append('-s %s -p udp --sport 67 --dport 68 '
                              '-j ACCEPT' % (server,))
def _do_project_network_rules(self, ipv4_rules, ipv6_rules, network_info):
    """Accept all traffic originating from the instance's own networks."""
    # Normalize to the legacy network-info representation first.
    network_info = self._handle_network_info_model(network_info)
    for network, _info in network_info:
        ipv4_rules.append('-s %s -j ACCEPT' % (network['cidr'],))
    if FLAGS.use_ipv6:
        for network, _info in network_info:
            ipv6_rules.append('-s %s -j ACCEPT' % (network['cidr_v6'],))
def _do_ra_rules(self, ipv6_rules, network_info):
    """Accept ICMPv6 router advertisements from each network gateway."""
    # Normalize to the legacy network-info representation first.
    network_info = self._handle_network_info_model(network_info)
    for _net, mapping in network_info:
        ipv6_rules.append(
            '-s %s/128 -p icmpv6 -j ACCEPT' % (mapping['gateway_v6'],))
def _build_icmp_rule(self, rule, version):
icmp_type = rule.from_port
icmp_code = rule.to_port
if icmp_type == -1:
icmp_type_arg = None
else:
icmp_type_arg = '%s' % icmp_type
if not icmp_code == -1:
icmp_type_arg += '/%s' % icmp_code
if icmp_type_arg:
if version == 4:
return ['-m', 'icmp', '--icmp-type', icmp_type_arg]
elif version == 6:
return ['-m', 'icmp6', '--icmpv6-type', icmp_type_arg]
# return empty list if icmp_type == -1
return []
def _build_tcp_udp_rule(self, rule, version):
if rule.from_port == rule.to_port:
return ['--dport', '%s' % (rule.from_port,)]
else:
return ['-m', 'multiport',
'--dports', '%s:%s' % (rule.from_port,
rule.to_port)]
def instance_rules(self, instance, network_info):
    """Compile the firewall rules for one instance.

    Returns ``(ipv4_rules, ipv6_rules)`` as lists of iptables rule
    strings.  Hits the database for the instance's security groups and,
    for grantee-group rules, the network API for member addresses.
    """
    # make sure this is legacy nw_info
    network_info = self._handle_network_info_model(network_info)
    ctxt = context.get_admin_context()

    ipv4_rules = []
    ipv6_rules = []

    # Initialize with basic rules
    self._do_basic_rules(ipv4_rules, ipv6_rules, network_info)
    # Set up rules to allow traffic to/from DHCP server
    self._do_dhcp_rules(ipv4_rules, network_info)

    # Allow project network traffic
    if FLAGS.allow_same_net_traffic:
        self._do_project_network_rules(ipv4_rules, ipv6_rules,
                                       network_info)
    # We wrap these in FLAGS.use_ipv6 because they might cause
    # a DB lookup. The other ones are just list operations, so
    # they're not worth the clutter.
    if FLAGS.use_ipv6:
        # Allow RA responses
        self._do_ra_rules(ipv6_rules, network_info)

    security_groups = db.security_group_get_by_instance(ctxt,
                                                        instance['id'])

    # then, security group chains and rules
    for security_group in security_groups:
        rules = db.security_group_rule_get_by_security_group(
            ctxt, security_group['id'])

        for rule in rules:
            LOG.debug(_('Adding security group rule: %r'), rule,
                      instance=instance)

            if not rule.cidr:
                version = 4
            else:
                version = netutils.get_ip_version(rule.cidr)

            if version == 4:
                fw_rules = ipv4_rules
            else:
                fw_rules = ipv6_rules

            # FIX: guard against a NULL protocol; rule.protocol.lower()
            # raised AttributeError for protocol-less rules even though
            # the 'if protocol:' check below anticipates a falsy value.
            protocol = rule.protocol and rule.protocol.lower()

            if version == 6 and protocol == 'icmp':
                protocol = 'icmpv6'

            args = ['-j ACCEPT']
            if protocol:
                args += ['-p', protocol]

            if protocol in ['udp', 'tcp']:
                args += self._build_tcp_udp_rule(rule, version)
            elif protocol == 'icmp':
                args += self._build_icmp_rule(rule, version)

            if rule.cidr:
                LOG.debug('Using cidr %r', rule.cidr, instance=instance)
                args += ['-s', rule.cidr]
                fw_rules += [' '.join(args)]
            else:
                if rule['grantee_group']:
                    # FIXME(jkoelker) This needs to be ported up into
                    #                 the compute manager which already
                    #                 has access to a nw_api handle,
                    #                 and should be the only one making
                    #                 making rpc calls.
                    import nova.network
                    nw_api = nova.network.API()
                    # FIX: use a distinct loop variable; the previous
                    # code rebound 'instance', clobbering the method
                    # argument for the rest of the enclosing loops.
                    for member in rule['grantee_group']['instances']:
                        nw_info = nw_api.get_instance_nw_info(ctxt,
                                                              member)

                        ips = [ip['address']
                               for ip in nw_info.fixed_ips()
                               if ip['version'] == version]

                        LOG.debug('ips: %r', ips, instance=instance)
                        for ip in ips:
                            subrule = args + ['-s %s' % ip]
                            fw_rules += [' '.join(subrule)]

        LOG.debug('Using fw_rules: %r', fw_rules, instance=instance)

    # Anything not matched above falls through to the fallback chain.
    ipv4_rules += ['-j $sg-fallback']
    ipv6_rules += ['-j $sg-fallback']

    return ipv4_rules, ipv6_rules
def instance_filter_exists(self, instance, network_info):
    """Deliberate no-op (returns None): the iptables driver tracks its
    own filter state, so there is nothing to poll for here."""
    pass
def refresh_security_group_members(self, security_group):
    """Rebuild all instance chains after a group-membership change and
    apply the new ruleset."""
    self.do_refresh_security_group_rules(security_group)
    self.iptables.apply()
def refresh_security_group_rules(self, security_group):
    """Rebuild all instance chains after a rule change and apply the
    new ruleset."""
    self.do_refresh_security_group_rules(security_group)
    self.iptables.apply()
@utils.synchronized('iptables', external=True)
def do_refresh_security_group_rules(self, security_group):
    """Re-create every known instance's chain under the iptables lock.

    NOTE(review): security_group is unused — all instances are
    refreshed regardless of which group changed; verify this is
    intentional before optimizing.
    """
    for instance in self.instances.values():
        self.remove_filters_for_instance(instance)
        self.add_filters_for_instance(instance)
def refresh_provider_fw_rules(self):
    """See :class:`FirewallDriver` docs.

    Rebuilds the provider chains from the database, then applies.
    """
    self._do_refresh_provider_fw_rules()
    self.iptables.apply()
@utils.synchronized('iptables', external=True)
def _do_refresh_provider_fw_rules(self):
    """Internal, synchronized version of refresh_provider_fw_rules."""
    # Empty the chains first so the rebuild starts from a clean slate.
    self._purge_provider_fw_rules()
    self._build_provider_fw_rules()
def _purge_provider_fw_rules(self):
    """Remove all rules from the provider chains (the chains remain)."""
    self.iptables.ipv4['filter'].empty_chain('provider')
    if FLAGS.use_ipv6:
        self.iptables.ipv6['filter'].empty_chain('provider')
def _build_provider_fw_rules(self):
    """Create all rules for the provider IP DROPs."""
    v4_filter = self.iptables.ipv4['filter']
    v4_filter.add_chain('provider')
    if FLAGS.use_ipv6:
        self.iptables.ipv6['filter'].add_chain('provider')

    ipv4_rules, ipv6_rules = self._provider_rules()
    for entry in ipv4_rules:
        v4_filter.add_rule('provider', entry)
    if FLAGS.use_ipv6:
        for entry in ipv6_rules:
            self.iptables.ipv6['filter'].add_rule('provider', entry)
@staticmethod
def _provider_rules():
    """Generate a list of rules from provider for IP4 & IP6.

    Reads provider firewall rules from the database and translates each
    into an iptables DROP rule string.  Returns (ipv4_rules, ipv6_rules).
    """
    ctxt = context.get_admin_context()
    ipv4_rules = []
    ipv6_rules = []
    rules = db.provider_fw_rule_get_all(ctxt)
    for rule in rules:
        LOG.debug(_('Adding provider rule: %s'), rule['cidr'])
        version = netutils.get_ip_version(rule['cidr'])
        # Pick the target list for this rule's address family.
        if version == 4:
            fw_rules = ipv4_rules
        else:
            fw_rules = ipv6_rules
        protocol = rule['protocol']
        if version == 6 and protocol == 'icmp':
            protocol = 'icmpv6'
        args = ['-p', protocol, '-s', rule['cidr']]
        if protocol in ['udp', 'tcp']:
            # Single port uses --dport; a range needs multiport.
            if rule['from_port'] == rule['to_port']:
                args += ['--dport', '%s' % (rule['from_port'],)]
            else:
                args += ['-m', 'multiport',
                         '--dports', '%s:%s' % (rule['from_port'],
                                                rule['to_port'])]
        elif protocol == 'icmp':
            # from_port/to_port carry the ICMP type/code; -1 means "any".
            icmp_type = rule['from_port']
            icmp_code = rule['to_port']

            if icmp_type == -1:
                icmp_type_arg = None
            else:
                icmp_type_arg = '%s' % icmp_type
                if not icmp_code == -1:
                    icmp_type_arg += '/%s' % icmp_code

            if icmp_type_arg:
                if version == 4:
                    args += ['-m', 'icmp', '--icmp-type',
                             icmp_type_arg]
                elif version == 6:
                    args += ['-m', 'icmp6', '--icmpv6-type',
                             icmp_type_arg]
        args += ['-j DROP']
        fw_rules += [' '.join(args)]
    return ipv4_rules, ipv6_rules
class NoopFirewallDriver(object):
    """Firewall driver which just provides No-op methods."""

    def __init__(*args, **kwargs):
        """Accept and ignore any constructor arguments."""
        pass

    def _noop(*args, **kwargs):
        """Shared do-nothing stand-in for every driver method."""
        pass

    def __getattr__(self, name):
        # Any attribute not defined on the class resolves to the no-op.
        return self._noop

    def instance_filter_exists(self, instance, network_info):
        # Report filters as present so callers never block waiting.
        return True
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3660_2 |
crossvul-python_data_good_2816_0 | # -*- coding: utf-8 -*-
'''
The crypt module manages all of the cryptography functions for minions and
masters, encrypting and decrypting payloads, preparing messages, and
authenticating peers
'''
# Import python libs
from __future__ import absolute_import, print_function
import os
import sys
import copy
import time
import hmac
import base64
import hashlib
import logging
import stat
import traceback
import binascii
import weakref
import getpass
# Import third party libs
import salt.ext.six as six
from salt.ext.six.moves import zip # pylint: disable=import-error,redefined-builtin
try:
from Cryptodome.Cipher import AES, PKCS1_OAEP
from Cryptodome.Hash import SHA
from Cryptodome.PublicKey import RSA
from Cryptodome.Signature import PKCS1_v1_5
import Cryptodome.Random # pylint: disable=W0611
CDOME = True
except ImportError:
CDOME = False
if not CDOME:
try:
from Crypto.Cipher import AES, PKCS1_OAEP
from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
# let this be imported, if possible
import Crypto.Random # pylint: disable=W0611
except ImportError:
# No need for crypt in local mode
pass
# Import salt libs
import salt.defaults.exitcodes
import salt.utils
import salt.utils.decorators
import salt.payload
import salt.transport.client
import salt.transport.frame
import salt.utils.rsax931
import salt.utils.verify
import salt.version
from salt.exceptions import (
AuthenticationError, SaltClientError, SaltReqTimeoutError
)
import tornado.gen
log = logging.getLogger(__name__)
def dropfile(cachedir, user=None):
    '''
    Set an AES dropfile to request the master update the publish session key

    :param str cachedir: Directory in which the ``.dfn`` marker file lives
    :param str user: Optional system user who should own the dropfile
    '''
    dfn = os.path.join(cachedir, '.dfn')
    # set a mask (to avoid a race condition on file creation) and store original.
    # 191 == 0o277, so the file is born with at most owner-read permission.
    mask = os.umask(191)
    try:
        log.info('Rotating AES key')
        if os.path.isfile(dfn):
            log.info('AES key rotation already requested')
            return

        # NOTE(review): unreachable after the early return above unless the
        # file appeared between the two checks — looks like a belt-and-braces
        # guard for that race; confirm before simplifying.
        if os.path.isfile(dfn) and not os.access(dfn, os.W_OK):
            os.chmod(dfn, stat.S_IRUSR | stat.S_IWUSR)
        # An empty dropfile is the signal; its presence is all that matters.
        with salt.utils.fopen(dfn, 'wb+') as fp_:
            fp_.write(b'')
        os.chmod(dfn, stat.S_IRUSR)
        if user:
            try:
                import pwd
                uid = pwd.getpwnam(user).pw_uid
                os.chown(dfn, uid, -1)
            except (KeyError, ImportError, OSError, IOError):
                # Best effort: missing user / non-POSIX platform is tolerated.
                pass
    finally:
        os.umask(mask)  # restore original umask
def gen_keys(keydir, keyname, keysize, user=None):
    '''
    Generate a RSA public keypair for use with salt

    :param str keydir: The directory to write the keypair to
    :param str keyname: The type of salt server for whom this key should be written. (i.e. 'master' or 'minion')
    :param int keysize: The number of bits in the key
    :param str user: The user on the system who should own this keypair
    :rtype: str
    :return: Path on the filesystem to the RSA private key
    '''
    base = os.path.join(keydir, keyname)
    priv = '{0}.pem'.format(base)
    pub = '{0}.pub'.format(base)

    salt.utils.reinit_crypto()
    gen = RSA.generate(bits=keysize, e=65537)
    if os.path.isfile(priv):
        # Between first checking and the generation another process has made
        # a key! Use the winner's key
        return priv

    # Do not try writing anything, if directory has no permissions.
    if not os.access(keydir, os.W_OK):
        raise IOError('Write access denied to "{0}" for user "{1}".'.format(os.path.abspath(keydir), getpass.getuser()))

    # 191 == 0o277: the private key is created without group/other access.
    cumask = os.umask(191)
    with salt.utils.fopen(priv, 'wb+') as f:
        f.write(gen.exportKey('PEM'))
    os.umask(cumask)
    with salt.utils.fopen(pub, 'wb+') as f:
        f.write(gen.publickey().exportKey('PEM'))
    # 256 == 0o400: lock the private key down to owner read-only.
    os.chmod(priv, 256)
    if user:
        try:
            import pwd
            uid = pwd.getpwnam(user).pw_uid
            os.chown(priv, uid, -1)
            os.chown(pub, uid, -1)
        except (KeyError, ImportError, OSError):
            # The specified user was not found, allow the backup systems to
            # report the error
            pass
    return priv
@salt.utils.decorators.memoize
def _get_key_with_evict(path, timestamp):
    '''
    Load the RSA key at *path* from disk.

    *timestamp* is the file's last-modification time and serves only as
    part of the memoization key: while the mtime is unchanged the cached
    key object is returned; once the file changes, the new
    (path, timestamp) pair misses the cache and the key is re-read.
    '''
    log.debug('salt.crypt._get_key_with_evict: Loading private key')
    with salt.utils.fopen(path) as keyfile:
        return RSA.importKey(keyfile.read())
def _get_rsa_key(path):
    '''
    Return the RSA key stored at *path*, with a poor-man's cache.

    Delegates to the memoized _get_key_with_evict keyed on the file's
    mtime, so the key is only re-read from disk after the file changes.
    '''
    log.debug('salt.crypt._get_rsa_key: Loading private key')
    mtime = str(os.path.getmtime(path))
    return _get_key_with_evict(path, mtime)
def sign_message(privkey_path, message):
    '''
    Use Crypto.Signature.PKCS1_v1_5 to sign a message. Returns the signature.
    '''
    rsa_key = _get_rsa_key(privkey_path)
    log.debug('salt.crypt.sign_message: Signing message.')
    return PKCS1_v1_5.new(rsa_key).sign(SHA.new(message))
def verify_signature(pubkey_path, message, signature):
    '''
    Use Crypto.Signature.PKCS1_v1_5 to verify the signature on a message.
    Returns True for valid signature.
    '''
    log.debug('salt.crypt.verify_signature: Loading public key')
    with salt.utils.fopen(pubkey_path) as keyfile:
        pubkey = RSA.importKey(keyfile.read())
    log.debug('salt.crypt.verify_signature: Verifying signature')
    return PKCS1_v1_5.new(pubkey).verify(SHA.new(message), signature)
def gen_signature(priv_path, pub_path, sign_path):
    '''
    creates a signature for the given public-key with
    the given private key and writes it to sign_path

    :param str priv_path: Path to the signing private key
    :param str pub_path: Path to the public key being signed
    :param str sign_path: Destination for the base64-encoded signature
    :rtype: bool
    :return: False if sign_path already exists, True otherwise
    '''
    with salt.utils.fopen(pub_path) as fp_:
        mpub_64 = fp_.read()

    mpub_sig = sign_message(priv_path, mpub_64)
    mpub_sig_64 = binascii.b2a_base64(mpub_sig)
    if os.path.isfile(sign_path):
        # Never overwrite an existing signature file.
        return False
    log.trace('Calculating signature for {0} with {1}'
              .format(os.path.basename(pub_path),
                      os.path.basename(priv_path)))

    # NOTE(review): this re-check can only be True if the file appeared
    # between the two isfile() calls — apparently a race guard; confirm
    # before simplifying.
    if os.path.isfile(sign_path):
        log.trace('Signature file {0} already exists, please '
                  'remove it first and try again'.format(sign_path))
    else:
        with salt.utils.fopen(sign_path, 'wb+') as sig_f:
            sig_f.write(salt.utils.to_bytes(mpub_sig_64))
        log.trace('Wrote signature to {0}'.format(sign_path))
    return True
def private_encrypt(key, message):
    '''
    Generate an M2Crypto-compatible signature

    :param Crypto.PublicKey.RSA._RSAobj key: The RSA key object
    :param str message: The message to sign
    :rtype: str
    :return: The signature, or an empty string if the signature operation failed
    '''
    pem = key.exportKey('PEM')
    return salt.utils.rsax931.RSAX931Signer(pem).sign(message)
def public_decrypt(pub, message):
    '''
    Verify an M2Crypto-compatible signature

    :param Crypto.PublicKey.RSA._RSAobj key: The RSA public key object
    :param str message: The signed message to verify
    :rtype: str
    :return: The message (or digest) recovered from the signature, or an
        empty string if the verification failed
    '''
    pem = pub.exportKey('PEM')
    return salt.utils.rsax931.RSAX931Verifier(pem).verify(message)
class MasterKeys(dict):
    '''
    The Master Keys class is used to manage the RSA public key pair used for
    authentication by the master.

    It also generates a signing key-pair if enabled with master_sign_key_name.
    '''
    def __init__(self, opts):
        super(MasterKeys, self).__init__()
        self.opts = opts
        self.pub_path = os.path.join(self.opts['pki_dir'], 'master.pub')
        self.rsa_path = os.path.join(self.opts['pki_dir'], 'master.pem')

        self.key = self.__get_keys()

        self.pub_signature = None

        # set names for the signing key-pairs
        if opts['master_sign_pubkey']:

            # if only the signature is available, use that
            if opts['master_use_pubkey_signature']:
                self.sig_path = os.path.join(self.opts['pki_dir'],
                                             opts['master_pubkey_signature'])
                if os.path.isfile(self.sig_path):
                    with salt.utils.fopen(self.sig_path) as fp_:
                        self.pub_signature = fp_.read()
                    log.info('Read {0}\'s signature from {1}'
                             ''.format(os.path.basename(self.pub_path),
                                       self.opts['master_pubkey_signature']))
                else:
                    log.error('Signing the master.pub key with a signature is enabled '
                              'but no signature file found at the defined location '
                              '{0}'.format(self.sig_path))
                    log.error('The signature-file may be either named differently '
                              'or has to be created with \'salt-key --gen-signature\'')
                    sys.exit(1)

            # create a new signing key-pair to sign the masters
            # auth-replies when a minion tries to connect
            else:
                self.pub_sign_path = os.path.join(self.opts['pki_dir'],
                                                  opts['master_sign_key_name'] + '.pub')
                self.rsa_sign_path = os.path.join(self.opts['pki_dir'],
                                                  opts['master_sign_key_name'] + '.pem')
                self.sign_key = self.__get_keys(name=opts['master_sign_key_name'])

    # We need __setstate__ and __getstate__ to avoid pickling errors since
    # some of the member variables correspond to Cython objects which are
    # not picklable.
    # These methods are only used when pickling so will not be used on
    # non-Windows platforms.
    def __setstate__(self, state):
        self.__init__(state['opts'])

    def __getstate__(self):
        return {'opts': self.opts}

    def __get_keys(self, name='master'):
        '''
        Returns a key object for a key in the pki-dir

        :param str name: Basename of the keypair ('master' or the
            configured signing-key name)
        '''
        path = os.path.join(self.opts['pki_dir'],
                            name + '.pem')
        if os.path.exists(path):
            with salt.utils.fopen(path) as f:
                key = RSA.importKey(f.read())
            log.debug('Loaded {0} key: {1}'.format(name, path))
        else:
            log.info('Generating {0} keys: {1}'.format(name, self.opts['pki_dir']))
            gen_keys(self.opts['pki_dir'],
                     name,
                     self.opts['keysize'],
                     self.opts.get('user'))
            # FIX: read back the key that was just generated for *name*;
            # the previous code re-read self.rsa_path (always master.pem),
            # returning the wrong key object for the signing key-pair.
            with salt.utils.fopen(path) as f:
                key = RSA.importKey(f.read())
        return key

    def get_pub_str(self, name='master'):
        '''
        Return the string representation of a public key
        in the pki-directory

        :param str name: Basename of the keypair to read/create
        '''
        path = os.path.join(self.opts['pki_dir'],
                            name + '.pub')
        if not os.path.isfile(path):
            # FIX: load/generate the keypair matching *name*; the previous
            # code called self.__get_keys() with the default and could
            # write the master public key under another key's name.
            key = self.__get_keys(name=name)
            with salt.utils.fopen(path, 'wb+') as wfh:
                wfh.write(key.publickey().exportKey('PEM'))
        with salt.utils.fopen(path) as rfh:
            return rfh.read()

    def get_mkey_paths(self):
        '''Return (public, private) key paths of the master keypair.'''
        return self.pub_path, self.rsa_path

    def get_sign_paths(self):
        '''Return (public, private) key paths of the signing keypair.'''
        return self.pub_sign_path, self.rsa_sign_path

    def pubkey_signature(self):
        '''
        returns the base64 encoded signature from the signature file
        or None if the master has its own signing keys
        '''
        return self.pub_signature
class AsyncAuth(object):
'''
Set up an Async object to maintain authentication with the salt master
'''
# This class is only a singleton per minion/master pair
# mapping of io_loop -> {key -> auth}
instance_map = weakref.WeakKeyDictionary()
# mapping of key -> creds
creds_map = {}
def __new__(cls, opts, io_loop=None):
    '''
    Only create one instance of AsyncAuth per __key()

    :param dict opts: Minion configuration options
    :param io_loop: tornado IOLoop to run callbacks on (defaults to the
        current loop)
    '''
    # do we have any mapping for this io_loop
    io_loop = io_loop or tornado.ioloop.IOLoop.current()
    if io_loop not in AsyncAuth.instance_map:
        AsyncAuth.instance_map[io_loop] = weakref.WeakValueDictionary()
    loop_instance_map = AsyncAuth.instance_map[io_loop]

    key = cls.__key(opts)
    auth = loop_instance_map.get(key)
    if auth is None:
        log.debug('Initializing new AsyncAuth for {0}'.format(key))
        # we need to make a local variable for this, as we are going to store
        # it in a WeakValueDictionary-- which will remove the item if no one
        # references it-- this forces a reference while we return to the caller
        auth = object.__new__(cls)
        auth.__singleton_init__(opts, io_loop=io_loop)
        loop_instance_map[key] = auth
    else:
        log.debug('Re-using AsyncAuth for {0}'.format(key))
    return auth
@classmethod
def __key(cls, opts, io_loop=None):
    """Singleton cache key: one AsyncAuth per (pki dir, minion id, master)."""
    return (opts['pki_dir'],     # where the keys are stored
            opts['id'],          # minion ID
            opts['master_uri'])  # master ID
# has to remain empty for singletons, since __init__ will *always* be called
def __init__(self, opts, io_loop=None):
    '''No-op: real initialization happens once in __singleton_init__.'''
    pass
# an init for the singleton instance to call
def __singleton_init__(self, opts, io_loop=None):
    '''
    Init an Auth instance

    :param dict opts: Options for this server
    :return: Auth instance
    :rtype: Auth
    '''
    self.opts = opts
    # The one-shot token sent to the master; must be bytes on PY3.
    if six.PY2:
        self.token = Crypticle.generate_key_string()
    else:
        self.token = salt.utils.to_bytes(Crypticle.generate_key_string())
    self.serial = salt.payload.Serial(self.opts)
    self.pub_path = os.path.join(self.opts['pki_dir'], 'minion.pub')
    self.rsa_path = os.path.join(self.opts['pki_dir'], 'minion.pem')
    # Syndics cache the master key under a distinct name.
    if self.opts['__role'] == 'syndic':
        self.mpub = 'syndic_master.pub'
    else:
        self.mpub = 'minion_master.pub'
    if not os.path.isfile(self.pub_path):
        # Generate the minion keypair on first run.
        self.get_keys()

    self.io_loop = io_loop or tornado.ioloop.IOLoop.current()

    salt.utils.reinit_crypto()
    key = self.__key(self.opts)
    # TODO: if we already have creds for this key, lets just re-use
    if key in AsyncAuth.creds_map:
        # Reuse cached credentials and mark authentication as complete.
        creds = AsyncAuth.creds_map[key]
        self._creds = creds
        self._crypticle = Crypticle(self.opts, creds['aes'])
        self._authenticate_future = tornado.concurrent.Future()
        self._authenticate_future.set_result(True)
    else:
        self.authenticate()
def __deepcopy__(self, memo):
    '''Deep-copy this AsyncAuth, recreating rather than copying the io_loop.'''
    cls = self.__class__
    result = cls.__new__(cls, copy.deepcopy(self.opts, memo), io_loop=None)
    memo[id(self)] = result
    for key in self.__dict__:
        if key in ('io_loop',):
            # The io_loop has a thread Lock which will fail to be deep
            # copied. Skip it because it will just be recreated on the
            # new copy.
            continue
        setattr(result, key, copy.deepcopy(self.__dict__[key], memo))
    return result
@property
def creds(self):
    '''Credentials dict received from the master (set after sign-in).'''
    return self._creds

@property
def crypticle(self):
    '''Crypticle built from the shared AES key (set after sign-in).'''
    return self._crypticle

@property
def authenticated(self):
    '''True once a sign-in future has completed without an exception.'''
    return hasattr(self, '_authenticate_future') and \
           self._authenticate_future.done() and \
           self._authenticate_future.exception() is None
def invalidate(self):
    """Forget a completed authentication so the next call signs in again."""
    if not self.authenticated:
        return
    del self._authenticate_future
    key = self.__key(self.opts)
    if key in AsyncAuth.creds_map:
        del AsyncAuth.creds_map[key]
def authenticate(self, callback=None):
    '''
    Ask for this client to reconnect to the origin

    This function will de-dupe all calls here and return a *single* future
    for the sign-in-- whis way callers can all assume there aren't others

    :param callback: Optional callable invoked on the io_loop with the
        future's result once sign-in completes
    :return: The shared sign-in future
    '''
    # if an auth is in flight-- and not done-- just pass that back as the future to wait on
    if hasattr(self, '_authenticate_future') and not self._authenticate_future.done():
        future = self._authenticate_future
    else:
        future = tornado.concurrent.Future()
        self._authenticate_future = future
        self.io_loop.add_callback(self._authenticate)

    if callback is not None:
        def handle_future(future):
            # Propagate the result to the caller on the io_loop.
            response = future.result()
            self.io_loop.add_callback(callback, response)
        future.add_done_callback(handle_future)

    return future
@tornado.gen.coroutine
def _authenticate(self):
    '''
    Authenticate with the master, this method breaks the functional
    paradigm, it will update the master information from a fresh sign
    in, signing in can occur as often as needed to keep up with the
    revolving master AES key.

    :rtype: Crypticle
    :returns: A crypticle used for encryption operations
    '''
    acceptance_wait_time = self.opts['acceptance_wait_time']
    acceptance_wait_time_max = self.opts['acceptance_wait_time_max']
    if not acceptance_wait_time_max:
        acceptance_wait_time_max = acceptance_wait_time
    creds = None
    channel = salt.transport.client.AsyncReqChannel.factory(self.opts,
                                                            crypt='clear',
                                                            io_loop=self.io_loop)
    error = None
    # Retry loop: keep signing in (with doubling backoff, capped at
    # acceptance_wait_time_max) until we get creds or a hard failure.
    while True:
        try:
            creds = yield self.sign_in(channel=channel)
        except SaltClientError as exc:
            error = exc
            break
        if creds == 'retry':
            if self.opts.get('detect_mode') is True:
                error = SaltClientError('Detect mode is on')
                break
            if self.opts.get('caller'):
                # salt-call: give the user a hint and exit instead of looping.
                print('Minion failed to authenticate with the master, '
                      'has the minion key been accepted?')
                sys.exit(2)
            if acceptance_wait_time:
                log.info('Waiting {0} seconds before retry.'.format(acceptance_wait_time))
                yield tornado.gen.sleep(acceptance_wait_time)
            if acceptance_wait_time < acceptance_wait_time_max:
                acceptance_wait_time += acceptance_wait_time
                log.debug('Authentication wait time is {0}'.format(acceptance_wait_time))
            continue
        break
    if not isinstance(creds, dict) or 'aes' not in creds:
        # Sign-in failed: drop any cached creds and fail the shared future.
        if self.opts.get('detect_mode') is True:
            error = SaltClientError('-|RETRY|-')
        try:
            del AsyncAuth.creds_map[self.__key(self.opts)]
        except KeyError:
            pass
        if not error:
            error = SaltClientError('Attempt to authenticate with the salt master failed')
        self._authenticate_future.set_exception(error)
    else:
        # Success: cache creds, build the crypticle, resolve the future.
        key = self.__key(self.opts)
        AsyncAuth.creds_map[key] = creds
        self._creds = creds
        self._crypticle = Crypticle(self.opts, creds['aes'])
        self._authenticate_future.set_result(True)  # mark the sign-in as complete
        # Notify the bus about creds change
        event = salt.utils.event.get_event(self.opts.get('__role'), opts=self.opts, listen=False)
        event.fire_event({'key': key, 'creds': creds}, salt.utils.event.tagify(prefix='auth', suffix='creds'))
@tornado.gen.coroutine
def sign_in(self, timeout=60, safe=True, tries=1, channel=None):
    '''
    Send a sign in request to the master, sets the key information and
    returns a dict containing the master publish interface to bind to
    and the decrypted aes key for transport decryption.

    :param int timeout: Number of seconds to wait before timing out the sign-in request
    :param bool safe: If True, do not raise an exception on timeout. Retry instead.
    :param int tries: The number of times to try to authenticate before giving up.

    :raises SaltReqTimeoutError: If the sign-in request has timed out and :param safe: is not set

    :return: Return a string on failure indicating the reason for failure. On success, return a dictionary
    with the publication port and the shared AES key.
    '''
    auth = {}

    # Per-call knobs may be overridden from the minion config.
    auth_timeout = self.opts.get('auth_timeout', None)
    if auth_timeout is not None:
        timeout = auth_timeout
    auth_safemode = self.opts.get('auth_safemode', None)
    if auth_safemode is not None:
        safe = auth_safemode
    auth_tries = self.opts.get('auth_tries', None)
    if auth_tries is not None:
        tries = auth_tries

    m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)

    auth['master_uri'] = self.opts['master_uri']

    if not channel:
        channel = salt.transport.client.AsyncReqChannel.factory(self.opts,
                                                                crypt='clear',
                                                                io_loop=self.io_loop)

    sign_in_payload = self.minion_sign_in_payload()
    try:
        payload = yield channel.send(
            sign_in_payload,
            tries=tries,
            timeout=timeout
        )
    except SaltReqTimeoutError as e:
        if safe:
            log.warning('SaltReqTimeoutError: {0}'.format(e))
            raise tornado.gen.Return('retry')
        if self.opts.get('detect_mode') is True:
            raise tornado.gen.Return('retry')
        else:
            raise SaltClientError('Attempt to authenticate with the salt master failed with timeout error')
    if not isinstance(payload, dict):
        log.error('Sign-in attempt failed: %s', payload)
        raise tornado.gen.Return(False)
    if 'load' in payload:
        if 'ret' in payload['load']:
            if not payload['load']['ret']:
                # Master explicitly rejected this minion's key.
                if self.opts['rejected_retry']:
                    log.error(
                        'The Salt Master has rejected this minion\'s public '
                        'key.\nTo repair this issue, delete the public key '
                        'for this minion on the Salt Master.\nThe Salt '
                        'Minion will attempt to to re-authenicate.'
                    )
                    raise tornado.gen.Return('retry')
                else:
                    log.critical(
                        'The Salt Master has rejected this minion\'s public '
                        'key!\nTo repair this issue, delete the public key '
                        'for this minion on the Salt Master and restart this '
                        'minion.\nOr restart the Salt Master in open mode to '
                        'clean out the keys. The Salt Minion will now exit.'
                    )
                    sys.exit(salt.defaults.exitcodes.EX_OK)
            # has the master returned that its maxed out with minions?
            elif payload['load']['ret'] == 'full':
                raise tornado.gen.Return('full')
            else:
                # Key is cached but not yet accepted: back off and retry.
                log.error(
                    'The Salt Master has cached the public key for this '
                    'node, this salt minion will wait for {0} seconds '
                    'before attempting to re-authenticate'.format(
                        self.opts['acceptance_wait_time']
                    )
                )
                raise tornado.gen.Return('retry')
    # Verify the master's reply (and its public key / signature).
    auth['aes'] = self.verify_master(payload, master_pub='token' in sign_in_payload)
    if not auth['aes']:
        log.critical(
            'The Salt Master server\'s public key did not authenticate!\n'
            'The master may need to be updated if it is a version of Salt '
            'lower than {0}, or\n'
            'If you are confident that you are connecting to a valid Salt '
            'Master, then remove the master public key and restart the '
            'Salt Minion.\nThe master public key can be found '
            'at:\n{1}'.format(salt.version.__version__, m_pub_fn)
        )
        raise SaltClientError('Invalid master key')
    # Optional fingerprint pinning of the master public key.
    if self.opts.get('syndic_master', False):  # Is syndic
        syndic_finger = self.opts.get('syndic_finger', self.opts.get('master_finger', False))
        if syndic_finger:
            if salt.utils.pem_finger(m_pub_fn, sum_type=self.opts['hash_type']) != syndic_finger:
                self._finger_fail(syndic_finger, m_pub_fn)
    else:
        if self.opts.get('master_finger', False):
            if salt.utils.pem_finger(m_pub_fn, sum_type=self.opts['hash_type']) != self.opts['master_finger']:
                self._finger_fail(self.opts['master_finger'], m_pub_fn)
    auth['publish_port'] = payload['publish_port']
    raise tornado.gen.Return(auth)
def get_keys(self):
    '''
    Return keypair object for the minion.

    Loads minion.pem from the pki dir, generating the keypair first if
    it does not exist yet.

    :rtype: Crypto.PublicKey.RSA._RSAobj
    :return: The RSA keypair
    '''
    # Make sure all key parent directories are accessible
    user = self.opts.get('user', 'root')
    salt.utils.verify.check_path_traversal(self.opts['pki_dir'], user)

    if os.path.exists(self.rsa_path):
        with salt.utils.fopen(self.rsa_path) as f:
            key = RSA.importKey(f.read())
        log.debug('Loaded minion key: {0}'.format(self.rsa_path))
    else:
        log.info('Generating keys: {0}'.format(self.opts['pki_dir']))
        gen_keys(self.opts['pki_dir'],
                 'minion',
                 self.opts['keysize'],
                 self.opts.get('user'))
        with salt.utils.fopen(self.rsa_path) as f:
            key = RSA.importKey(f.read())
    return key
def gen_token(self, clear_tok):
    '''
    Encrypt a string with the minion private key to verify identity
    with the master.

    :param str clear_tok: A plaintext token to encrypt
    :return: Encrypted token
    :rtype: str
    '''
    minion_key = self.get_keys()
    return private_encrypt(minion_key, clear_tok)
def minion_sign_in_payload(self):
    '''
    Generates the payload used to authenticate with the master
    server. This payload consists of the passed in id_ and the ssh
    public key to encrypt the AES key sent back from the master.

    :return: Payload dictionary
    :rtype: dict
    '''
    payload = {}
    payload['cmd'] = '_auth'
    payload['id'] = self.opts['id']
    try:
        # Encrypt our one-shot token with the cached master pubkey so the
        # master can prove possession of its private key in the reply.
        pubkey_path = os.path.join(self.opts['pki_dir'], self.mpub)
        with salt.utils.fopen(pubkey_path) as f:
            pub = RSA.importKey(f.read())
        cipher = PKCS1_OAEP.new(pub)
        payload['token'] = cipher.encrypt(self.token)
    except Exception:
        # NOTE(review): broad swallow — on first contact the master pubkey
        # does not exist yet, so 'token' is simply omitted; any other error
        # (corrupt key, bad perms) is silently hidden too. Confirm intent.
        pass
    with salt.utils.fopen(self.pub_path) as f:
        payload['pub'] = f.read()
    return payload
def decrypt_aes(self, payload, master_pub=True):
    '''
    This function is used to decrypt the AES seed phrase returned from
    the master server. The seed phrase is decrypted with the SSH RSA
    host key.

    Pass in the encrypted AES key.
    Returns the decrypted AES seed key, a string

    :param dict payload: The incoming payload. This is a dictionary which may have the following keys:
        'aes': The shared AES key
        'enc': The format of the message. ('clear', 'pub', etc)
        'sig': The message signature
        'publish_port': The TCP port which published the message
        'token': The encrypted token used to verify the message.
        'pub_key': The public key of the sender.

    :rtype: str
    :return: The decrypted token that was provided, with padding.

    :rtype: str
    :return: The decrypted AES seed key
    '''
    if self.opts.get('auth_trb', False):
        log.warning(
            'Auth Called: {0}'.format(
                ''.join(traceback.format_stack())
            )
        )
    else:
        log.debug('Decrypting the current master AES key')
    key = self.get_keys()
    cipher = PKCS1_OAEP.new(key)
    key_str = cipher.decrypt(payload['aes'])
    if 'sig' in payload:
        m_path = os.path.join(self.opts['pki_dir'], self.mpub)
        if os.path.exists(m_path):
            try:
                with salt.utils.fopen(m_path) as f:
                    mkey = RSA.importKey(f.read())
            except Exception:
                # Unreadable/corrupt cached master key: fail closed.
                return '', ''
            digest = hashlib.sha256(key_str).hexdigest()
            if six.PY3:
                digest = salt.utils.to_bytes(digest)
            # The master signs the sha256 digest of the AES key string;
            # mismatch means the payload cannot be trusted.
            m_digest = public_decrypt(mkey.publickey(), payload['sig'])
            if m_digest != digest:
                return '', ''
    else:
        # A payload without a signature is rejected outright.
        return '', ''
    if six.PY3:
        key_str = salt.utils.to_str(key_str)

    if '_|-' in key_str:
        return key_str.split('_|-')
    else:
        if 'token' in payload:
            token = cipher.decrypt(payload['token'])
            return key_str, token
        elif not master_pub:
            return key_str, ''
    return '', ''
def verify_pubkey_sig(self, message, sig):
    '''
    Wraps the verify_signature method so we have
    additional checks.

    :param str message: The masters public key (the signed data).
    :param str sig: Base64-encoded signature to verify.
    :rtype: bool
    :return: Success or failure of public key verification
    '''
    if self.opts['master_sign_key_name']:
        # The verification key must have been copied to the minion's pki
        # directory out of band; it is never fetched automatically.
        path = os.path.join(self.opts['pki_dir'],
                            self.opts['master_sign_key_name'] + '.pub')

        if os.path.isfile(path):
            res = verify_signature(path,
                                   message,
                                   binascii.a2b_base64(sig))
        else:
            log.error('Verification public key {0} does not exist. You '
                      'need to copy it from the master to the minions '
                      'pki directory'.format(os.path.basename(path)))
            return False
        if res:
            log.debug('Successfully verified signature of master '
                      'public key with verification public key '
                      '{0}'.format(self.opts['master_sign_key_name'] + '.pub'))
            return True
        else:
            log.debug('Failed to verify signature of public key')
            return False
    else:
        # Without master_sign_key_name we do not even know which key to
        # verify against, so fail closed.
        log.error('Failed to verify the signature of the message because '
                  'the verification key-pairs name is not defined. Please '
                  'make sure that master_sign_key_name is defined.')
        return False
def verify_signing_master(self, payload):
    '''
    Verify the master's pubkey signature and, on success, cache the
    signed pubkey in the minion's pki directory.

    :param dict payload: Payload containing 'pub_key' and 'pub_sig'.
    :rtype: bool
    :return: True if the signature verified, False if it did not.
    :raises Exception: re-raises any error hit during verification.
    '''
    try:
        if self.verify_pubkey_sig(payload['pub_key'],
                                  payload['pub_sig']):
            log.info('Received signed and verified master pubkey '
                     'from master {0}'.format(self.opts['master']))
            m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
            uid = salt.utils.get_uid(self.opts.get('user', None))
            # Persist the verified pubkey so future connections can be
            # checked against it.
            with salt.utils.fpopen(m_pub_fn, 'wb+', uid=uid) as wfh:
                wfh.write(salt.utils.to_bytes(payload['pub_key']))
            return True
        else:
            log.error('Received signed public-key from master {0} '
                      'but signature verification failed!'.format(self.opts['master']))
            return False
    except Exception:
        log.error('There was an error while verifying the masters public-key signature')
        # BUG FIX: previously this re-wrapped the error as
        # ``raise Exception(sign_exc)``, discarding the original type and
        # traceback. A bare ``raise`` preserves both; callers catching
        # ``Exception`` are unaffected.
        raise
def check_auth_deps(self, payload):
    '''
    Check that master signing and minion verification agree.

    Authentication only proceeds when either both sides use pubkey
    signing/verification or neither does; a mismatch fails.

    :param dict payload: The incoming payload. This is a dictionary which may have the following keys:
        'aes': The shared AES key
        'enc': The format of the message. ('clear', 'pub', 'aes')
        'publish_port': The TCP port which published the message
        'token': The encrypted token used to verify the message.
        'pub_key': The RSA public key of the sender.
    :rtype: bool
    '''
    master_signs = 'pub_sig' in payload
    minion_verifies = self.opts['verify_master_pubkey_sign']

    # Both sides agree (sign+verify, or neither): OK.
    if master_signs and minion_verifies:
        return True
    if not master_signs and not minion_verifies:
        return True

    # master signs, but minion does NOT verify
    if master_signs:
        log.error('The masters sent its public-key signature, but signature '
                  'verification is not enabled on the minion. Either enable '
                  'signature verification on the minion or disable signing '
                  'the public key on the master!')
        return False

    # master does NOT sign but minion wants to verify
    log.error('The master did not send its public-key signature, but '
              'signature verification is enabled on the minion. Either '
              'disable signature verification on the minion or enable '
              'signing the public on the master!')
    return False
def extract_aes(self, payload, master_pub=True):
    '''
    Return the AES key received from the master after the minion has been
    successfully authenticated.

    :param dict payload: The incoming payload. This is a dictionary which may have the following keys:
        'aes': The shared AES key
        'enc': The format of the message. ('clear', 'pub', etc)
        'publish_port': The TCP port which published the message
        'token': The encrypted token used to verify the message.
        'pub_key': The RSA public key of the sender.
    :rtype: str
    :return: The shared AES key received from the master.
    '''
    if not master_pub:
        # No token round trip requested; just hand back the AES key.
        aes_key, _token = self.decrypt_aes(payload, master_pub)
        return aes_key

    try:
        aes_key, returned_token = self.decrypt_aes(payload, master_pub)
        token_ok = returned_token == self.token
    except Exception:
        log.error(
            'The master failed to decrypt the random minion token'
        )
        return ''

    if not token_ok:
        log.error(
            'The master failed to decrypt the random minion token'
        )
        return ''
    return aes_key
def verify_master(self, payload, master_pub=True):
    '''
    Verify that the master is the same one that was previously accepted.

    :param dict payload: The incoming payload. This is a dictionary which may have the following keys:
        'aes': The shared AES key
        'enc': The format of the message. ('clear', 'pub', etc)
        'publish_port': The TCP port which published the message
        'token': The encrypted token used to verify the message.
        'pub_key': The RSA public key of the sender.
    :param bool master_pub: Operate as if minion had no master pubkey when it sent auth request, i.e. don't verify
    the minion signature

    :rtype: str
    :return: An empty string on verification failure. On success, the decrypted AES message in the payload.
    '''
    m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
    m_pub_exists = os.path.isfile(m_pub_fn)
    if m_pub_exists and master_pub and not self.opts['open_mode']:
        # Normal case: compare the presented pubkey against the cached one,
        # ignoring line-ending differences.
        with salt.utils.fopen(m_pub_fn) as fp_:
            local_master_pub = fp_.read()

        if payload['pub_key'].replace('\n', '').replace('\r', '') != \
                local_master_pub.replace('\n', '').replace('\r', ''):
            # The key changed. Only acceptable if the new key carries a
            # valid signature from the configured signing key.
            if not self.check_auth_deps(payload):
                return ''

            if self.opts['verify_master_pubkey_sign']:
                if self.verify_signing_master(payload):
                    return self.extract_aes(payload, master_pub=False)
                else:
                    return ''
            else:
                # This is not the last master we connected to
                log.error('The master key has changed, the salt master could '
                          'have been subverted, verify salt master\'s public '
                          'key')
                return ''
        else:
            if not self.check_auth_deps(payload):
                return ''
            # verify the signature of the pubkey even if it has
            # not changed compared with the one we already have
            if self.opts['always_verify_signature']:
                if self.verify_signing_master(payload):
                    return self.extract_aes(payload)
                else:
                    log.error('The masters public could not be verified. Is the '
                              'verification pubkey {0} up to date?'
                              ''.format(self.opts['master_sign_key_name'] + '.pub'))
                    return ''
            else:
                return self.extract_aes(payload)
    else:
        # First contact (no cached key), open_mode, or master_pub=False.
        if not self.check_auth_deps(payload):
            return ''
        # verify the masters pubkey signature if the minion
        # has not received any masters pubkey before
        if self.opts['verify_master_pubkey_sign']:
            if self.verify_signing_master(payload):
                return self.extract_aes(payload, master_pub=False)
            else:
                return ''
        else:
            if not m_pub_exists:
                # the minion has not received any masters pubkey yet, write
                # the newly received pubkey to minion_master.pub
                with salt.utils.fopen(m_pub_fn, 'wb+') as fp_:
                    fp_.write(salt.utils.to_bytes(payload['pub_key']))
            return self.extract_aes(payload, master_pub=False)
def _finger_fail(self, finger, master_key):
    '''
    Log a configured-vs-actual master key fingerprint mismatch and
    terminate the process with exit code 42.
    '''
    actual_finger = salt.utils.pem_finger(
        master_key, sum_type=self.opts['hash_type'])
    log.critical(
        'The specified fingerprint in the master configuration '
        'file:\n{0}\nDoes not match the authenticating master\'s '
        'key:\n{1}\nVerify that the configured fingerprint '
        'matches the fingerprint of the correct master and that '
        'this minion is not subject to a man-in-the-middle attack.'
        .format(finger, actual_finger)
    )
    sys.exit(42)
# TODO: remove, we should just return a sync wrapper of AsyncAuth
class SAuth(AsyncAuth):
    '''
    Set up an object to maintain authentication with the salt master
    '''
    # This class is only a singleton per minion/master pair
    instances = weakref.WeakValueDictionary()

    def __new__(cls, opts, io_loop=None):
        '''
        Only create one instance of SAuth per __key()
        '''
        key = cls.__key(opts)
        auth = SAuth.instances.get(key)
        if auth is None:
            log.debug('Initializing new SAuth for {0}'.format(key))
            auth = object.__new__(cls)
            auth.__singleton_init__(opts)
            SAuth.instances[key] = auth
        else:
            log.debug('Re-using SAuth for {0}'.format(key))
        return auth

    @classmethod
    def __key(cls, opts, io_loop=None):
        return (opts['pki_dir'],     # where the keys are stored
                opts['id'],          # minion ID
                opts['master_uri'],  # master ID
                )

    # has to remain empty for singletons, since __init__ will *always* be called
    def __init__(self, opts, io_loop=None):
        super(SAuth, self).__init__(opts, io_loop=io_loop)

    # an init for the singleton instance to call
    def __singleton_init__(self, opts, io_loop=None):
        '''
        Init an Auth instance

        :param dict opts: Options for this server
        :return: Auth instance
        :rtype: Auth
        '''
        self.opts = opts
        # Random per-minion token; the master must echo it back decrypted
        # to prove it holds the minion's accepted public key.
        if six.PY2:
            self.token = Crypticle.generate_key_string()
        else:
            self.token = salt.utils.to_bytes(Crypticle.generate_key_string())
        self.serial = salt.payload.Serial(self.opts)
        self.pub_path = os.path.join(self.opts['pki_dir'], 'minion.pub')
        self.rsa_path = os.path.join(self.opts['pki_dir'], 'minion.pem')
        if 'syndic_master' in self.opts:
            self.mpub = 'syndic_master.pub'
        elif 'alert_master' in self.opts:
            self.mpub = 'monitor_master.pub'
        else:
            self.mpub = 'minion_master.pub'
        if not os.path.isfile(self.pub_path):
            self.get_keys()

    @property
    def creds(self):
        # Authenticate lazily on first access.
        if not hasattr(self, '_creds'):
            self.authenticate()
        return self._creds

    @property
    def crypticle(self):
        # Authenticate lazily on first access.
        if not hasattr(self, '_crypticle'):
            self.authenticate()
        return self._crypticle

    def authenticate(self, _=None):  # TODO: remove unused var
        '''
        Authenticate with the master, this method breaks the functional
        paradigm, it will update the master information from a fresh sign
        in, signing in can occur as often as needed to keep up with the
        revolving master AES key.

        :rtype: Crypticle
        :returns: A crypticle used for encryption operations
        '''
        acceptance_wait_time = self.opts['acceptance_wait_time']
        acceptance_wait_time_max = self.opts['acceptance_wait_time_max']
        channel = salt.transport.client.ReqChannel.factory(self.opts, crypt='clear')
        if not acceptance_wait_time_max:
            acceptance_wait_time_max = acceptance_wait_time
        while True:
            creds = self.sign_in(channel=channel)
            if creds == 'retry':
                if self.opts.get('caller'):
                    print('Minion failed to authenticate with the master, '
                          'has the minion key been accepted?')
                    sys.exit(2)
                if acceptance_wait_time:
                    log.info('Waiting {0} seconds before retry.'.format(acceptance_wait_time))
                    time.sleep(acceptance_wait_time)
                # Exponential-ish backoff, capped at acceptance_wait_time_max.
                if acceptance_wait_time < acceptance_wait_time_max:
                    acceptance_wait_time += acceptance_wait_time
                    log.debug('Authentication wait time is {0}'.format(acceptance_wait_time))
                continue
            break
        self._creds = creds
        self._crypticle = Crypticle(self.opts, creds['aes'])

    def sign_in(self, timeout=60, safe=True, tries=1, channel=None):
        '''
        Send a sign in request to the master, sets the key information and
        returns a dict containing the master publish interface to bind to
        and the decrypted aes key for transport decryption.

        :param int timeout: Number of seconds to wait before timing out the sign-in request
        :param bool safe: If True, do not raise an exception on timeout. Retry instead.
        :param int tries: The number of times to try to authenticate before giving up.

        :raises SaltReqTimeoutError: If the sign-in request has timed out and :param safe: is not set

        :return: Return a string on failure indicating the reason for failure. On success, return a dictionary
        with the publication port and the shared AES key.
        '''
        auth = {}

        # Config overrides for the keyword defaults.
        auth_timeout = self.opts.get('auth_timeout', None)
        if auth_timeout is not None:
            timeout = auth_timeout
        auth_safemode = self.opts.get('auth_safemode', None)
        if auth_safemode is not None:
            safe = auth_safemode
        auth_tries = self.opts.get('auth_tries', None)
        if auth_tries is not None:
            tries = auth_tries

        m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)

        auth['master_uri'] = self.opts['master_uri']

        if not channel:
            channel = salt.transport.client.ReqChannel.factory(self.opts, crypt='clear')

        sign_in_payload = self.minion_sign_in_payload()
        try:
            payload = channel.send(
                sign_in_payload,
                tries=tries,
                timeout=timeout
            )
        except SaltReqTimeoutError as e:
            if safe:
                log.warning('SaltReqTimeoutError: {0}'.format(e))
                return 'retry'
            raise SaltClientError('Attempt to authenticate with the salt master failed with timeout error')

        if 'load' in payload:
            if 'ret' in payload['load']:
                if not payload['load']['ret']:
                    if self.opts['rejected_retry']:
                        # BUG FIX: message previously read "attempt to to
                        # re-authenicate" (doubled word + misspelling).
                        log.error(
                            'The Salt Master has rejected this minion\'s public '
                            'key.\nTo repair this issue, delete the public key '
                            'for this minion on the Salt Master.\nThe Salt '
                            'Minion will attempt to re-authenticate.'
                        )
                        return 'retry'
                    else:
                        log.critical(
                            'The Salt Master has rejected this minion\'s public '
                            'key!\nTo repair this issue, delete the public key '
                            'for this minion on the Salt Master and restart this '
                            'minion.\nOr restart the Salt Master in open mode to '
                            'clean out the keys. The Salt Minion will now exit.'
                        )
                        # NOTE(review): exits with EX_OK even though this is a
                        # failure path — preserved for compatibility; confirm
                        # whether a non-zero code is intended upstream.
                        sys.exit(salt.defaults.exitcodes.EX_OK)
                # has the master returned that its maxed out with minions?
                elif payload['load']['ret'] == 'full':
                    return 'full'
                else:
                    log.error(
                        'The Salt Master has cached the public key for this '
                        'node. If this is the first time connecting to this master '
                        'then this key may need to be accepted using \'salt-key -a {0}\' on '
                        'the salt master. This salt minion will wait for {1} seconds '
                        'before attempting to re-authenticate.'.format(
                            self.opts['id'],
                            self.opts['acceptance_wait_time']
                        )
                    )
                    return 'retry'
        auth['aes'] = self.verify_master(payload, master_pub='token' in sign_in_payload)
        if not auth['aes']:
            log.critical(
                'The Salt Master server\'s public key did not authenticate!\n'
                'The master may need to be updated if it is a version of Salt '
                'lower than {0}, or\n'
                'If you are confident that you are connecting to a valid Salt '
                'Master, then remove the master public key and restart the '
                'Salt Minion.\nThe master public key can be found '
                'at:\n{1}'.format(salt.version.__version__, m_pub_fn)
            )
            sys.exit(42)
        # Optional fingerprint pinning of the master key.
        if self.opts.get('syndic_master', False):  # Is syndic
            syndic_finger = self.opts.get('syndic_finger', self.opts.get('master_finger', False))
            if syndic_finger:
                if salt.utils.pem_finger(m_pub_fn, sum_type=self.opts['hash_type']) != syndic_finger:
                    self._finger_fail(syndic_finger, m_pub_fn)
        else:
            if self.opts.get('master_finger', False):
                if salt.utils.pem_finger(m_pub_fn, sum_type=self.opts['hash_type']) != self.opts['master_finger']:
                    self._finger_fail(self.opts['master_finger'], m_pub_fn)

        auth['publish_port'] = payload['publish_port']
        return auth
class Crypticle(object):
    '''
    Authenticated encryption class

    Encryption algorithm: AES-CBC
    Signing algorithm: HMAC-SHA256

    The packed key material is ``base64(aes_key + hmac_key)``; on the wire a
    message is ``iv + ciphertext + hmac(iv + ciphertext)``.
    '''

    # Prefix marking a serialized payload inside an encrypted message.
    PICKLE_PAD = b'pickle::'
    AES_BLOCK_SIZE = 16
    SIG_SIZE = hashlib.sha256().digest_size

    def __init__(self, opts, key_string, key_size=192):
        self.key_string = key_string
        # (aes_key, hmac_key) tuple split out of the base64 key string.
        self.keys = self.extract_keys(self.key_string, key_size)
        self.key_size = key_size
        self.serial = salt.payload.Serial(opts)

    @classmethod
    def generate_key_string(cls, key_size=192):
        '''
        Return a base64 string packing a fresh random AES key of
        ``key_size`` bits plus an HMAC key of SIG_SIZE bytes.
        '''
        key = os.urandom(key_size // 8 + cls.SIG_SIZE)
        b64key = base64.b64encode(key)
        if six.PY3:
            b64key = b64key.decode('utf-8')
        return b64key.replace('\n', '')

    @classmethod
    def extract_keys(cls, key_string, key_size):
        '''
        Decode a key string and split it into (aes_key, hmac_key).
        '''
        if six.PY2:
            key = key_string.decode('base64')
        else:
            key = salt.utils.to_bytes(base64.b64decode(key_string))
        # NOTE(review): assert is stripped under ``python -O`` — confirm
        # whether an explicit exception is preferable upstream.
        assert len(key) == key_size / 8 + cls.SIG_SIZE, 'invalid key'
        return key[:-cls.SIG_SIZE], key[-cls.SIG_SIZE:]

    def encrypt(self, data):
        '''
        encrypt data with AES-CBC and sign it with HMAC-SHA256
        '''
        aes_key, hmac_key = self.keys
        # PKCS#7-style padding: always append 1..16 bytes, each equal to
        # the pad length.
        pad = self.AES_BLOCK_SIZE - len(data) % self.AES_BLOCK_SIZE
        if six.PY2:
            data = data + pad * chr(pad)
        else:
            data = data + salt.utils.to_bytes(pad * chr(pad))
        iv_bytes = os.urandom(self.AES_BLOCK_SIZE)
        cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes)
        data = iv_bytes + cypher.encrypt(data)
        # MAC covers iv + ciphertext (encrypt-then-MAC).
        sig = hmac.new(hmac_key, data, hashlib.sha256).digest()
        return data + sig

    def decrypt(self, data):
        '''
        verify HMAC-SHA256 signature and decrypt data with AES-CBC
        '''
        aes_key, hmac_key = self.keys
        sig = data[-self.SIG_SIZE:]
        data = data[:-self.SIG_SIZE]
        if six.PY3 and not isinstance(data, bytes):
            data = salt.utils.to_bytes(data)
        mac_bytes = hmac.new(hmac_key, data, hashlib.sha256).digest()
        if len(mac_bytes) != len(sig):
            log.debug('Failed to authenticate message')
            raise AuthenticationError('message authentication failed')
        # Constant-time comparison: XOR every byte pair and accumulate so
        # the time taken does not reveal where the digests diverge.
        result = 0

        if six.PY2:
            for zipped_x, zipped_y in zip(mac_bytes, sig):
                result |= ord(zipped_x) ^ ord(zipped_y)
        else:
            for zipped_x, zipped_y in zip(mac_bytes, sig):
                result |= zipped_x ^ zipped_y
        if result != 0:
            log.debug('Failed to authenticate message')
            raise AuthenticationError('message authentication failed')
        iv_bytes = data[:self.AES_BLOCK_SIZE]
        data = data[self.AES_BLOCK_SIZE:]
        cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes)
        data = cypher.decrypt(data)
        # Strip the padding: the last byte gives the pad length.
        if six.PY2:
            return data[:-ord(data[-1])]
        else:
            return data[:-data[-1]]

    def dumps(self, obj):
        '''
        Serialize and encrypt a python object
        '''
        return self.encrypt(self.PICKLE_PAD + self.serial.dumps(obj))

    def loads(self, data, raw=False):
        '''
        Decrypt and un-serialize a python object
        '''
        data = self.decrypt(data)
        # simple integrity check to verify that we got meaningful data
        if not data.startswith(self.PICKLE_PAD):
            return {}
        load = self.serial.loads(data[len(self.PICKLE_PAD):], raw=raw)
        return load
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_2816_0 |
crossvul-python_data_bad_3500_4 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3500_4 |
crossvul-python_data_bad_2156_1 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-20/py/bad_2156_1 |
crossvul-python_data_good_100_1 | # -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import six
from synapse.api.constants import MAX_DEPTH
from synapse.api.errors import SynapseError, Codes
from synapse.crypto.event_signing import check_event_content_hash
from synapse.events import FrozenEvent
from synapse.events.utils import prune_event
from synapse.http.servlet import assert_params_in_request
from synapse.util import unwrapFirstError, logcontext
from twisted.internet import defer
logger = logging.getLogger(__name__)
class FederationBase(object):
    """Shared signature/hash-checking machinery for the federation
    client and server."""

    def __init__(self, hs):
        self.hs = hs

        self.server_name = hs.hostname
        self.keyring = hs.get_keyring()
        self.spam_checker = hs.get_spam_checker()
        self.store = hs.get_datastore()
        self._clock = hs.get_clock()

    @defer.inlineCallbacks
    def _check_sigs_and_hash_and_fetch(self, origin, pdus, outlier=False,
                                       include_none=False):
        """Takes a list of PDUs and checks the signatures and hashs of each
        one. If a PDU fails its signature check then we check if we have it in
        the database and if not then request if from the originating server of
        that PDU.

        If a PDU fails its content hash check then it is redacted.

        The given list of PDUs are not modified, instead the function returns
        a new list.

        Args:
            origin (str): the server we received the PDUs from
            pdu (list)
            outlier (bool)
            include_none (bool): if True, keep None placeholders for PDUs
                that could not be recovered

        Returns:
            Deferred : A list of PDUs that have valid signatures and hashes.
        """
        deferreds = self._check_sigs_and_hashes(pdus)

        @defer.inlineCallbacks
        def handle_check_result(pdu, deferred):
            # Fallback chain: signature check -> local DB -> origin server.
            try:
                res = yield logcontext.make_deferred_yieldable(deferred)
            except SynapseError:
                res = None

            if not res:
                # Check local db.
                res = yield self.store.get_event(
                    pdu.event_id,
                    allow_rejected=True,
                    allow_none=True,
                )

            if not res and pdu.origin != origin:
                try:
                    res = yield self.get_pdu(
                        destinations=[pdu.origin],
                        event_id=pdu.event_id,
                        outlier=outlier,
                        timeout=10000,
                    )
                except SynapseError:
                    pass

            if not res:
                logger.warn(
                    "Failed to find copy of %s with valid signature",
                    pdu.event_id,
                )

            defer.returnValue(res)

        handle = logcontext.preserve_fn(handle_check_result)
        deferreds2 = [
            handle(pdu, deferred)
            for pdu, deferred in zip(pdus, deferreds)
        ]

        valid_pdus = yield logcontext.make_deferred_yieldable(
            defer.gatherResults(
                deferreds2,
                consumeErrors=True,
            )
        ).addErrback(unwrapFirstError)

        if include_none:
            defer.returnValue(valid_pdus)
        else:
            defer.returnValue([p for p in valid_pdus if p])

    def _check_sigs_and_hash(self, pdu):
        # Single-PDU convenience wrapper around _check_sigs_and_hashes.
        return logcontext.make_deferred_yieldable(
            self._check_sigs_and_hashes([pdu])[0],
        )

    def _check_sigs_and_hashes(self, pdus):
        """Checks that each of the received events is correctly signed by the
        sending server.

        Args:
            pdus (list[FrozenEvent]): the events to be checked

        Returns:
            list[Deferred]: for each input event, a deferred which:
              * returns the original event if the checks pass
              * returns a redacted version of the event (if the signature
                matched but the hash did not)
              * throws a SynapseError if the signature check failed.

            The deferreds run their callbacks in the sentinel logcontext.
        """
        # Signatures are checked over the *redacted* form of each event.
        redacted_pdus = [
            prune_event(pdu)
            for pdu in pdus
        ]

        deferreds = self.keyring.verify_json_objects_for_server([
            (p.origin, p.get_pdu_json())
            for p in redacted_pdus
        ])

        ctx = logcontext.LoggingContext.current_context()

        def callback(_, pdu, redacted):
            with logcontext.PreserveLoggingContext(ctx):
                # Content-hash failure or spam => hand back the redacted copy.
                if not check_event_content_hash(pdu):
                    logger.warn(
                        "Event content has been tampered, redacting %s: %s",
                        pdu.event_id, pdu.get_pdu_json()
                    )
                    return redacted

                if self.spam_checker.check_event_for_spam(pdu):
                    logger.warn(
                        "Event contains spam, redacting %s: %s",
                        pdu.event_id, pdu.get_pdu_json()
                    )
                    return redacted

                return pdu

        def errback(failure, pdu):
            failure.trap(SynapseError)
            with logcontext.PreserveLoggingContext(ctx):
                logger.warn(
                    "Signature check failed for %s",
                    pdu.event_id,
                )
            return failure

        for deferred, pdu, redacted in zip(deferreds, pdus, redacted_pdus):
            deferred.addCallbacks(
                callback, errback,
                callbackArgs=[pdu, redacted],
                errbackArgs=[pdu],
            )

        return deferreds
def event_from_pdu_json(pdu_json, outlier=False):
    """Construct a FrozenEvent from an event json received over federation

    Args:
        pdu_json (object): pdu as received over federation
        outlier (bool): True to mark this event as an outlier

    Returns:
        FrozenEvent

    Raises:
        SynapseError: if the pdu is missing required fields or is otherwise
            not a valid matrix event
    """
    # we could probably enforce a bunch of other fields here (room_id, sender,
    # origin, etc etc)
    assert_params_in_request(pdu_json, ('event_id', 'type', 'depth'))

    depth = pdu_json['depth']
    if not isinstance(depth, six.integer_types):
        # BUG FIX: error message previously misspelled "integer" as "intger".
        # NOTE(review): bool is a subclass of int, so a boolean depth passes
        # this check — confirm whether that should be rejected too.
        raise SynapseError(400, "Depth %r not an integer" % (depth, ),
                           Codes.BAD_JSON)

    if depth < 0:
        raise SynapseError(400, "Depth too small", Codes.BAD_JSON)
    elif depth > MAX_DEPTH:
        raise SynapseError(400, "Depth too large", Codes.BAD_JSON)

    event = FrozenEvent(
        pdu_json
    )

    event.internal_metadata.outlier = outlier

    return event
crossvul-python_data_bad_3660_2 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2011 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import context
from nova import db
from nova import flags
from nova import log as logging
from nova.openstack.common import cfg
from nova import utils
from nova.virt import netutils
LOG = logging.getLogger(__name__)

# Config option: when True (the default), instances on the same network may
# exchange traffic without matching an explicit security-group rule.
allow_same_net_traffic_opt = cfg.BoolOpt('allow_same_net_traffic',
        default=True,
        help='Whether to allow network traffic from same network')

FLAGS = flags.FLAGS
FLAGS.register_opt(allow_same_net_traffic_opt)
class FirewallDriver(object):
    """ Firewall Driver base class.

        Defines methods that any driver providing security groups
        and provider fireall functionality should implement.
    """
    def prepare_instance_filter(self, instance, network_info):
        """Prepare filters for the instance.

        At this point, the instance isn't running yet."""
        raise NotImplementedError()

    def unfilter_instance(self, instance, network_info):
        """Stop filtering instance"""
        raise NotImplementedError()

    def apply_instance_filter(self, instance, network_info):
        """Apply instance filter.

        Once this method returns, the instance should be firewalled
        appropriately. This method should as far as possible be a
        no-op. It's vastly preferred to get everything set up in
        prepare_instance_filter.
        """
        raise NotImplementedError()

    def refresh_security_group_rules(self, security_group_id):
        """Refresh security group rules from data store

        Gets called when a rule has been added to or removed from
        the security group."""
        raise NotImplementedError()

    def refresh_security_group_members(self, security_group_id):
        """Refresh security group members from data store

        Gets called when an instance gets added to or removed from
        the security group."""
        raise NotImplementedError()

    def refresh_provider_fw_rules(self):
        """Refresh common rules for all hosts/instances from data store.

        Gets called when a rule has been added to or removed from
        the list of rules (via admin api).
        """
        raise NotImplementedError()

    def setup_basic_filtering(self, instance, network_info):
        """Create rules to block spoofing and allow dhcp.

        This gets called when spawning an instance, before
        :py:meth:`prepare_instance_filter`.
        """
        raise NotImplementedError()

    def instance_filter_exists(self, instance, network_info):
        """Check nova-instance-instance-xxx exists"""
        raise NotImplementedError()

    def _handle_network_info_model(self, network_info):
        # make sure this is legacy network_info
        try:
            return network_info.legacy()
        except AttributeError:
            # no "legacy" function means network_info is legacy
            return network_info
class IptablesFirewallDriver(FirewallDriver):
"""Driver which enforces security groups through iptables rules."""
def __init__(self, **kwargs):
    # Imported here to avoid a circular import with nova.network.linux_net.
    from nova.network import linux_net
    self.iptables = linux_net.iptables_manager
    # instance id -> instance dict / network_info currently filtered.
    self.instances = {}
    self.network_infos = {}
    self.basicly_filtered = False

    # Fallback chains drop anything not matched by a security-group rule.
    self.iptables.ipv4['filter'].add_chain('sg-fallback')
    self.iptables.ipv4['filter'].add_rule('sg-fallback', '-j DROP')
    self.iptables.ipv6['filter'].add_chain('sg-fallback')
    self.iptables.ipv6['filter'].add_rule('sg-fallback', '-j DROP')
def setup_basic_filtering(self, instance, network_info):
    """Intentionally a no-op for the iptables driver."""
    pass
def apply_instance_filter(self, instance, network_info):
    """Deliberate no-op: all work happens in prepare_instance_filter."""
    pass
def unfilter_instance(self, instance, network_info):
    """Remove the instance's chains and forget its cached state."""
    # make sure this is legacy nw_info
    network_info = self._handle_network_info_model(network_info)

    if self.instances.pop(instance['id'], None):
        # NOTE(vish): use the passed info instead of the stored info
        self.network_infos.pop(instance['id'])
        self.remove_filters_for_instance(instance)
        self.iptables.apply()
    else:
        LOG.info(_('Attempted to unfilter instance which is not '
                   'filtered'), instance=instance)
def prepare_instance_filter(self, instance, network_info):
    """Record the instance, build its chains/rules, and apply iptables."""
    # make sure this is legacy nw_info
    network_info = self._handle_network_info_model(network_info)

    self.instances[instance['id']] = instance
    self.network_infos[instance['id']] = network_info
    self.add_filters_for_instance(instance)
    LOG.debug(_('Filters added to instance'), instance=instance)
    self.refresh_provider_fw_rules()
    LOG.debug(_('Provider Firewall Rules refreshed'), instance=instance)
    self.iptables.apply()
def _create_filter(self, ips, chain_name):
return ['-d %s -j $%s' % (ip, chain_name) for ip in ips]
def _filters_for_instance(self, chain_name, network_info):
    """Creates a rule corresponding to each ip that defines a
       jump to the corresponding instance - chain for all the traffic
       destined to that ip."""
    # make sure this is legacy nw_info
    network_info = self._handle_network_info_model(network_info)

    v4_addresses = []
    for _net, mapping in network_info:
        for ip in mapping['ips']:
            v4_addresses.append(ip['ip'])
    ipv4_rules = self._create_filter(v4_addresses, chain_name)

    ipv6_rules = []
    if FLAGS.use_ipv6:
        v6_addresses = []
        for _net, mapping in network_info:
            for ip in mapping['ip6s']:
                v6_addresses.append(ip['ip'])
        ipv6_rules = self._create_filter(v6_addresses, chain_name)

    return ipv4_rules, ipv6_rules
def _add_filters(self, chain_name, ipv4_rules, ipv6_rules):
    """Install the given rule strings into *chain_name* on both tables."""
    v4_table = self.iptables.ipv4['filter']
    for rule in ipv4_rules:
        v4_table.add_rule(chain_name, rule)

    if FLAGS.use_ipv6:
        v6_table = self.iptables.ipv6['filter']
        for rule in ipv6_rules:
            v6_table.add_rule(chain_name, rule)
def add_filters_for_instance(self, instance):
    """Create the instance's chain, hook it from 'local', and fill it
    with the instance's security-group rules."""
    network_info = self.network_infos[instance['id']]
    chain_name = self._instance_chain_name(instance)
    if FLAGS.use_ipv6:
        self.iptables.ipv6['filter'].add_chain(chain_name)
    self.iptables.ipv4['filter'].add_chain(chain_name)
    # Jump rules in 'local' steer traffic for the instance's IPs into
    # its per-instance chain.
    ipv4_rules, ipv6_rules = self._filters_for_instance(chain_name,
                                                        network_info)
    self._add_filters('local', ipv4_rules, ipv6_rules)
    ipv4_rules, ipv6_rules = self.instance_rules(instance, network_info)
    self._add_filters(chain_name, ipv4_rules, ipv6_rules)
def remove_filters_for_instance(self, instance):
    """Delete the instance's per-instance chain from each table."""
    chain = self._instance_chain_name(instance)
    tables = [self.iptables.ipv4]
    if FLAGS.use_ipv6:
        tables.append(self.iptables.ipv6)
    for table in tables:
        table['filter'].remove_chain(chain)
@staticmethod
def _security_group_chain_name(security_group_id):
return 'nova-sg-%s' % (security_group_id,)
def _instance_chain_name(self, instance):
return 'inst-%s' % (instance['id'],)
def _do_basic_rules(self, ipv4_rules, ipv6_rules, network_info):
# Always drop invalid packets
ipv4_rules += ['-m state --state ' 'INVALID -j DROP']
ipv6_rules += ['-m state --state ' 'INVALID -j DROP']
# Allow established connections
ipv4_rules += ['-m state --state ESTABLISHED,RELATED -j ACCEPT']
ipv6_rules += ['-m state --state ESTABLISHED,RELATED -j ACCEPT']
# Pass through provider-wide drops
ipv4_rules += ['-j $provider']
ipv6_rules += ['-j $provider']
def _do_dhcp_rules(self, ipv4_rules, network_info):
    """Allow DHCP replies (udp 67 -> 68) from each network's server."""
    # make sure this is legacy nw_info
    network_info = self._handle_network_info_model(network_info)

    for _net, info in network_info:
        server = info['dhcp_server']
        if server:
            ipv4_rules.append('-s %s -p udp --sport 67 --dport 68 '
                              '-j ACCEPT' % (server,))
def _do_project_network_rules(self, ipv4_rules, ipv6_rules, network_info):
    """Accept all traffic sourced from the instance's own networks."""
    # make sure this is legacy nw_info
    network_info = self._handle_network_info_model(network_info)

    for network, _info in network_info:
        ipv4_rules.append('-s %s -j ACCEPT' % (network['cidr'],))

    if FLAGS.use_ipv6:
        for network, _info in network_info:
            ipv6_rules.append('-s %s -j ACCEPT' % (network['cidr_v6'],))
def _do_ra_rules(self, ipv6_rules, network_info):
    """Accept ICMPv6 (router advertisements) from each IPv6 gateway."""
    # make sure this is legacy nw_info
    network_info = self._handle_network_info_model(network_info)

    for _net, mapping in network_info:
        ipv6_rules.append(
            '-s %s/128 -p icmpv6 -j ACCEPT' % (mapping['gateway_v6'],))
def _build_icmp_rule(self, rule, version):
icmp_type = rule.from_port
icmp_code = rule.to_port
if icmp_type == -1:
icmp_type_arg = None
else:
icmp_type_arg = '%s' % icmp_type
if not icmp_code == -1:
icmp_type_arg += '/%s' % icmp_code
if icmp_type_arg:
if version == 4:
return ['-m', 'icmp', '--icmp-type', icmp_type_arg]
elif version == 6:
return ['-m', 'icmp6', '--icmpv6-type', icmp_type_arg]
# return empty list if icmp_type == -1
return []
def _build_tcp_udp_rule(self, rule, version):
if rule.from_port == rule.to_port:
return ['--dport', '%s' % (rule.from_port,)]
else:
return ['-m', 'multiport',
'--dports', '%s:%s' % (rule.from_port,
rule.to_port)]
def instance_rules(self, instance, network_info):
    """Build the (ipv4_rules, ipv6_rules) lists for one instance.

    Combines the stateful baseline, DHCP allowance, optional same-network
    traffic, RA acceptance, and all of the instance's security-group
    rules, terminated by a jump to the sg-fallback DROP chain.
    """
    # make sure this is legacy nw_info
    network_info = self._handle_network_info_model(network_info)

    ctxt = context.get_admin_context()

    ipv4_rules = []
    ipv6_rules = []

    # Initialize with basic rules
    self._do_basic_rules(ipv4_rules, ipv6_rules, network_info)
    # Set up rules to allow traffic to/from DHCP server
    self._do_dhcp_rules(ipv4_rules, network_info)

    #Allow project network traffic
    if FLAGS.allow_same_net_traffic:
        self._do_project_network_rules(ipv4_rules, ipv6_rules,
                                       network_info)
    # We wrap these in FLAGS.use_ipv6 because they might cause
    # a DB lookup. The other ones are just list operations, so
    # they're not worth the clutter.
    if FLAGS.use_ipv6:
        # Allow RA responses
        self._do_ra_rules(ipv6_rules, network_info)

    security_groups = db.security_group_get_by_instance(ctxt,
                                                        instance['id'])

    # then, security group chains and rules
    for security_group in security_groups:
        rules = db.security_group_rule_get_by_security_group(ctxt,
                                                      security_group['id'])

        for rule in rules:
            LOG.debug(_('Adding security group rule: %r'), rule,
                      instance=instance)

            if not rule.cidr:
                version = 4
            else:
                version = netutils.get_ip_version(rule.cidr)

            if version == 4:
                fw_rules = ipv4_rules
            else:
                fw_rules = ipv6_rules

            protocol = rule.protocol
            if version == 6 and rule.protocol == 'icmp':
                protocol = 'icmpv6'

            args = ['-j ACCEPT']
            if protocol:
                args += ['-p', protocol]

            if protocol in ['udp', 'tcp']:
                args += self._build_tcp_udp_rule(rule, version)
            elif protocol == 'icmp':
                args += self._build_icmp_rule(rule, version)
            if rule.cidr:
                LOG.debug('Using cidr %r', rule.cidr, instance=instance)
                args += ['-s', rule.cidr]
                fw_rules += [' '.join(args)]
            else:
                if rule['grantee_group']:
                    # FIXME(jkoelker) This needs to be ported up into
                    #                 the compute manager which already
                    #                 has access to a nw_api handle,
                    #                 and should be the only one making
                    #                 making rpc calls.
                    import nova.network
                    nw_api = nova.network.API()
                    # BUG FIX: this loop variable used to be named
                    # ``instance``, shadowing the method argument and
                    # mis-attributing every later log call (and any later
                    # iteration's data) to the wrong instance.
                    for member in rule['grantee_group']['instances']:
                        nw_info = nw_api.get_instance_nw_info(ctxt,
                                                              member)

                        ips = [ip['address']
                               for ip in nw_info.fixed_ips()
                               if ip['version'] == version]

                        LOG.debug('ips: %r', ips, instance=instance)
                        for ip in ips:
                            subrule = args + ['-s %s' % ip]
                            fw_rules += [' '.join(subrule)]

            LOG.debug('Using fw_rules: %r', fw_rules, instance=instance)

    ipv4_rules += ['-j $sg-fallback']
    ipv6_rules += ['-j $sg-fallback']

    return ipv4_rules, ipv6_rules
def instance_filter_exists(self, instance, network_info):
pass
def refresh_security_group_members(self, security_group):
self.do_refresh_security_group_rules(security_group)
self.iptables.apply()
def refresh_security_group_rules(self, security_group):
self.do_refresh_security_group_rules(security_group)
self.iptables.apply()
@utils.synchronized('iptables', external=True)
def do_refresh_security_group_rules(self, security_group):
for instance in self.instances.values():
self.remove_filters_for_instance(instance)
self.add_filters_for_instance(instance)
def refresh_provider_fw_rules(self):
"""See :class:`FirewallDriver` docs."""
self._do_refresh_provider_fw_rules()
self.iptables.apply()
@utils.synchronized('iptables', external=True)
def _do_refresh_provider_fw_rules(self):
"""Internal, synchronized version of refresh_provider_fw_rules."""
self._purge_provider_fw_rules()
self._build_provider_fw_rules()
def _purge_provider_fw_rules(self):
"""Remove all rules from the provider chains."""
self.iptables.ipv4['filter'].empty_chain('provider')
if FLAGS.use_ipv6:
self.iptables.ipv6['filter'].empty_chain('provider')
def _build_provider_fw_rules(self):
"""Create all rules for the provider IP DROPs."""
self.iptables.ipv4['filter'].add_chain('provider')
if FLAGS.use_ipv6:
self.iptables.ipv6['filter'].add_chain('provider')
ipv4_rules, ipv6_rules = self._provider_rules()
for rule in ipv4_rules:
self.iptables.ipv4['filter'].add_rule('provider', rule)
if FLAGS.use_ipv6:
for rule in ipv6_rules:
self.iptables.ipv6['filter'].add_rule('provider', rule)
@staticmethod
def _provider_rules():
"""Generate a list of rules from provider for IP4 & IP6."""
ctxt = context.get_admin_context()
ipv4_rules = []
ipv6_rules = []
rules = db.provider_fw_rule_get_all(ctxt)
for rule in rules:
LOG.debug(_('Adding provider rule: %s'), rule['cidr'])
version = netutils.get_ip_version(rule['cidr'])
if version == 4:
fw_rules = ipv4_rules
else:
fw_rules = ipv6_rules
protocol = rule['protocol']
if version == 6 and protocol == 'icmp':
protocol = 'icmpv6'
args = ['-p', protocol, '-s', rule['cidr']]
if protocol in ['udp', 'tcp']:
if rule['from_port'] == rule['to_port']:
args += ['--dport', '%s' % (rule['from_port'],)]
else:
args += ['-m', 'multiport',
'--dports', '%s:%s' % (rule['from_port'],
rule['to_port'])]
elif protocol == 'icmp':
icmp_type = rule['from_port']
icmp_code = rule['to_port']
if icmp_type == -1:
icmp_type_arg = None
else:
icmp_type_arg = '%s' % icmp_type
if not icmp_code == -1:
icmp_type_arg += '/%s' % icmp_code
if icmp_type_arg:
if version == 4:
args += ['-m', 'icmp', '--icmp-type',
icmp_type_arg]
elif version == 6:
args += ['-m', 'icmp6', '--icmpv6-type',
icmp_type_arg]
args += ['-j DROP']
fw_rules += [' '.join(args)]
return ipv4_rules, ipv6_rules
class NoopFirewallDriver(object):
"""Firewall driver which just provides No-op methods."""
def __init__(*args, **kwargs):
pass
def _noop(*args, **kwargs):
pass
def __getattr__(self, key):
return self._noop
def instance_filter_exists(self, instance, network_info):
return True
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3660_2 |
crossvul-python_data_bad_1739_1 | """A contents manager that uses the local file system for storage."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import io
import os
import shutil
import mimetypes
from tornado import web
from .filecheckpoints import FileCheckpoints
from .fileio import FileManagerMixin
from .manager import ContentsManager
from IPython import nbformat
from IPython.utils.importstring import import_item
from IPython.utils.traitlets import Any, Unicode, Bool, TraitError
from IPython.utils.py3compat import getcwd, string_types
from IPython.utils import tz
from IPython.html.utils import (
is_hidden,
to_api_path,
)
_script_exporter = None
def _post_save_script(model, os_path, contents_manager, **kwargs):
"""convert notebooks to Python script after save with nbconvert
replaces `ipython notebook --script`
"""
from IPython.nbconvert.exporters.script import ScriptExporter
if model['type'] != 'notebook':
return
global _script_exporter
if _script_exporter is None:
_script_exporter = ScriptExporter(parent=contents_manager)
log = contents_manager.log
base, ext = os.path.splitext(os_path)
py_fname = base + '.py'
script, resources = _script_exporter.from_filename(os_path)
script_fname = base + resources.get('output_extension', '.txt')
log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir))
with io.open(script_fname, 'w', encoding='utf-8') as f:
f.write(script)
class FileContentsManager(FileManagerMixin, ContentsManager):
root_dir = Unicode(config=True)
def _root_dir_default(self):
try:
return self.parent.notebook_dir
except AttributeError:
return getcwd()
save_script = Bool(False, config=True, help='DEPRECATED, use post_save_hook')
def _save_script_changed(self):
self.log.warn("""
`--script` is deprecated. You can trigger nbconvert via pre- or post-save hooks:
ContentsManager.pre_save_hook
FileContentsManager.post_save_hook
A post-save hook has been registered that calls:
ipython nbconvert --to script [notebook]
which behaves similarly to `--script`.
""")
self.post_save_hook = _post_save_script
post_save_hook = Any(None, config=True,
help="""Python callable or importstring thereof
to be called on the path of a file just saved.
This can be used to process the file on disk,
such as converting the notebook to a script or HTML via nbconvert.
It will be called as (all arguments passed by keyword)::
hook(os_path=os_path, model=model, contents_manager=instance)
- path: the filesystem path to the file just written
- model: the model representing the file
- contents_manager: this ContentsManager instance
"""
)
def _post_save_hook_changed(self, name, old, new):
if new and isinstance(new, string_types):
self.post_save_hook = import_item(self.post_save_hook)
elif new:
if not callable(new):
raise TraitError("post_save_hook must be callable")
def run_post_save_hook(self, model, os_path):
"""Run the post-save hook if defined, and log errors"""
if self.post_save_hook:
try:
self.log.debug("Running post-save hook on %s", os_path)
self.post_save_hook(os_path=os_path, model=model, contents_manager=self)
except Exception:
self.log.error("Post-save hook failed on %s", os_path, exc_info=True)
def _root_dir_changed(self, name, old, new):
"""Do a bit of validation of the root_dir."""
if not os.path.isabs(new):
# If we receive a non-absolute path, make it absolute.
self.root_dir = os.path.abspath(new)
return
if not os.path.isdir(new):
raise TraitError("%r is not a directory" % new)
def _checkpoints_class_default(self):
return FileCheckpoints
def is_hidden(self, path):
"""Does the API style path correspond to a hidden directory or file?
Parameters
----------
path : string
The path to check. This is an API path (`/` separated,
relative to root_dir).
Returns
-------
hidden : bool
Whether the path exists and is hidden.
"""
path = path.strip('/')
os_path = self._get_os_path(path=path)
return is_hidden(os_path, self.root_dir)
def file_exists(self, path):
"""Returns True if the file exists, else returns False.
API-style wrapper for os.path.isfile
Parameters
----------
path : string
The relative path to the file (with '/' as separator)
Returns
-------
exists : bool
Whether the file exists.
"""
path = path.strip('/')
os_path = self._get_os_path(path)
return os.path.isfile(os_path)
def dir_exists(self, path):
"""Does the API-style path refer to an extant directory?
API-style wrapper for os.path.isdir
Parameters
----------
path : string
The path to check. This is an API path (`/` separated,
relative to root_dir).
Returns
-------
exists : bool
Whether the path is indeed a directory.
"""
path = path.strip('/')
os_path = self._get_os_path(path=path)
return os.path.isdir(os_path)
def exists(self, path):
"""Returns True if the path exists, else returns False.
API-style wrapper for os.path.exists
Parameters
----------
path : string
The API path to the file (with '/' as separator)
Returns
-------
exists : bool
Whether the target exists.
"""
path = path.strip('/')
os_path = self._get_os_path(path=path)
return os.path.exists(os_path)
def _base_model(self, path):
"""Build the common base of a contents model"""
os_path = self._get_os_path(path)
info = os.stat(os_path)
last_modified = tz.utcfromtimestamp(info.st_mtime)
created = tz.utcfromtimestamp(info.st_ctime)
# Create the base model.
model = {}
model['name'] = path.rsplit('/', 1)[-1]
model['path'] = path
model['last_modified'] = last_modified
model['created'] = created
model['content'] = None
model['format'] = None
model['mimetype'] = None
try:
model['writable'] = os.access(os_path, os.W_OK)
except OSError:
self.log.error("Failed to check write permissions on %s", os_path)
model['writable'] = False
return model
def _dir_model(self, path, content=True):
"""Build a model for a directory
if content is requested, will include a listing of the directory
"""
os_path = self._get_os_path(path)
four_o_four = u'directory does not exist: %r' % path
if not os.path.isdir(os_path):
raise web.HTTPError(404, four_o_four)
elif is_hidden(os_path, self.root_dir):
self.log.info("Refusing to serve hidden directory %r, via 404 Error",
os_path
)
raise web.HTTPError(404, four_o_four)
model = self._base_model(path)
model['type'] = 'directory'
if content:
model['content'] = contents = []
os_dir = self._get_os_path(path)
for name in os.listdir(os_dir):
os_path = os.path.join(os_dir, name)
# skip over broken symlinks in listing
if not os.path.exists(os_path):
self.log.warn("%s doesn't exist", os_path)
continue
elif not os.path.isfile(os_path) and not os.path.isdir(os_path):
self.log.debug("%s not a regular file", os_path)
continue
if self.should_list(name) and not is_hidden(os_path, self.root_dir):
contents.append(self.get(
path='%s/%s' % (path, name),
content=False)
)
model['format'] = 'json'
return model
def _file_model(self, path, content=True, format=None):
"""Build a model for a file
if content is requested, include the file contents.
format:
If 'text', the contents will be decoded as UTF-8.
If 'base64', the raw bytes contents will be encoded as base64.
If not specified, try to decode as UTF-8, and fall back to base64
"""
model = self._base_model(path)
model['type'] = 'file'
os_path = self._get_os_path(path)
if content:
content, format = self._read_file(os_path, format)
default_mime = {
'text': 'text/plain',
'base64': 'application/octet-stream'
}[format]
model.update(
content=content,
format=format,
mimetype=mimetypes.guess_type(os_path)[0] or default_mime,
)
return model
def _notebook_model(self, path, content=True):
"""Build a notebook model
if content is requested, the notebook content will be populated
as a JSON structure (not double-serialized)
"""
model = self._base_model(path)
model['type'] = 'notebook'
if content:
os_path = self._get_os_path(path)
nb = self._read_notebook(os_path, as_version=4)
self.mark_trusted_cells(nb, path)
model['content'] = nb
model['format'] = 'json'
self.validate_notebook_model(model)
return model
def get(self, path, content=True, type=None, format=None):
""" Takes a path for an entity and returns its model
Parameters
----------
path : str
the API path that describes the relative path for the target
content : bool
Whether to include the contents in the reply
type : str, optional
The requested type - 'file', 'notebook', or 'directory'.
Will raise HTTPError 400 if the content doesn't match.
format : str, optional
The requested format for file contents. 'text' or 'base64'.
Ignored if this returns a notebook or directory model.
Returns
-------
model : dict
the contents model. If content=True, returns the contents
of the file or directory as well.
"""
path = path.strip('/')
if not self.exists(path):
raise web.HTTPError(404, u'No such file or directory: %s' % path)
os_path = self._get_os_path(path)
if os.path.isdir(os_path):
if type not in (None, 'directory'):
raise web.HTTPError(400,
u'%s is a directory, not a %s' % (path, type), reason='bad type')
model = self._dir_model(path, content=content)
elif type == 'notebook' or (type is None and path.endswith('.ipynb')):
model = self._notebook_model(path, content=content)
else:
if type == 'directory':
raise web.HTTPError(400,
u'%s is not a directory' % path, reason='bad type')
model = self._file_model(path, content=content, format=format)
return model
def _save_directory(self, os_path, model, path=''):
"""create a directory"""
if is_hidden(os_path, self.root_dir):
raise web.HTTPError(400, u'Cannot create hidden directory %r' % os_path)
if not os.path.exists(os_path):
with self.perm_to_403():
os.mkdir(os_path)
elif not os.path.isdir(os_path):
raise web.HTTPError(400, u'Not a directory: %s' % (os_path))
else:
self.log.debug("Directory %r already exists", os_path)
def save(self, model, path=''):
"""Save the file model and return the model with no content."""
path = path.strip('/')
if 'type' not in model:
raise web.HTTPError(400, u'No file type provided')
if 'content' not in model and model['type'] != 'directory':
raise web.HTTPError(400, u'No file content provided')
os_path = self._get_os_path(path)
self.log.debug("Saving %s", os_path)
self.run_pre_save_hook(model=model, path=path)
try:
if model['type'] == 'notebook':
nb = nbformat.from_dict(model['content'])
self.check_and_sign(nb, path)
self._save_notebook(os_path, nb)
# One checkpoint should always exist for notebooks.
if not self.checkpoints.list_checkpoints(path):
self.create_checkpoint(path)
elif model['type'] == 'file':
# Missing format will be handled internally by _save_file.
self._save_file(os_path, model['content'], model.get('format'))
elif model['type'] == 'directory':
self._save_directory(os_path, model, path)
else:
raise web.HTTPError(400, "Unhandled contents type: %s" % model['type'])
except web.HTTPError:
raise
except Exception as e:
self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True)
raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e))
validation_message = None
if model['type'] == 'notebook':
self.validate_notebook_model(model)
validation_message = model.get('message', None)
model = self.get(path, content=False)
if validation_message:
model['message'] = validation_message
self.run_post_save_hook(model=model, os_path=os_path)
return model
def delete_file(self, path):
"""Delete file at path."""
path = path.strip('/')
os_path = self._get_os_path(path)
rm = os.unlink
if os.path.isdir(os_path):
listing = os.listdir(os_path)
# Don't delete non-empty directories.
# A directory containing only leftover checkpoints is
# considered empty.
cp_dir = getattr(self.checkpoints, 'checkpoint_dir', None)
for entry in listing:
if entry != cp_dir:
raise web.HTTPError(400, u'Directory %s not empty' % os_path)
elif not os.path.isfile(os_path):
raise web.HTTPError(404, u'File does not exist: %s' % os_path)
if os.path.isdir(os_path):
self.log.debug("Removing directory %s", os_path)
with self.perm_to_403():
shutil.rmtree(os_path)
else:
self.log.debug("Unlinking file %s", os_path)
with self.perm_to_403():
rm(os_path)
def rename_file(self, old_path, new_path):
"""Rename a file."""
old_path = old_path.strip('/')
new_path = new_path.strip('/')
if new_path == old_path:
return
new_os_path = self._get_os_path(new_path)
old_os_path = self._get_os_path(old_path)
# Should we proceed with the move?
if os.path.exists(new_os_path):
raise web.HTTPError(409, u'File already exists: %s' % new_path)
# Move the file
try:
with self.perm_to_403():
shutil.move(old_os_path, new_os_path)
except web.HTTPError:
raise
except Exception as e:
raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % (old_path, e))
def info_string(self):
return "Serving notebooks from local directory: %s" % self.root_dir
def get_kernel_path(self, path, model=None):
"""Return the initial API path of a kernel associated with a given notebook"""
if '/' in path:
parent_dir = path.rsplit('/', 1)[0]
else:
parent_dir = ''
return parent_dir
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_1739_1 |
crossvul-python_data_bad_3499_0 | 404: Not Found | ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3499_0 |
crossvul-python_data_good_1065_0 | """Header value parser implementing various email-related RFC parsing rules.
The parsing methods defined in this module implement various email related
parsing rules. Principal among them is RFC 5322, which is the followon
to RFC 2822 and primarily a clarification of the former. It also implements
RFC 2047 encoded word decoding.
RFC 5322 goes to considerable trouble to maintain backward compatibility with
RFC 822 in the parse phase, while cleaning up the structure on the generation
phase. This parser supports correct RFC 5322 generation by tagging white space
as folding white space only when folding is allowed in the non-obsolete rule
sets. Actually, the parser is even more generous when accepting input than RFC
5322 mandates, following the spirit of Postel's Law, which RFC 5322 encourages.
Where possible deviations from the standard are annotated on the 'defects'
attribute of tokens that deviate.
The general structure of the parser follows RFC 5322, and uses its terminology
where there is a direct correspondence. Where the implementation requires a
somewhat different structure than that used by the formal grammar, new terms
that mimic the closest existing terms are used. Thus, it really helps to have
a copy of RFC 5322 handy when studying this code.
Input to the parser is a string that has already been unfolded according to
RFC 5322 rules. According to the RFC this unfolding is the very first step, and
this parser leaves the unfolding step to a higher level message parser, which
will have already detected the line breaks that need unfolding while
determining the beginning and end of each header.
The output of the parser is a TokenList object, which is a list subclass. A
TokenList is a recursive data structure. The terminal nodes of the structure
are Terminal objects, which are subclasses of str. These do not correspond
directly to terminal objects in the formal grammar, but are instead more
practical higher level combinations of true terminals.
All TokenList and Terminal objects have a 'value' attribute, which produces the
semantically meaningful value of that part of the parse subtree. The value of
all whitespace tokens (no matter how many sub-tokens they may contain) is a
single space, as per the RFC rules. This includes 'CFWS', which is herein
included in the general class of whitespace tokens. There is one exception to
the rule that whitespace tokens are collapsed into single spaces in values: in
the value of a 'bare-quoted-string' (a quoted-string with no leading or
trailing whitespace), any whitespace that appeared between the quotation marks
is preserved in the returned value. Note that in all Terminal strings quoted
pairs are turned into their unquoted values.
All TokenList and Terminal objects also have a string value, which attempts to
be a "canonical" representation of the RFC-compliant form of the substring that
produced the parsed subtree, including minimal use of quoted pair quoting.
Whitespace runs are not collapsed.
Comment tokens also have a 'content' attribute providing the string found
between the parens (including any nested comments) with whitespace preserved.
All TokenList and Terminal objects have a 'defects' attribute which is a
possibly empty list all of the defects found while creating the token. Defects
may appear on any token in the tree, and a composite list of all defects in the
subtree is available through the 'all_defects' attribute of any node. (For
Terminal notes x.defects == x.all_defects.)
Each object in a parse tree is called a 'token', and each has a 'token_type'
attribute that gives the name from the RFC 5322 grammar that it represents.
Not all RFC 5322 nodes are produced, and there is one non-RFC 5322 node that
may be produced: 'ptext'. A 'ptext' is a string of printable ascii characters.
It is returned in place of lists of (ctext/quoted-pair) and
(qtext/quoted-pair).
XXX: provide complete list of token types.
"""
import re
import sys
import urllib # For urllib.parse.unquote
from string import hexdigits
from operator import itemgetter
from email import _encoded_words as _ew
from email import errors
from email import utils
#
# Useful constants and functions
#
WSP = set(' \t')
CFWS_LEADER = WSP | set('(')
SPECIALS = set(r'()<>@,:;.\"[]')
ATOM_ENDS = SPECIALS | WSP
DOT_ATOM_ENDS = ATOM_ENDS - set('.')
# '.', '"', and '(' do not end phrases in order to support obs-phrase
PHRASE_ENDS = SPECIALS - set('."(')
TSPECIALS = (SPECIALS | set('/?=')) - set('.')
TOKEN_ENDS = TSPECIALS | WSP
ASPECIALS = TSPECIALS | set("*'%")
ATTRIBUTE_ENDS = ASPECIALS | WSP
EXTENDED_ATTRIBUTE_ENDS = ATTRIBUTE_ENDS - set('%')
def quote_string(value):
return '"'+str(value).replace('\\', '\\\\').replace('"', r'\"')+'"'
# Match a RFC 2047 word, looks like =?utf-8?q?someword?=
rfc2047_matcher = re.compile(r'''
=\? # literal =?
[^?]* # charset
\? # literal ?
[qQbB] # literal 'q' or 'b', case insensitive
\? # literal ?
.*? # encoded word
\?= # literal ?=
''', re.VERBOSE | re.MULTILINE)
#
# TokenList and its subclasses
#
class TokenList(list):
token_type = None
syntactic_break = True
ew_combine_allowed = True
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
self.defects = []
def __str__(self):
return ''.join(str(x) for x in self)
def __repr__(self):
return '{}({})'.format(self.__class__.__name__,
super().__repr__())
@property
def value(self):
return ''.join(x.value for x in self if x.value)
@property
def all_defects(self):
return sum((x.all_defects for x in self), self.defects)
def startswith_fws(self):
return self[0].startswith_fws()
@property
def as_ew_allowed(self):
"""True if all top level tokens of this part may be RFC2047 encoded."""
return all(part.as_ew_allowed for part in self)
@property
def comments(self):
comments = []
for token in self:
comments.extend(token.comments)
return comments
def fold(self, *, policy):
return _refold_parse_tree(self, policy=policy)
def pprint(self, indent=''):
print(self.ppstr(indent=indent))
def ppstr(self, indent=''):
return '\n'.join(self._pp(indent=indent))
def _pp(self, indent=''):
yield '{}{}/{}('.format(
indent,
self.__class__.__name__,
self.token_type)
for token in self:
if not hasattr(token, '_pp'):
yield (indent + ' !! invalid element in token '
'list: {!r}'.format(token))
else:
yield from token._pp(indent+' ')
if self.defects:
extra = ' Defects: {}'.format(self.defects)
else:
extra = ''
yield '{}){}'.format(indent, extra)
class WhiteSpaceTokenList(TokenList):
@property
def value(self):
return ' '
@property
def comments(self):
return [x.content for x in self if x.token_type=='comment']
class UnstructuredTokenList(TokenList):
token_type = 'unstructured'
class Phrase(TokenList):
token_type = 'phrase'
class Word(TokenList):
token_type = 'word'
class CFWSList(WhiteSpaceTokenList):
token_type = 'cfws'
class Atom(TokenList):
token_type = 'atom'
class Token(TokenList):
token_type = 'token'
encode_as_ew = False
class EncodedWord(TokenList):
token_type = 'encoded-word'
cte = None
charset = None
lang = None
class QuotedString(TokenList):
token_type = 'quoted-string'
@property
def content(self):
for x in self:
if x.token_type == 'bare-quoted-string':
return x.value
@property
def quoted_value(self):
res = []
for x in self:
if x.token_type == 'bare-quoted-string':
res.append(str(x))
else:
res.append(x.value)
return ''.join(res)
@property
def stripped_value(self):
for token in self:
if token.token_type == 'bare-quoted-string':
return token.value
class BareQuotedString(QuotedString):
token_type = 'bare-quoted-string'
def __str__(self):
return quote_string(''.join(str(x) for x in self))
@property
def value(self):
return ''.join(str(x) for x in self)
class Comment(WhiteSpaceTokenList):
token_type = 'comment'
def __str__(self):
return ''.join(sum([
["("],
[self.quote(x) for x in self],
[")"],
], []))
def quote(self, value):
if value.token_type == 'comment':
return str(value)
return str(value).replace('\\', '\\\\').replace(
'(', r'\(').replace(
')', r'\)')
@property
def content(self):
return ''.join(str(x) for x in self)
@property
def comments(self):
return [self.content]
class AddressList(TokenList):
token_type = 'address-list'
@property
def addresses(self):
return [x for x in self if x.token_type=='address']
@property
def mailboxes(self):
return sum((x.mailboxes
for x in self if x.token_type=='address'), [])
@property
def all_mailboxes(self):
return sum((x.all_mailboxes
for x in self if x.token_type=='address'), [])
class Address(TokenList):
token_type = 'address'
@property
def display_name(self):
if self[0].token_type == 'group':
return self[0].display_name
@property
def mailboxes(self):
if self[0].token_type == 'mailbox':
return [self[0]]
elif self[0].token_type == 'invalid-mailbox':
return []
return self[0].mailboxes
@property
def all_mailboxes(self):
if self[0].token_type == 'mailbox':
return [self[0]]
elif self[0].token_type == 'invalid-mailbox':
return [self[0]]
return self[0].all_mailboxes
class MailboxList(TokenList):
token_type = 'mailbox-list'
@property
def mailboxes(self):
return [x for x in self if x.token_type=='mailbox']
@property
def all_mailboxes(self):
return [x for x in self
if x.token_type in ('mailbox', 'invalid-mailbox')]
class GroupList(TokenList):
token_type = 'group-list'
@property
def mailboxes(self):
if not self or self[0].token_type != 'mailbox-list':
return []
return self[0].mailboxes
@property
def all_mailboxes(self):
if not self or self[0].token_type != 'mailbox-list':
return []
return self[0].all_mailboxes
class Group(TokenList):
token_type = "group"
@property
def mailboxes(self):
if self[2].token_type != 'group-list':
return []
return self[2].mailboxes
@property
def all_mailboxes(self):
if self[2].token_type != 'group-list':
return []
return self[2].all_mailboxes
@property
def display_name(self):
return self[0].display_name
class NameAddr(TokenList):
token_type = 'name-addr'
@property
def display_name(self):
if len(self) == 1:
return None
return self[0].display_name
@property
def local_part(self):
return self[-1].local_part
@property
def domain(self):
return self[-1].domain
@property
def route(self):
return self[-1].route
@property
def addr_spec(self):
return self[-1].addr_spec
class AngleAddr(TokenList):
token_type = 'angle-addr'
@property
def local_part(self):
for x in self:
if x.token_type == 'addr-spec':
return x.local_part
@property
def domain(self):
for x in self:
if x.token_type == 'addr-spec':
return x.domain
@property
def route(self):
for x in self:
if x.token_type == 'obs-route':
return x.domains
@property
def addr_spec(self):
for x in self:
if x.token_type == 'addr-spec':
if x.local_part:
return x.addr_spec
else:
return quote_string(x.local_part) + x.addr_spec
else:
return '<>'
class ObsRoute(TokenList):
token_type = 'obs-route'
@property
def domains(self):
return [x.domain for x in self if x.token_type == 'domain']
class Mailbox(TokenList):
token_type = 'mailbox'
@property
def display_name(self):
if self[0].token_type == 'name-addr':
return self[0].display_name
@property
def local_part(self):
return self[0].local_part
@property
def domain(self):
return self[0].domain
@property
def route(self):
if self[0].token_type == 'name-addr':
return self[0].route
@property
def addr_spec(self):
return self[0].addr_spec
class InvalidMailbox(TokenList):
token_type = 'invalid-mailbox'
@property
def display_name(self):
return None
local_part = domain = route = addr_spec = display_name
class Domain(TokenList):
token_type = 'domain'
as_ew_allowed = False
@property
def domain(self):
return ''.join(super().value.split())
class DotAtom(TokenList):
token_type = 'dot-atom'
class DotAtomText(TokenList):
token_type = 'dot-atom-text'
as_ew_allowed = True
class NoFoldLiteral(TokenList):
token_type = 'no-fold-literal'
as_ew_allowed = False
class AddrSpec(TokenList):
token_type = 'addr-spec'
as_ew_allowed = False
@property
def local_part(self):
return self[0].local_part
@property
def domain(self):
if len(self) < 3:
return None
return self[-1].domain
@property
def value(self):
if len(self) < 3:
return self[0].value
return self[0].value.rstrip()+self[1].value+self[2].value.lstrip()
@property
def addr_spec(self):
nameset = set(self.local_part)
if len(nameset) > len(nameset-DOT_ATOM_ENDS):
lp = quote_string(self.local_part)
else:
lp = self.local_part
if self.domain is not None:
return lp + '@' + self.domain
return lp
class ObsLocalPart(TokenList):
token_type = 'obs-local-part'
as_ew_allowed = False
class DisplayName(Phrase):
token_type = 'display-name'
ew_combine_allowed = False
@property
def display_name(self):
res = TokenList(self)
if res[0].token_type == 'cfws':
res.pop(0)
else:
if res[0][0].token_type == 'cfws':
res[0] = TokenList(res[0][1:])
if res[-1].token_type == 'cfws':
res.pop()
else:
if res[-1][-1].token_type == 'cfws':
res[-1] = TokenList(res[-1][:-1])
return res.value
@property
def value(self):
quote = False
if self.defects:
quote = True
else:
for x in self:
if x.token_type == 'quoted-string':
quote = True
if quote:
pre = post = ''
if self[0].token_type=='cfws' or self[0][0].token_type=='cfws':
pre = ' '
if self[-1].token_type=='cfws' or self[-1][-1].token_type=='cfws':
post = ' '
return pre+quote_string(self.display_name)+post
else:
return super().value
class LocalPart(TokenList):
token_type = 'local-part'
as_ew_allowed = False
@property
def value(self):
if self[0].token_type == "quoted-string":
return self[0].quoted_value
else:
return self[0].value
@property
def local_part(self):
# Strip whitespace from front, back, and around dots.
res = [DOT]
last = DOT
last_is_tl = False
for tok in self[0] + [DOT]:
if tok.token_type == 'cfws':
continue
if (last_is_tl and tok.token_type == 'dot' and
last[-1].token_type == 'cfws'):
res[-1] = TokenList(last[:-1])
is_tl = isinstance(tok, TokenList)
if (is_tl and last.token_type == 'dot' and
tok[0].token_type == 'cfws'):
res.append(TokenList(tok[1:]))
else:
res.append(tok)
last = res[-1]
last_is_tl = is_tl
res = TokenList(res[1:-1])
return res.value
class DomainLiteral(TokenList):
    """An RFC 5322 domain-literal such as ``[192.168.0.1]``."""
    token_type = 'domain-literal'
    as_ew_allowed = False
    @property
    def domain(self):
        # The bracketed form with all whitespace collapsed out.
        return ''.join(super().value.split())
    @property
    def ip(self):
        # The address text is carried by the 'ptext' sub-token, if any.
        for token in self:
            if token.token_type == 'ptext':
                return token.value
class MIMEVersion(TokenList):
    """A MIME-Version header value; major/minor are filled in by the parser."""
    token_type = 'mime-version'
    major = None
    minor = None
class Parameter(TokenList):
    token_type = 'parameter'
    sectioned = False
    extended = False
    charset = 'us-ascii'
    @property
    def section_number(self):
        # Because the first token, the attribute (name) eats CFWS, the second
        # token is always the section if there is one.
        return self[1].number if self.sectioned else 0
    @property
    def param_value(self):
        # This is part of the "handle quoted extended parameters" hack.
        # Note: the loop variable is deliberately re-bound while descending
        # into the quoted-string structure.
        for token in self:
            if token.token_type == 'value':
                return token.stripped_value
            if token.token_type == 'quoted-string':
                for token in token:
                    if token.token_type == 'bare-quoted-string':
                        for token in token:
                            if token.token_type == 'value':
                                return token.stripped_value
        return ''
class InvalidParameter(Parameter):
    """A parameter that could not be parsed as a valid MIME parameter."""
    token_type = 'invalid-parameter'
class Attribute(TokenList):
    """A MIME parameter name."""
    token_type = 'attribute'
    @property
    def stripped_value(self):
        # The text of the first attrtext-like sub-token (None if absent).
        return next((tok.value for tok in self
                     if tok.token_type.endswith('attrtext')), None)
class Section(TokenList):
    """An RFC 2231 section marker (``*N``) of a split parameter."""
    token_type = 'section'
    number = None
class Value(TokenList):
    """A MIME parameter value."""
    token_type = 'value'
    @property
    def stripped_value(self):
        # Skip leading CFWS, then unwrap quoting/encoding if present.
        first = self[1] if self[0].token_type == 'cfws' else self[0]
        if first.token_type.endswith(
                ('quoted-string', 'attribute', 'extended-attribute')):
            return first.stripped_value
        return self.value
class MimeParameters(TokenList):
    token_type = 'mime-parameters'
    syntactic_break = False
    @property
    def params(self):
        """Yield (name, value) pairs, reassembling RFC 2231 split/encoded
        parameters and applying best-effort error recovery."""
        # The RFC specifically states that the ordering of parameters is not
        # guaranteed and may be reordered by the transport layer.  So we have
        # to assume the RFC 2231 pieces can come in any order.  However, we
        # output them in the order that we first see a given name, which gives
        # us a stable __str__.
        params = {}  # Using order preserving dict from Python 3.7+
        for token in self:
            if not token.token_type.endswith('parameter'):
                continue
            if token[0].token_type != 'attribute':
                continue
            name = token[0].value.strip()
            if name not in params:
                params[name] = []
            params[name].append((token.section_number, token))
        for name, parts in params.items():
            parts = sorted(parts, key=itemgetter(0))
            first_param = parts[0][1]
            charset = first_param.charset
            # Our arbitrary error recovery is to ignore duplicate parameters,
            # to use appearance order if there are duplicate rfc 2231 parts,
            # and to ignore gaps.  This mimics the error recovery of get_param.
            if not first_param.extended and len(parts) > 1:
                if parts[1][0] == 0:
                    parts[1][1].defects.append(errors.InvalidHeaderDefect(
                        'duplicate parameter name; duplicate(s) ignored'))
                    parts = parts[:1]
                # Else assume the *0* was missing...note that this is different
                # from get_param, but we registered a defect for this earlier.
            value_parts = []
            i = 0
            for section_number, param in parts:
                if section_number != i:
                    # We could get fancier here and look for a complete
                    # duplicate extended parameter and ignore the second one
                    # seen.  But we're not doing that.  The old code didn't.
                    if not param.extended:
                        param.defects.append(errors.InvalidHeaderDefect(
                            'duplicate parameter name; duplicate ignored'))
                        continue
                    else:
                        param.defects.append(errors.InvalidHeaderDefect(
                            "inconsistent RFC2231 parameter numbering"))
                i += 1
                value = param.param_value
                if param.extended:
                    # Extended values are %-encoded bytes in some charset.
                    try:
                        value = urllib.parse.unquote_to_bytes(value)
                    except UnicodeEncodeError:
                        # source had surrogate escaped bytes.  What we do now
                        # is a bit of an open question.  I'm not sure this is
                        # the best choice, but it is what the old algorithm did
                        value = urllib.parse.unquote(value, encoding='latin-1')
                    else:
                        try:
                            value = value.decode(charset, 'surrogateescape')
                        except LookupError:
                            # XXX: there should really be a custom defect for
                            # unknown character set to make it easy to find,
                            # because otherwise unknown charset is a silent
                            # failure.
                            value = value.decode('us-ascii', 'surrogateescape')
                        if utils._has_surrogates(value):
                            param.defects.append(errors.UndecodableBytesDefect())
                value_parts.append(value)
            value = ''.join(value_parts)
            yield name, value
    def __str__(self):
        params = []
        for name, value in self.params:
            if value:
                params.append('{}={}'.format(name, quote_string(value)))
            else:
                params.append(name)
        params = '; '.join(params)
        return ' ' + params if params else ''
class ParameterizedHeaderValue(TokenList):
    """Base class for header values that carry a mime-parameters tail."""
    # Kept False so that the value doesn't wind up on a new line even
    # if it and the parameters would fit there but not on the first line.
    syntactic_break = False
    @property
    def params(self):
        # Delegate to the trailing mime-parameters token, if present.
        for sub in reversed(self):
            if sub.token_type == 'mime-parameters':
                return sub.params
        return {}
class ContentType(ParameterizedHeaderValue):
    """A Content-Type header value; defaults to text/plain."""
    token_type = 'content-type'
    as_ew_allowed = False
    maintype = 'text'
    subtype = 'plain'
class ContentDisposition(ParameterizedHeaderValue):
    """A Content-Disposition header value."""
    token_type = 'content-disposition'
    as_ew_allowed = False
    content_disposition = None
class ContentTransferEncoding(TokenList):
    """A Content-Transfer-Encoding header value; defaults to 7bit."""
    token_type = 'content-transfer-encoding'
    as_ew_allowed = False
    cte = '7bit'
class HeaderLabel(TokenList):
    """The header name plus the colon separator."""
    token_type = 'header-label'
    as_ew_allowed = False
class MsgID(TokenList):
    """An RFC 5322 msg-id; never folded across lines."""
    token_type = 'msg-id'
    as_ew_allowed = False
    def fold(self, policy):
        # message-id tokens may not be folded, so emit on one line
        # regardless of the policy's line-length limits.
        return str(self) + policy.linesep
class MessageID(MsgID):
    """The value of a Message-ID header."""
    token_type = 'message-id'
class Header(TokenList):
    """A complete header: label plus value."""
    token_type = 'header'
#
# Terminal classes and instances
#
class Terminal(str):
    """Base class for leaf tokens: a string augmented with parser metadata
    (a token_type label and a per-instance defects list)."""
    as_ew_allowed = True
    ew_combine_allowed = True
    syntactic_break = True
    def __new__(cls, value, token_type):
        instance = super().__new__(cls, value)
        instance.token_type = token_type
        instance.defects = []
        return instance
    def __repr__(self):
        return "{}({})".format(type(self).__name__, super().__repr__())
    def pprint(self):
        print("{}/{}".format(type(self).__name__, self.token_type))
    @property
    def all_defects(self):
        # A fresh list so callers can't mutate our defects by accident.
        return list(self.defects)
    def _pp(self, indent=''):
        defect_note = '' if not self.defects else ' {}'.format(self.defects)
        return ["{}{}/{}({}){}".format(
            indent,
            type(self).__name__,
            self.token_type,
            super().__repr__(),
            defect_note,
        )]
    def pop_trailing_ws(self):
        # Leaf tokens carry no trailing whitespace; this ends the recursion.
        return None
    @property
    def comments(self):
        return []
    def __getnewargs__(self):
        # Needed so copy/pickle can reconstruct the str subclass.
        return (str(self), self.token_type)
class WhiteSpaceTerminal(Terminal):
    """A run of whitespace; always serializes as a single space."""
    @property
    def value(self):
        return ' '
    def startswith_fws(self):
        return True
class ValueTerminal(Terminal):
    """A non-whitespace leaf token; its value is the string itself."""
    @property
    def value(self):
        return self
    def startswith_fws(self):
        return False
class EWWhiteSpaceTerminal(WhiteSpaceTerminal):
    """Whitespace between adjacent encoded words; serializes as nothing."""
    @property
    def value(self):
        return ''
    def __str__(self):
        return ''
# Shared singleton terminals used throughout the parser below.
# XXX these need to become classes and used as instances so
# that a program can't change them in a parse tree and screw
# up other parse trees. Maybe should have tests for that, too.
DOT = ValueTerminal('.', 'dot')
ListSeparator = ValueTerminal(',', 'list-separator')
RouteComponentMarker = ValueTerminal('@', 'route-component-marker')
#
# Parser
#
# Parse strings according to RFC822/2047/2822/5322 rules.
#
# This is a stateless parser. Each get_XXX function accepts a string and
# returns either a Terminal or a TokenList representing the RFC object named
# by the method and a string containing the remaining unparsed characters
# from the input. Thus a parser method consumes the next syntactic construct
# of a given type and returns a token representing the construct plus the
# unparsed remainder of the input string.
#
# For example, if the first element of a structured header is a 'phrase',
# then:
#
# phrase, value = get_phrase(value)
#
# returns the complete phrase from the start of the string value, plus any
# characters left in the string after the phrase is removed.
# Pre-compiled helpers for the get_* routines below, built once at import
# time.  Each binds the bound method (.split / .match / .findall) directly.
_wsp_splitter = re.compile(r'([{}]+)'.format(''.join(WSP))).split
_non_atom_end_matcher = re.compile(r"[^{}]+".format(
    re.escape(''.join(ATOM_ENDS)))).match
# Finds ASCII control characters and DEL (used for defect detection).
_non_printable_finder = re.compile(r"[\x00-\x20\x7F]").findall
_non_token_end_matcher = re.compile(r"[^{}]+".format(
    re.escape(''.join(TOKEN_ENDS)))).match
_non_attribute_end_matcher = re.compile(r"[^{}]+".format(
    re.escape(''.join(ATTRIBUTE_ENDS)))).match
_non_extended_attribute_end_matcher = re.compile(r"[^{}]+".format(
    re.escape(''.join(EXTENDED_ATTRIBUTE_ENDS)))).match
def _validate_xtext(xtext):
    """If input token contains ASCII non-printables, register a defect."""
    # xtext is a Terminal; defects are appended in place.
    non_printables = _non_printable_finder(xtext)
    if non_printables:
        xtext.defects.append(errors.NonPrintableDefect(non_printables))
    # Surrogates indicate undecodable source bytes that leaked through.
    if utils._has_surrogates(xtext):
        xtext.defects.append(errors.UndecodableBytesDefect(
            "Non-ASCII characters found in header token"))
def _get_ptext_to_endchars(value, endchars):
    """Scan printables/quoted-pairs until endchars and return unquoted ptext.
    This function turns a run of qcontent, ccontent-without-comments, or
    dtext-with-quoted-printables into a single string by unquoting any
    quoted printables.  It returns the string, the remaining value, and
    a flag that is True iff there were any quoted printables decoded.
    """
    # Only scan up to the first whitespace run; everything after it is
    # passed back untouched in the remainder.
    fragment, *remainder = _wsp_splitter(value, 1)
    vchars = []
    escape = False
    had_qp = False
    # NOTE(review): assumes fragment is non-empty (callers never pass a
    # value starting with WSP); otherwise `pos` below would be unbound.
    for pos in range(len(fragment)):
        if fragment[pos] == '\\':
            if escape:
                escape = False
                had_qp = True
            else:
                escape = True
                continue
        if escape:
            escape = False
        elif fragment[pos] in endchars:
            # Stop before the end character; it stays in the remainder.
            break
        vchars.append(fragment[pos])
    else:
        # Loop ran off the end of fragment: consume it entirely.
        pos = pos + 1
    return ''.join(vchars), ''.join([fragment[pos:]] + remainder), had_qp
def get_fws(value):
    """FWS = 1*WSP
    This isn't the RFC definition.  We're using fws to represent tokens
    where folding can be done, but when we are parsing the *un*folding has
    already been done, so we don't need to watch out for CRLF.
    """
    remainder = value.lstrip()
    ws_len = len(value) - len(remainder)
    return WhiteSpaceTerminal(value[:ws_len], 'fws'), remainder
def get_encoded_word(value):
    """ encoded-word = "=?" charset "?" encoding "?" encoded-text "?="
    Raises HeaderParseError if *value* does not start with a parseable
    encoded word; otherwise returns (EncodedWord, remaining value).
    """
    ew = EncodedWord()
    if not value.startswith('=?'):
        raise errors.HeaderParseError(
            "expected encoded word but found {}".format(value))
    tok, *remainder = value[2:].split('?=', 1)
    # No '?=' terminator found at all.
    if tok == value[2:]:
        raise errors.HeaderParseError(
            "expected encoded word but found {}".format(value))
    remstr = ''.join(remainder)
    if len(remstr) > 1 and remstr[0] in hexdigits and remstr[1] in hexdigits:
        # The ? after the CTE was followed by an encoded word escape (=XX).
        rest, *remainder = remstr.split('?=', 1)
        tok = tok + '?=' + rest
    if len(tok.split()) > 1:
        ew.defects.append(errors.InvalidHeaderDefect(
            "whitespace inside encoded word"))
    ew.cte = value
    value = ''.join(remainder)
    try:
        # _ew.decode handles the charset/encoding/text split and decoding.
        text, charset, lang, defects = _ew.decode('=?' + tok + '?=')
    except ValueError:
        raise errors.HeaderParseError(
            "encoded word format invalid: '{}'".format(ew.cte))
    ew.charset = charset
    ew.lang = lang
    ew.defects.extend(defects)
    # Re-tokenize the decoded text into fws/vtext runs.
    while text:
        if text[0] in WSP:
            token, text = get_fws(text)
            ew.append(token)
            continue
        chars, *remainder = _wsp_splitter(text, 1)
        vtext = ValueTerminal(chars, 'vtext')
        _validate_xtext(vtext)
        ew.append(vtext)
        text = ''.join(remainder)
    # Encoded words should be followed by a WS
    if value and value[0] not in WSP:
        ew.defects.append(errors.InvalidHeaderDefect(
            "missing trailing whitespace after encoded-word"))
    return ew, value
def get_unstructured(value):
    """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct
       obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS)
       obs-utext = %d0 / obs-NO-WS-CTL / LF / CR
    obs-NO-WS-CTL is control characters except WSP/CR/LF.
    So, basically, we have printable runs, plus control characters or nulls in
    the obsolete syntax, separated by whitespace.  Since RFC 2047 uses the
    obsolete syntax in its specification, but requires whitespace on either
    side of the encoded words, I can see no reason to need to separate the
    non-printable-non-whitespace from the printable runs if they occur, so we
    parse this into xtext tokens separated by WSP tokens.
    Because an 'unstructured' value must by definition constitute the entire
    value, this 'get' routine does not return a remaining value, only the
    parsed TokenList.
    """
    # XXX: but what about bare CR and LF?  They might signal the start or
    # end of an encoded word.  YAGNI for now, since our current parsers
    # will never send us strings with bare CR or LF.
    unstructured = UnstructuredTokenList()
    while value:
        if value[0] in WSP:
            token, value = get_fws(value)
            unstructured.append(token)
            continue
        if value.startswith('=?'):
            try:
                token, value = get_encoded_word(value)
            except errors.HeaderParseError:
                # XXX: Need to figure out how to register defects when
                # appropriate here.
                pass
            else:
                have_ws = True
                if len(unstructured) > 0:
                    if unstructured[-1].token_type != 'fws':
                        unstructured.defects.append(errors.InvalidHeaderDefect(
                            "missing whitespace before encoded word"))
                        have_ws = False
                # WS between two adjacent encoded words serializes as ''.
                if have_ws and len(unstructured) > 1:
                    if unstructured[-2].token_type == 'encoded-word':
                        unstructured[-1] = EWWhiteSpaceTerminal(
                            unstructured[-1], 'fws')
                unstructured.append(token)
                continue
        tok, *remainder = _wsp_splitter(value, 1)
        # Split in the middle of an atom if there is a rfc2047 encoded word
        # which does not have WSP on both sides. The defect will be registered
        # the next time through the loop.
        if rfc2047_matcher.search(tok):
            tok, *remainder = value.partition('=?')
        vtext = ValueTerminal(tok, 'vtext')
        _validate_xtext(vtext)
        unstructured.append(vtext)
        value = ''.join(remainder)
    return unstructured
def get_qp_ctext(value):
    r"""ctext = <printable ascii except \ ( )>
    This is not the RFC ctext, since we are handling nested comments in
    comment and unquoting quoted-pairs here.  We allow anything except the
    '()' characters, but if we find any ASCII other than the RFC defined
    printable ASCII, a NonPrintableDefect is added to the token's defects
    list.  Since quoted pairs are converted to their unquoted values, what
    is returned is a 'ptext' token.  In this case it is a
    WhiteSpaceTerminal, so its value is ' '.
    """
    text, remainder, _ = _get_ptext_to_endchars(value, '()')
    token = WhiteSpaceTerminal(text, 'ptext')
    _validate_xtext(token)
    return token, remainder
def get_qcontent(value):
    """qcontent = qtext / quoted-pair
    We allow anything except the DQUOTE character, but if we find any ASCII
    other than the RFC defined printable ASCII, a NonPrintableDefect is
    added to the token's defects list.  Any quoted pairs are converted to
    their unquoted values, so what is returned is a 'ptext' token.  In this
    case it is a ValueTerminal.
    """
    text, remainder, _ = _get_ptext_to_endchars(value, '"')
    token = ValueTerminal(text, 'ptext')
    _validate_xtext(token)
    return token, remainder
def get_atext(value):
    """atext = <matches _atext_matcher>
    We allow any non-ATOM_ENDS in atext, but add an InvalidATextDefect to
    the token's defects list if we find non-atext characters.
    """
    match = _non_atom_end_matcher(value)
    if not match:
        raise errors.HeaderParseError(
            "expected atext but found '{}'".format(value))
    atext = ValueTerminal(match.group(), 'atext')
    _validate_xtext(atext)
    return atext, value[len(atext):]
def get_bare_quoted_string(value):
    """bare-quoted-string = DQUOTE *([FWS] qcontent) [FWS] DQUOTE
    A quoted-string without the leading or trailing white space.  Its
    value is the text between the quote marks, with whitespace
    preserved and quoted pairs decoded.
    """
    if value[0] != '"':
        raise errors.HeaderParseError(
            "expected '\"' but found '{}'".format(value))
    bare_quoted_string = BareQuotedString()
    value = value[1:]
    # An immediately-following DQUOTE: record an empty ptext token so the
    # quoted string still has (empty) content.
    if value and value[0] == '"':
        token, value = get_qcontent(value)
        bare_quoted_string.append(token)
    while value and value[0] != '"':
        if value[0] in WSP:
            token, value = get_fws(value)
        elif value[:2] == '=?':
            try:
                token, value = get_encoded_word(value)
                bare_quoted_string.defects.append(errors.InvalidHeaderDefect(
                    "encoded word inside quoted string"))
            except errors.HeaderParseError:
                token, value = get_qcontent(value)
        else:
            token, value = get_qcontent(value)
        bare_quoted_string.append(token)
    if not value:
        # Input ended before the closing quote.
        bare_quoted_string.defects.append(errors.InvalidHeaderDefect(
            "end of header inside quoted string"))
        return bare_quoted_string, value
    return bare_quoted_string, value[1:]
def get_comment(value):
    """comment = "(" *([FWS] ccontent) [FWS] ")"
       ccontent = ctext / quoted-pair / comment
    We handle nested comments here, and quoted-pair in our qp-ctext routine.
    """
    if value and value[0] != '(':
        raise errors.HeaderParseError(
            "expected '(' but found '{}'".format(value))
    comment = Comment()
    value = value[1:]
    while value and value[0] != ")":
        # Dispatch on the next character: whitespace, nested comment,
        # or plain comment text.
        if value[0] in WSP:
            getter = get_fws
        elif value[0] == '(':
            getter = get_comment
        else:
            getter = get_qp_ctext
        token, value = getter(value)
        comment.append(token)
    if not value:
        comment.defects.append(errors.InvalidHeaderDefect(
            "end of header inside comment"))
        return comment, value
    return comment, value[1:]
def get_cfws(value):
    """CFWS = (1*([FWS] comment) [FWS]) / FWS
    """
    cfws = CFWSList()
    while value and value[0] in CFWS_LEADER:
        getter = get_fws if value[0] in WSP else get_comment
        token, value = getter(value)
        cfws.append(token)
    return cfws, value
def get_quoted_string(value):
    """quoted-string = [CFWS] <bare-quoted-string> [CFWS]
    'bare-quoted-string' is an intermediate class defined by this
    parser and not by the RFC grammar.  It is the quoted string
    without any attached CFWS.
    """
    quoted_string = QuotedString()
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        quoted_string.append(cfws)
    bare, value = get_bare_quoted_string(value)
    quoted_string.append(bare)
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        quoted_string.append(cfws)
    return quoted_string, value
def get_atom(value):
    """atom = [CFWS] 1*atext [CFWS]
    An atom could be an rfc2047 encoded word.
    """
    atom = Atom()
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        atom.append(token)
    if value and value[0] in ATOM_ENDS:
        raise errors.HeaderParseError(
            "expected atom but found '{}'".format(value))
    if value.startswith('=?'):
        # Prefer the encoded-word interpretation; fall back to plain atext.
        try:
            token, value = get_encoded_word(value)
        except errors.HeaderParseError:
            # XXX: need to figure out how to register defects when
            # appropriate here.
            token, value = get_atext(value)
    else:
        token, value = get_atext(value)
    atom.append(token)
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        atom.append(token)
    return atom, value
def get_dot_atom_text(value):
    """ dot-text = 1*atext *("." 1*atext)
    """
    dot_atom_text = DotAtomText()
    if not value or value[0] in ATOM_ENDS:
        raise errors.HeaderParseError("expected atom at a start of "
            "dot-atom-text but found '{}'".format(value))
    while True:
        token, value = get_atext(value)
        dot_atom_text.append(token)
        if not value or value[0] != '.':
            return dot_atom_text, value
        # Consume the dot; an atext run must follow it.
        dot_atom_text.append(DOT)
        value = value[1:]
        if not value or value[0] in ATOM_ENDS:
            raise errors.HeaderParseError("expected atom at end of "
                "dot-atom-text but found '{}'".format('.'+value))
def get_dot_atom(value):
    """ dot-atom = [CFWS] dot-atom-text [CFWS]
    Any place we can have a dot atom, we could instead have an rfc2047 encoded
    word.
    """
    dot_atom = DotAtom()
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        dot_atom.append(token)
    if value.startswith('=?'):
        # Prefer the encoded-word interpretation; fall back to dot-atom-text.
        try:
            token, value = get_encoded_word(value)
        except errors.HeaderParseError:
            # XXX: need to figure out how to register defects when
            # appropriate here.
            token, value = get_dot_atom_text(value)
    else:
        token, value = get_dot_atom_text(value)
    dot_atom.append(token)
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        dot_atom.append(token)
    return dot_atom, value
def get_word(value):
    """word = atom / quoted-string
    Either atom or quoted-string may start with CFWS.  We have to peel off this
    CFWS first to determine which type of word to parse.  Afterward we splice
    the leading CFWS, if any, into the parsed sub-token.
    If neither an atom or a quoted-string is found before the next special, a
    HeaderParseError is raised.
    The token returned is either an Atom or a QuotedString, as appropriate.
    This means the 'word' level of the formal grammar is not represented in the
    parse tree; this is because having that extra layer when manipulating the
    parse tree is more confusing than it is helpful.
    """
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    else:
        leader = None
    if not value:
        raise errors.HeaderParseError(
            "Expected 'atom' or 'quoted-string' but found nothing.")
    if value[0]=='"':
        token, value = get_quoted_string(value)
    elif value[0] in SPECIALS:
        raise errors.HeaderParseError("Expected 'atom' or 'quoted-string' "
                                      "but found '{}'".format(value))
    else:
        token, value = get_atom(value)
    # Splice the peeled-off CFWS back onto the front of the sub-token.
    if leader is not None:
        token[:0] = [leader]
    return token, value
def get_phrase(value):
    """ phrase = 1*word / obs-phrase
        obs-phrase = word *(word / "." / CFWS)
    This means a phrase can be a sequence of words, periods, and CFWS in any
    order as long as it starts with at least one word.  If anything other than
    words is detected, an ObsoleteHeaderDefect is added to the token's defect
    list.  We also accept a phrase that starts with CFWS followed by a dot;
    this is registered as an InvalidHeaderDefect, since it is not supported by
    even the obsolete grammar.
    """
    phrase = Phrase()
    try:
        token, value = get_word(value)
        phrase.append(token)
    except errors.HeaderParseError:
        phrase.defects.append(errors.InvalidHeaderDefect(
            "phrase does not start with word"))
    while value and value[0] not in PHRASE_ENDS:
        if value[0]=='.':
            # Dots are only legal in the obsolete grammar.
            phrase.append(DOT)
            phrase.defects.append(errors.ObsoleteHeaderDefect(
                "period in 'phrase'"))
            value = value[1:]
        else:
            try:
                token, value = get_word(value)
            except errors.HeaderParseError:
                if value[0] in CFWS_LEADER:
                    # Bare CFWS between words: obsolete, but accepted.
                    token, value = get_cfws(value)
                    phrase.defects.append(errors.ObsoleteHeaderDefect(
                        "comment found without atom"))
                else:
                    raise
            phrase.append(token)
    return phrase, value
def get_local_part(value):
    """ local-part = dot-atom / quoted-string / obs-local-part
    """
    local_part = LocalPart()
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    if not value:
        raise errors.HeaderParseError(
            "expected local-part but found '{}'".format(value))
    try:
        token, value = get_dot_atom(value)
    except errors.HeaderParseError:
        try:
            token, value = get_word(value)
        except errors.HeaderParseError:
            if value[0] != '\\' and value[0] in PHRASE_ENDS:
                raise
            # Start with an empty token; the obs-local-part reparse below
            # will pick up the content.
            token = TokenList()
    if leader is not None:
        token[:0] = [leader]
    local_part.append(token)
    # Extra content beyond a simple dot-atom/word: reparse the whole thing
    # as an obsolete local part.
    if value and (value[0]=='\\' or value[0] not in PHRASE_ENDS):
        obs_local_part, value = get_obs_local_part(str(local_part) + value)
        if obs_local_part.token_type == 'invalid-obs-local-part':
            local_part.defects.append(errors.InvalidHeaderDefect(
                "local-part is not dot-atom, quoted-string, or obs-local-part"))
        else:
            local_part.defects.append(errors.ObsoleteHeaderDefect(
                "local-part is not a dot-atom (contains CFWS)"))
        local_part[0] = obs_local_part
    try:
        local_part.value.encode('ascii')
    except UnicodeEncodeError:
        local_part.defects.append(errors.NonASCIILocalPartDefect(
                "local-part contains non-ASCII characters)"))
    return local_part, value
def get_obs_local_part(value):
    """ obs-local-part = word *("." word)
    Registers defects for repeated/leading/trailing dots and misplaced
    backslashes; a defective result is retyped 'invalid-obs-local-part'.
    """
    obs_local_part = ObsLocalPart()
    last_non_ws_was_dot = False
    while value and (value[0]=='\\' or value[0] not in PHRASE_ENDS):
        if value[0] == '.':
            if last_non_ws_was_dot:
                obs_local_part.defects.append(errors.InvalidHeaderDefect(
                    "invalid repeated '.'"))
            obs_local_part.append(DOT)
            last_non_ws_was_dot = True
            value = value[1:]
            continue
        elif value[0]=='\\':
            obs_local_part.append(ValueTerminal(value[0],
                                                'misplaced-special'))
            value = value[1:]
            obs_local_part.defects.append(errors.InvalidHeaderDefect(
                "'\\' character outside of quoted-string/ccontent"))
            last_non_ws_was_dot = False
            continue
        if obs_local_part and obs_local_part[-1].token_type != 'dot':
            obs_local_part.defects.append(errors.InvalidHeaderDefect(
                "missing '.' between words"))
        try:
            token, value = get_word(value)
            last_non_ws_was_dot = False
        except errors.HeaderParseError:
            if value[0] not in CFWS_LEADER:
                raise
            token, value = get_cfws(value)
        obs_local_part.append(token)
    # A dot (possibly behind CFWS) at either end is invalid.
    if (obs_local_part[0].token_type == 'dot' or
            obs_local_part[0].token_type=='cfws' and
            obs_local_part[1].token_type=='dot'):
        obs_local_part.defects.append(errors.InvalidHeaderDefect(
            "Invalid leading '.' in local part"))
    if (obs_local_part[-1].token_type == 'dot' or
            obs_local_part[-1].token_type=='cfws' and
            obs_local_part[-2].token_type=='dot'):
        obs_local_part.defects.append(errors.InvalidHeaderDefect(
            "Invalid trailing '.' in local part"))
    if obs_local_part.defects:
        obs_local_part.token_type = 'invalid-obs-local-part'
    return obs_local_part, value
def get_dtext(value):
    r""" dtext = <printable ascii except \ [ ]> / obs-dtext
        obs-dtext = obs-NO-WS-CTL / quoted-pair
    We allow anything except the excluded characters, but if we find any
    ASCII other than the RFC defined printable ASCII, a NonPrintableDefect
    is added to the token's defects list.  Quoted pairs are converted to
    their unquoted values, so what is returned is a ptext token, in this
    case a ValueTerminal.  If there were quoted-printables, an
    ObsoleteHeaderDefect is added to the returned token's defect list.
    """
    text, remainder, had_qp = _get_ptext_to_endchars(value, '[]')
    token = ValueTerminal(text, 'ptext')
    if had_qp:
        token.defects.append(errors.ObsoleteHeaderDefect(
            "quoted printable found in domain-literal"))
    _validate_xtext(token)
    return token, remainder
def _check_for_early_dl_end(value, domain_literal):
    # Return True (after patching up *domain_literal* with a synthetic
    # closing bracket) when the input ran out mid-domain-literal.
    if value:
        return False
    # NOTE(review): the defect object is appended to the token list itself
    # rather than to domain_literal.defects -- confirm this is intentional.
    domain_literal.append(errors.InvalidHeaderDefect(
        "end of input inside domain-literal"))
    domain_literal.append(ValueTerminal(']', 'domain-literal-end'))
    return True
def get_domain_literal(value):
    """ domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS]
    """
    domain_literal = DomainLiteral()
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        domain_literal.append(token)
    if not value:
        raise errors.HeaderParseError("expected domain-literal")
    if value[0] != '[':
        raise errors.HeaderParseError("expected '[' at start of domain-literal "
                "but found '{}'".format(value))
    value = value[1:]
    # Truncated input is patched up rather than raised, at each step below.
    if _check_for_early_dl_end(value, domain_literal):
        return domain_literal, value
    domain_literal.append(ValueTerminal('[', 'domain-literal-start'))
    if value[0] in WSP:
        token, value = get_fws(value)
        domain_literal.append(token)
    token, value = get_dtext(value)
    domain_literal.append(token)
    if _check_for_early_dl_end(value, domain_literal):
        return domain_literal, value
    if value[0] in WSP:
        token, value = get_fws(value)
        domain_literal.append(token)
    if _check_for_early_dl_end(value, domain_literal):
        return domain_literal, value
    if value[0] != ']':
        raise errors.HeaderParseError("expected ']' at end of domain-literal "
                "but found '{}'".format(value))
    domain_literal.append(ValueTerminal(']', 'domain-literal-end'))
    value = value[1:]
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        domain_literal.append(token)
    return domain_literal, value
def get_domain(value):
    """ domain = dot-atom / domain-literal / obs-domain
        obs-domain = atom *("." atom))
    """
    domain = Domain()
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    if not value:
        raise errors.HeaderParseError(
            "expected domain but found '{}'".format(value))
    if value[0] == '[':
        token, value = get_domain_literal(value)
        if leader is not None:
            token[:0] = [leader]
        domain.append(token)
        return domain, value
    try:
        token, value = get_dot_atom(value)
    except errors.HeaderParseError:
        token, value = get_atom(value)
    # A second '@' after the domain is rejected outright (it would allow
    # address spoofing via a parsed-vs-transmitted mismatch).
    if value and value[0] == '@':
        raise errors.HeaderParseError('Invalid Domain')
    if leader is not None:
        token[:0] = [leader]
    domain.append(token)
    if value and value[0] == '.':
        domain.defects.append(errors.ObsoleteHeaderDefect(
            "domain is not a dot-atom (contains CFWS)"))
        # Flatten the dot-atom and continue consuming '.' atom pairs
        # under the obsolete grammar.
        if domain[0].token_type == 'dot-atom':
            domain[:] = domain[0]
        while value and value[0] == '.':
            domain.append(DOT)
            token, value = get_atom(value[1:])
            domain.append(token)
    return domain, value
def get_addr_spec(value):
    """ addr-spec = local-part "@" domain
    """
    addr_spec = AddrSpec()
    local_part, value = get_local_part(value)
    addr_spec.append(local_part)
    if value and value[0] == '@':
        addr_spec.append(ValueTerminal('@', 'address-at-symbol'))
        domain, value = get_domain(value[1:])
        addr_spec.append(domain)
    else:
        # Domain-less addr-spec: record a defect but keep what we parsed.
        addr_spec.defects.append(errors.InvalidHeaderDefect(
            "addr-spec local part with no domain"))
    return addr_spec, value
def get_obs_route(value):
    """ obs-route = obs-domain-list ":"
        obs-domain-list = *(CFWS / ",") "@" domain *("," [CFWS] ["@" domain])
    Returns an obs-route token with the appropriate sub-tokens (that is,
    there is no obs-domain-list in the parse tree).
    """
    obs_route = ObsRoute()
    # Leading CFWS and commas before the first '@'.
    while value and (value[0]==',' or value[0] in CFWS_LEADER):
        if value[0] in CFWS_LEADER:
            token, value = get_cfws(value)
            obs_route.append(token)
        elif value[0] == ',':
            obs_route.append(ListSeparator)
            value = value[1:]
    if not value or value[0] != '@':
        raise errors.HeaderParseError(
            "expected obs-route domain but found '{}'".format(value))
    obs_route.append(RouteComponentMarker)
    token, value = get_domain(value[1:])
    obs_route.append(token)
    # Subsequent comma-separated (and optionally empty) route components.
    while value and value[0]==',':
        obs_route.append(ListSeparator)
        value = value[1:]
        if not value:
            break
        if value[0] in CFWS_LEADER:
            token, value = get_cfws(value)
            obs_route.append(token)
        if value[0] == '@':
            obs_route.append(RouteComponentMarker)
            token, value = get_domain(value[1:])
            obs_route.append(token)
    if not value:
        raise errors.HeaderParseError("end of header while parsing obs-route")
    if value[0] != ':':
        raise errors.HeaderParseError( "expected ':' marking end of "
            "obs-route but found '{}'".format(value))
    obs_route.append(ValueTerminal(':', 'end-of-obs-route-marker'))
    return obs_route, value[1:]
def get_angle_addr(value):
    """ angle-addr = [CFWS] "<" addr-spec ">" [CFWS] / obs-angle-addr
        obs-angle-addr = [CFWS] "<" obs-route addr-spec ">" [CFWS]
    """
    angle_addr = AngleAddr()
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        angle_addr.append(token)
    if not value or value[0] != '<':
        raise errors.HeaderParseError(
            "expected angle-addr but found '{}'".format(value))
    angle_addr.append(ValueTerminal('<', 'angle-addr-start'))
    value = value[1:]
    # Although it is not legal per RFC5322, SMTP uses '<>' in certain
    # circumstances.
    if value[0] == '>':
        angle_addr.append(ValueTerminal('>', 'angle-addr-end'))
        angle_addr.defects.append(errors.InvalidHeaderDefect(
            "null addr-spec in angle-addr"))
        value = value[1:]
        return angle_addr, value
    try:
        token, value = get_addr_spec(value)
    except errors.HeaderParseError:
        # Not a plain addr-spec; try the obsolete routed form.
        try:
            token, value = get_obs_route(value)
            angle_addr.defects.append(errors.ObsoleteHeaderDefect(
                "obsolete route specification in angle-addr"))
        except errors.HeaderParseError:
            raise errors.HeaderParseError(
                "expected addr-spec or obs-route but found '{}'".format(value))
        angle_addr.append(token)
        # The addr-spec proper follows the obs-route.
        token, value = get_addr_spec(value)
    angle_addr.append(token)
    if value and value[0] == '>':
        value = value[1:]
    else:
        angle_addr.defects.append(errors.InvalidHeaderDefect(
            "missing trailing '>' on angle-addr"))
    angle_addr.append(ValueTerminal('>', 'angle-addr-end'))
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        angle_addr.append(token)
    return angle_addr, value
def get_display_name(value):
    """ display-name = phrase
    Because this is simply a name-rule, we don't return a display-name
    token containing a phrase, but rather a display-name token with
    the content of the phrase.
    """
    display_name = DisplayName()
    phrase, value = get_phrase(value)
    display_name.extend(phrase[:])
    display_name.defects = phrase.defects[:]
    return display_name, value
def get_name_addr(value):
    """ name-addr = [display-name] angle-addr
    """
    name_addr = NameAddr()
    # Both the optional display name and the angle-addr can start with cfws.
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
        if not value:
            raise errors.HeaderParseError(
                "expected name-addr but found '{}'".format(leader))
    if value[0] != '<':
        if value[0] in PHRASE_ENDS:
            raise errors.HeaderParseError(
                "expected name-addr but found '{}'".format(value))
        token, value = get_display_name(value)
        if not value:
            raise errors.HeaderParseError(
                "expected name-addr but found '{}'".format(token))
        # Splice the leading CFWS into the display name's first sub-token.
        if leader is not None:
            token[0][:0] = [leader]
            leader = None
        name_addr.append(token)
    token, value = get_angle_addr(value)
    # If there was no display name, the CFWS belongs to the angle-addr.
    if leader is not None:
        token[:0] = [leader]
    name_addr.append(token)
    return name_addr, value
def get_mailbox(value):
    """ mailbox = name-addr / addr-spec

    The two alternatives cannot be distinguished by lookahead, so we simply
    attempt name-addr first and fall back to addr-spec.
    """
    mailbox = Mailbox()
    try:
        token, value = get_name_addr(value)
    except errors.HeaderParseError:
        try:
            token, value = get_addr_spec(value)
        except errors.HeaderParseError:
            raise errors.HeaderParseError(
                "expected mailbox but found '{}'".format(value))
    # A mailbox carrying any invalid defect is downgraded to an
    # invalid-mailbox token so higher levels can skip it.
    has_invalid = any(isinstance(defect, errors.InvalidHeaderDefect)
                      for defect in token.all_defects)
    if has_invalid:
        mailbox.token_type = 'invalid-mailbox'
    mailbox.append(token)
    return mailbox, value
def get_invalid_mailbox(value, endchars):
    """Consume input up to (but not including) any char in *endchars*.

    This rule is outside the formal grammar.  The InvalidMailbox token
    returned behaves like a Mailbox, but its data attributes are all None.
    """
    bad = InvalidMailbox()
    while value and value[0] not in endchars:
        ch = value[0]
        if ch in PHRASE_ENDS:
            # Specials that cannot start a phrase are kept verbatim.
            bad.append(ValueTerminal(ch, 'misplaced-special'))
            value = value[1:]
        else:
            phrase, value = get_phrase(value)
            bad.append(phrase)
    return bad, value
def get_mailbox_list(value):
    """ mailbox-list = (mailbox *("," mailbox)) / obs-mbox-list
        obs-mbox-list = *([CFWS] ",") mailbox *("," [mailbox / CFWS])

    For this routine we go outside the formal grammar in order to improve error
    handling.  We recognize the end of the mailbox list only at the end of the
    value or at a ';' (the group terminator).  This is so that we can turn
    invalid mailboxes into InvalidMailbox tokens and continue parsing any
    remaining valid mailboxes.  We also allow all mailbox entries to be null,
    and this condition is handled appropriately at a higher level.
    """
    mailbox_list = MailboxList()
    while value and value[0] != ';':
        try:
            token, value = get_mailbox(value)
            mailbox_list.append(token)
        except errors.HeaderParseError:
            # Recovery path: the entry is not a valid mailbox.  Decide
            # whether it is empty (obsolete but acceptable) or junk.
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
                if not value or value[0] in ',;':
                    # Entry was nothing but CFWS: an obs-mbox-list null entry.
                    mailbox_list.append(leader)
                    mailbox_list.defects.append(errors.ObsoleteHeaderDefect(
                        "empty element in mailbox-list"))
                else:
                    # Junk after the CFWS: capture it as an invalid mailbox.
                    token, value = get_invalid_mailbox(value, ',;')
                    if leader is not None:
                        token[:0] = [leader]
                    mailbox_list.append(token)
                    mailbox_list.defects.append(errors.InvalidHeaderDefect(
                        "invalid mailbox in mailbox-list"))
            elif value[0] == ',':
                # Null entry with no CFWS at all.
                mailbox_list.defects.append(errors.ObsoleteHeaderDefect(
                    "empty element in mailbox-list"))
            else:
                token, value = get_invalid_mailbox(value, ',;')
                if leader is not None:
                    token[:0] = [leader]
                mailbox_list.append(token)
                mailbox_list.defects.append(errors.InvalidHeaderDefect(
                    "invalid mailbox in mailbox-list"))
        if value and value[0] not in ',;':
            # Crap after mailbox; treat it as an invalid mailbox.
            # The mailbox info will still be available.
            mailbox = mailbox_list[-1]
            mailbox.token_type = 'invalid-mailbox'
            token, value = get_invalid_mailbox(value, ',;')
            mailbox.extend(token)
            mailbox_list.defects.append(errors.InvalidHeaderDefect(
                "invalid mailbox in mailbox-list"))
        if value and value[0] == ',':
            mailbox_list.append(ListSeparator)
            value = value[1:]
    return mailbox_list, value
def get_group_list(value):
    """ group-list = mailbox-list / CFWS / obs-group-list
        obs-group-list = 1*([CFWS] ",") [CFWS]
    """
    group_list = GroupList()
    if not value:
        group_list.defects.append(errors.InvalidHeaderDefect(
            "end of header before group-list"))
        return group_list, value
    leader = None
    if value and value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
        if not value:
            # Should be unreachable from email parsing: bare CFWS is a
            # legal alternative to group-list inside a group, which is the
            # only place a group-list appears.
            group_list.defects.append(errors.InvalidHeaderDefect(
                "end of header in group-list"))
            group_list.append(leader)
            return group_list, value
        if value[0] == ';':
            # Empty group-list: just CFWS before the group terminator.
            group_list.append(leader)
            return group_list, value
    token, value = get_mailbox_list(value)
    if not token.all_mailboxes:
        # Only separators/CFWS were present: obsolete but parseable form.
        if leader is not None:
            group_list.append(leader)
        group_list.extend(token)
        group_list.defects.append(errors.ObsoleteHeaderDefect(
            "group-list with empty entries"))
        return group_list, value
    if leader is not None:
        token[:0] = [leader]
    group_list.append(token)
    return group_list, value
def get_group(value):
    """ group = display-name ":" [group-list] ";" [CFWS]
    """
    group = Group()
    tok, value = get_display_name(value)
    if not value or value[0] != ':':
        raise errors.HeaderParseError("expected ':' at end of group "
            "display name but found '{}'".format(value))
    group.append(tok)
    group.append(ValueTerminal(':', 'group-display-name-terminator'))
    value = value[1:]
    if value and value[0] == ';':
        # Empty group of the form "name:;".
        group.append(ValueTerminal(';', 'group-terminator'))
        return group, value[1:]
    tok, value = get_group_list(value)
    group.append(tok)
    if not value:
        group.defects.append(errors.InvalidHeaderDefect(
            "end of header in group"))
    elif value[0] != ';':
        raise errors.HeaderParseError(
            "expected ';' at end of group but found {}".format(value))
    # The ';' terminator is synthesized even when the header ended early,
    # keeping the token tree shape uniform for consumers.
    group.append(ValueTerminal(';', 'group-terminator'))
    value = value[1:]
    if value and value[0] in CFWS_LEADER:
        tok, value = get_cfws(value)
        group.append(tok)
    return group, value
def get_address(value):
    """ address = mailbox / group

    Counter-intuitively, an address may be either a single mailbox or a
    whole group.  The returned Address token therefore exposes a
    'mailboxes' attribute that treats the single-mailbox case as a list of
    length one.  When the two cases must be distinguished, extract the
    single element, which is either a mailbox or a group token.
    """
    # mailbox and group (especially with obsolete forms) parse almost
    # identically until one of '@', '<', or ':' is reached, so we just try
    # each alternative in turn, starting with the cheaper failure.  A
    # phrase-then-branch approach would be a premature optimization.
    address = Address()
    try:
        token, value = get_group(value)
    except errors.HeaderParseError:
        try:
            token, value = get_mailbox(value)
        except errors.HeaderParseError:
            raise errors.HeaderParseError(
                "expected address but found '{}'".format(value))
    address.append(token)
    return address, value
def get_address_list(value):
    """ address_list = (address *("," address)) / obs-addr-list
        obs-addr-list = *([CFWS] ",") address *("," [address / CFWS])

    We depart from the formal grammar here by continuing to parse until the end
    of the input, assuming the input to be entirely composed of an
    address-list.  This is always true in email parsing, and allows us
    to skip invalid addresses to parse additional valid ones.
    """
    address_list = AddressList()
    while value:
        try:
            token, value = get_address(value)
            address_list.append(token)
        except errors.HeaderParseError as err:
            # Recovery: the entry is not a valid address.  Decide whether
            # it is an empty (obsolete) entry or junk to preserve.
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
                if not value or value[0] == ',':
                    # Entry was nothing but CFWS: an obs-addr-list null entry.
                    address_list.append(leader)
                    address_list.defects.append(errors.ObsoleteHeaderDefect(
                        "address-list entry with no content"))
                else:
                    # Junk after the CFWS: keep it as an invalid mailbox.
                    token, value = get_invalid_mailbox(value, ',')
                    if leader is not None:
                        token[:0] = [leader]
                    address_list.append(Address([token]))
                    address_list.defects.append(errors.InvalidHeaderDefect(
                        "invalid address in address-list"))
            elif value[0] == ',':
                # Null entry with no CFWS at all.
                address_list.defects.append(errors.ObsoleteHeaderDefect(
                    "empty element in address-list"))
            else:
                token, value = get_invalid_mailbox(value, ',')
                if leader is not None:
                    token[:0] = [leader]
                address_list.append(Address([token]))
                address_list.defects.append(errors.InvalidHeaderDefect(
                    "invalid address in address-list"))
        if value and value[0] != ',':
            # Crap after address; treat it as an invalid mailbox.
            # The mailbox info will still be available.
            mailbox = address_list[-1][0]
            mailbox.token_type = 'invalid-mailbox'
            token, value = get_invalid_mailbox(value, ',')
            mailbox.extend(token)
            address_list.defects.append(errors.InvalidHeaderDefect(
                "invalid address in address-list"))
        if value:  # Must be a , at this point.
            address_list.append(ValueTerminal(',', 'list-separator'))
            value = value[1:]
    return address_list, value
def get_no_fold_literal(value):
    """ no-fold-literal = "[" *dtext "]"
    """
    nfl = NoFoldLiteral()
    if not value:
        raise errors.HeaderParseError(
            "expected no-fold-literal but found '{}'".format(value))
    if value[0] != '[':
        raise errors.HeaderParseError(
            "expected '[' at the start of no-fold-literal "
            "but found '{}'".format(value))
    nfl.append(ValueTerminal('[', 'no-fold-literal-start'))
    dtext, value = get_dtext(value[1:])
    nfl.append(dtext)
    if not value or value[0] != ']':
        raise errors.HeaderParseError(
            "expected ']' at the end of no-fold-literal "
            "but found '{}'".format(value))
    nfl.append(ValueTerminal(']', 'no-fold-literal-end'))
    return nfl, value[1:]
def get_msg_id(value):
    """msg-id = [CFWS] "<" id-left '@' id-right ">" [CFWS]
       id-left = dot-atom-text / obs-id-left
       id-right = dot-atom-text / no-fold-literal / obs-id-right
       no-fold-literal = "[" *dtext "]"
    """
    msg_id = MsgID()
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        msg_id.append(token)
    if not value or value[0] != '<':
        raise errors.HeaderParseError(
            "expected msg-id but found '{}'".format(value))
    msg_id.append(ValueTerminal('<', 'msg-id-start'))
    value = value[1:]
    # Parse id-left: strict dot-atom-text first, then the obsolete form.
    try:
        token, value = get_dot_atom_text(value)
    except errors.HeaderParseError:
        try:
            # obs-id-left is same as local-part of add-spec.
            token, value = get_obs_local_part(value)
            msg_id.defects.append(errors.ObsoleteHeaderDefect(
                "obsolete id-left in msg-id"))
        except errors.HeaderParseError:
            raise errors.HeaderParseError(
                "expected dot-atom-text or obs-id-left"
                " but found '{}'".format(value))
    msg_id.append(token)
    if not value or value[0] != '@':
        msg_id.defects.append(errors.InvalidHeaderDefect(
            "msg-id with no id-right"))
        # Even though there is no id-right, if the local part
        # ends with `>` let's just parse it too and return
        # along with the defect.
        if value and value[0] == '>':
            msg_id.append(ValueTerminal('>', 'msg-id-end'))
            value = value[1:]
        return msg_id, value
    msg_id.append(ValueTerminal('@', 'address-at-symbol'))
    value = value[1:]
    # Parse id-right: dot-atom-text, then no-fold-literal, then the
    # obsolete domain form, each as a fallback for the previous.
    try:
        token, value = get_dot_atom_text(value)
    except errors.HeaderParseError:
        try:
            token, value = get_no_fold_literal(value)
        except errors.HeaderParseError as e:
            try:
                token, value = get_domain(value)
                msg_id.defects.append(errors.ObsoleteHeaderDefect(
                    "obsolete id-right in msg-id"))
            except errors.HeaderParseError:
                raise errors.HeaderParseError(
                    "expected dot-atom-text, no-fold-literal or obs-id-right"
                    " but found '{}'".format(value))
    msg_id.append(token)
    if value and value[0] == '>':
        value = value[1:]
    else:
        msg_id.defects.append(errors.InvalidHeaderDefect(
            "missing trailing '>' on msg-id"))
    # The '>' terminal is synthesized even when missing from the input.
    msg_id.append(ValueTerminal('>', 'msg-id-end'))
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        msg_id.append(token)
    return msg_id, value
def parse_message_id(value):
    """message-id = "Message-ID:" msg-id CRLF

    Returns a MessageID token.  If the value cannot be parsed as a msg-id,
    an InvalidHeaderDefect is recorded and the (partial) token is returned
    without a msg-id child.
    """
    message_id = MessageID()
    try:
        token, value = get_msg_id(value)
        # Bug fix: the append must happen only on success.  Previously it
        # was executed unconditionally after the try/except, so a parse
        # failure left 'token' unbound and raised NameError instead of
        # returning the defect-carrying token.
        message_id.append(token)
    except errors.HeaderParseError:
        message_id.defects.append(errors.InvalidHeaderDefect(
            "Expected msg-id but found {!r}".format(value)))
    return message_id
#
# XXX: As I begin to add additional header parsers, I'm realizing we probably
# have two levels of parser routines: the get_XXX methods that get a token in
# the grammar, and parse_XXX methods that parse an entire field value.  So
# get_address_list above should really be a parse_ method, as probably should
# be get_unstructured.
#
def parse_mime_version(value):
    """ mime-version = [CFWS] 1*digit [CFWS] "." [CFWS] 1*digit [CFWS]
    """
    # The [CFWS] is implicit in the RFC 2045 BNF.
    # XXX: This routine is a bit verbose, should factor out a get_int method.
    mime_version = MIMEVersion()
    if not value:
        mime_version.defects.append(errors.HeaderMissingRequiredValue(
            "Missing MIME version number (eg: 1.0)"))
        return mime_version
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
        if not value:
            mime_version.defects.append(errors.HeaderMissingRequiredValue(
                "Expected MIME version number but found only CFWS"))
    # Accumulate the major version: everything up to '.' or CFWS.
    digits = ''
    while value and value[0] != '.' and value[0] not in CFWS_LEADER:
        digits += value[0]
        value = value[1:]
    if not digits.isdigit():
        mime_version.defects.append(errors.InvalidHeaderDefect(
            "Expected MIME major version number but found {!r}".format(digits)))
        mime_version.append(ValueTerminal(digits, 'xtext'))
    else:
        mime_version.major = int(digits)
        mime_version.append(ValueTerminal(digits, 'digits'))
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
    if not value or value[0] != '.':
        # No '.' separator: the version is incomplete (or junk follows).
        if mime_version.major is not None:
            mime_version.defects.append(errors.InvalidHeaderDefect(
                "Incomplete MIME version; found only major number"))
        if value:
            mime_version.append(ValueTerminal(value, 'xtext'))
        return mime_version
    mime_version.append(ValueTerminal('.', 'version-separator'))
    value = value[1:]
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
    if not value:
        if mime_version.major is not None:
            mime_version.defects.append(errors.InvalidHeaderDefect(
                "Incomplete MIME version; found only major number"))
        return mime_version
    # Accumulate the minor version: everything up to CFWS.
    digits = ''
    while value and value[0] not in CFWS_LEADER:
        digits += value[0]
        value = value[1:]
    if not digits.isdigit():
        mime_version.defects.append(errors.InvalidHeaderDefect(
            "Expected MIME minor version number but found {!r}".format(digits)))
        mime_version.append(ValueTerminal(digits, 'xtext'))
    else:
        mime_version.minor = int(digits)
        mime_version.append(ValueTerminal(digits, 'digits'))
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
    if value:
        # Anything left after trailing CFWS is junk; record it.
        mime_version.defects.append(errors.InvalidHeaderDefect(
            "Excess non-CFWS text after MIME version"))
        mime_version.append(ValueTerminal(value, 'xtext'))
    return mime_version
def get_invalid_parameter(value):
    """Consume everything up to the next ';'.

    This rule is outside the formal grammar.  The InvalidParameter token
    returned behaves like a Parameter, but its data attributes are None.
    """
    bad = InvalidParameter()
    while value and value[0] != ';':
        ch = value[0]
        if ch in PHRASE_ENDS:
            # Specials that cannot start a phrase are kept verbatim.
            bad.append(ValueTerminal(ch, 'misplaced-special'))
            value = value[1:]
        else:
            phrase, value = get_phrase(value)
            bad.append(phrase)
    return bad, value
def get_ttext(value):
    """ttext = <matches _ttext_matcher>

    Any run of non-TOKEN_ENDS characters is accepted as ttext; defects are
    registered on the token for non-ttext characters, and for *any*
    non-printables, following the spirit of RFC 5322 even where the RFC is
    more permissive.
    """
    match = _non_token_end_matcher(value)
    if not match:
        raise errors.HeaderParseError(
            "expected ttext but found '{}'".format(value))
    text = match.group()
    token = ValueTerminal(text, 'ttext')
    _validate_xtext(token)
    return token, value[len(text):]
def get_token(value):
    """token = [CFWS] 1*ttext [CFWS]

    RFC 2045 ttext is any US-ASCII char except space, ctls, or tspecials;
    we additionally exclude tab.  The surrounding CFWS is implied by the
    RFC though not explicit in its BNF.
    """
    mtoken = Token()
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        mtoken.append(cfws)
    if value and value[0] in TOKEN_ENDS:
        raise errors.HeaderParseError(
            "expected token but found '{}'".format(value))
    ttext, value = get_ttext(value)
    mtoken.append(ttext)
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        mtoken.append(cfws)
    return mtoken, value
def get_attrtext(value):
    """attrtext = 1*(any non-ATTRIBUTE_ENDS character)

    Any run of non-ATTRIBUTE_ENDS characters is accepted; defects are
    registered on the token for non-attrtext characters, and for *any*
    non-printables, following the spirit of RFC 5322 even where the RFC is
    more permissive.
    """
    match = _non_attribute_end_matcher(value)
    if not match:
        raise errors.HeaderParseError(
            "expected attrtext but found {!r}".format(value))
    text = match.group()
    token = ValueTerminal(text, 'attrtext')
    _validate_xtext(token)
    return token, value[len(text):]
def get_attribute(value):
    """ [CFWS] 1*attrtext [CFWS]

    This form of the BNF makes the CFWS explicit; the run of characters is
    held in a value terminal.  The RFC equivalent of attrtext is the token
    characters minus '*', "'", and '%'.  Tab is excluded as for token.
    """
    attribute = Attribute()
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        attribute.append(cfws)
    if value and value[0] in ATTRIBUTE_ENDS:
        raise errors.HeaderParseError(
            "expected token but found '{}'".format(value))
    text, value = get_attrtext(value)
    attribute.append(text)
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        attribute.append(cfws)
    return attribute, value
def get_extended_attrtext(value):
    """attrtext = 1*(any non-ATTRIBUTE_ENDS character plus '%')

    A special parsing routine that keeps % escapes inside the value, so an
    RFC 2231 encoded value can later be decoded as one string.
    """
    match = _non_extended_attribute_end_matcher(value)
    if not match:
        raise errors.HeaderParseError(
            "expected extended attrtext but found {!r}".format(value))
    text = match.group()
    token = ValueTerminal(text, 'extended-attrtext')
    _validate_xtext(token)
    return token, value[len(text):]
def get_extended_attribute(value):
    """ [CFWS] 1*extended_attrtext [CFWS]

    Like get_attribute, except '%' characters are allowed in the value so
    that an encoded value can be collected as a single string.
    """
    # XXX: should we have an ExtendedAttribute TokenList?
    attribute = Attribute()
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        attribute.append(cfws)
    if value and value[0] in EXTENDED_ATTRIBUTE_ENDS:
        raise errors.HeaderParseError(
            "expected token but found '{}'".format(value))
    text, value = get_extended_attrtext(value)
    attribute.append(text)
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        attribute.append(cfws)
    return attribute, value
def get_section(value):
    """ '*' digits

    The formal BNF is more complicated because leading 0s are not allowed.
    We check for that and add a defect.  We also assume no CFWS is allowed
    between the '*' and the digits, though the RFC is not crystal clear on
    that.  The caller should already have dealt with leading CFWS.
    """
    section = Section()
    if not value or value[0] != '*':
        raise errors.HeaderParseError("Expected section but found {}".format(
            value))
    section.append(ValueTerminal('*', 'section-marker'))
    value = value[1:]
    if not value or not value[0].isdigit():
        raise errors.HeaderParseError("Expected section number but "
                                      "found {}".format(value))
    digits = ''
    while value and value[0].isdigit():
        digits += value[0]
        value = value[1:]
    if digits[0] == '0' and digits != '0':
        # Bug fix: this used errors.InvalidHeaderError, which does not
        # exist in email.errors -- a leading-zero section number raised
        # AttributeError instead of recording a defect.
        section.defects.append(errors.InvalidHeaderDefect(
            "section number has an invalid leading 0"))
    section.number = int(digits)
    section.append(ValueTerminal(digits, 'digits'))
    return section, value
def get_value(value):
    """ quoted-string / attribute
    """
    v = Value()
    if not value:
        raise errors.HeaderParseError("Expected value but found end of string")
    # Any leading CFWS is attached to whichever token follows it.
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    if not value:
        raise errors.HeaderParseError("Expected value but found "
                                      "only {}".format(leader))
    # A '"' selects the quoted-string alternative; anything else is an
    # (extended) attribute.
    parse = get_quoted_string if value[0] == '"' else get_extended_attribute
    token, value = parse(value)
    if leader is not None:
        token[:0] = [leader]
    v.append(token)
    return v, value
def get_parameter(value):
    """ attribute [section] ["*"] [CFWS] "=" value

    The CFWS is implied by the RFC but not made explicit in the BNF.  This
    simplified form of the BNF from the RFC is made to conform with the RFC
    BNF through some extra checks.  We do it this way because it makes both
    error recovery and working with the resulting parse tree easier.
    """
    # It is possible CFWS would also be implicitly allowed between the section
    # and the 'extended-attribute' marker (the '*') , but we've never seen that
    # in the wild and we will therefore ignore the possibility.
    param = Parameter()
    token, value = get_attribute(value)
    param.append(token)
    if not value or value[0] == ';':
        param.defects.append(errors.InvalidHeaderDefect("Parameter contains "
            "name ({}) but no value".format(token)))
        return param, value
    if value[0] == '*':
        # Could be a section marker ("*1") and/or the extended-value marker.
        try:
            token, value = get_section(value)
            param.sectioned = True
            param.append(token)
        except errors.HeaderParseError:
            pass
        if not value:
            raise errors.HeaderParseError("Incomplete parameter")
        if value[0] == '*':
            param.append(ValueTerminal('*', 'extended-parameter-marker'))
            value = value[1:]
            param.extended = True
    if value[0] != '=':
        raise errors.HeaderParseError("Parameter not followed by '='")
    param.append(ValueTerminal('=', 'parameter-separator'))
    value = value[1:]
    leader = None
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        param.append(token)
    # 'remainder' holds text after a doubly-quoted extended value;
    # 'appendto' is where subsequent tokens go (the param itself, or the
    # emptied bare-quoted-string inside a recovered quoted value).
    remainder = None
    appendto = param
    if param.extended and value and value[0] == '"':
        # Now for some serious hackery to handle the common invalid case of
        # double quotes around an extended value.  We also accept (with defect)
        # a value marked as encoded that isn't really.
        qstring, remainder = get_quoted_string(value)
        inner_value = qstring.stripped_value
        semi_valid = False
        if param.section_number == 0:
            if inner_value and inner_value[0] == "'":
                semi_valid = True
            else:
                token, rest = get_attrtext(inner_value)
                if rest and rest[0] == "'":
                    semi_valid = True
        else:
            try:
                token, rest = get_extended_attrtext(inner_value)
            # NOTE(review): bare except swallows *all* exceptions here, not
            # just HeaderParseError -- consider narrowing; confirm upstream.
            except:
                pass
            else:
                if not rest:
                    semi_valid = True
        if semi_valid:
            param.defects.append(errors.InvalidHeaderDefect(
                "Quoted string value for extended parameter is invalid"))
            param.append(qstring)
            for t in qstring:
                if t.token_type == 'bare-quoted-string':
                    # Empty the bare-quoted-string and redirect parsing of
                    # the inner value into it.
                    t[:] = []
                    appendto = t
                    break
            value = inner_value
        else:
            remainder = None
            param.defects.append(errors.InvalidHeaderDefect(
                "Parameter marked as extended but appears to have a "
                "quoted string value that is non-encoded"))
    if value and value[0] == "'":
        # Missing charset before the RFC 2231 delimiter.
        token = None
    else:
        token, value = get_value(value)
    if not param.extended or param.section_number > 0:
        if not value or value[0] != "'":
            # The common case: a plain (non-initial-extended) value.
            appendto.append(token)
            if remainder is not None:
                assert not value, value
                value = remainder
            return param, value
        param.defects.append(errors.InvalidHeaderDefect(
            "Apparent initial-extended-value but attribute "
            "was not marked as extended or was not initial section"))
    if not value:
        # Assume the charset/lang is missing and the token is the value.
        param.defects.append(errors.InvalidHeaderDefect(
            "Missing required charset/lang delimiters"))
        appendto.append(token)
        if remainder is None:
            return param, value
    else:
        if token is not None:
            # The value parsed so far is actually the charset portion of an
            # RFC 2231 charset'lang'value triple.
            for t in token:
                if t.token_type == 'extended-attrtext':
                    break
            # NOTE(review): '==' makes this line a no-op comparison; an
            # assignment (=) relabeling the token as 'attrtext' was probably
            # intended -- confirm against upstream before changing.
            t.token_type == 'attrtext'
            appendto.append(t)
            param.charset = t.value
        if value[0] != "'":
            raise errors.HeaderParseError("Expected RFC2231 char/lang encoding "
                                          "delimiter, but found {!r}".format(value))
        appendto.append(ValueTerminal("'", 'RFC2231-delimiter'))
        value = value[1:]
        if value and value[0] != "'":
            # Optional language tag between the two delimiters.
            token, value = get_attrtext(value)
            appendto.append(token)
            param.lang = token.value
            if not value or value[0] != "'":
                raise errors.HeaderParseError("Expected RFC2231 char/lang encoding "
                                  "delimiter, but found {}".format(value))
        appendto.append(ValueTerminal("'", 'RFC2231-delimiter'))
        value = value[1:]
        if remainder is not None:
            # Treat the rest of value as bare quoted string content.
            v = Value()
            while value:
                if value[0] in WSP:
                    token, value = get_fws(value)
                elif value[0] == '"':
                    token = ValueTerminal('"', 'DQUOTE')
                    value = value[1:]
                else:
                    token, value = get_qcontent(value)
                v.append(token)
            token = v
        else:
            token, value = get_value(value)
        appendto.append(token)
        if remainder is not None:
            assert not value, value
            value = remainder
    return param, value
def parse_mime_parameters(value):
    """ parameter *( ";" parameter )

    That BNF is meant to indicate this routine should only be called after
    finding and handling the leading ';'.  There is no corresponding rule in
    the formal RFC grammar, but it is more convenient for us for the set of
    parameters to be treated as its own TokenList.

    This is 'parse' routine because it consumes the remaining value, but it
    would never be called to parse a full header.  Instead it is called to
    parse everything after the non-parameter value of a specific MIME header.
    """
    mime_parameters = MimeParameters()
    while value:
        try:
            token, value = get_parameter(value)
            mime_parameters.append(token)
        except errors.HeaderParseError as err:
            # Recovery: skip any CFWS, then decide whether the entry was
            # empty or junk to preserve as an invalid parameter.
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
            if not value:
                mime_parameters.append(leader)
                return mime_parameters
            if value[0] == ';':
                if leader is not None:
                    mime_parameters.append(leader)
                mime_parameters.defects.append(errors.InvalidHeaderDefect(
                    "parameter entry with no content"))
            else:
                token, value = get_invalid_parameter(value)
                if leader:
                    token[:0] = [leader]
                mime_parameters.append(token)
                mime_parameters.defects.append(errors.InvalidHeaderDefect(
                    "invalid parameter {!r}".format(token)))
        if value and value[0] != ';':
            # Junk after the otherwise valid parameter.  Mark it as
            # invalid, but it will have a value.
            param = mime_parameters[-1]
            param.token_type = 'invalid-parameter'
            token, value = get_invalid_parameter(value)
            param.extend(token)
            mime_parameters.defects.append(errors.InvalidHeaderDefect(
                "parameter with invalid trailing text {!r}".format(token)))
        if value:
            # Must be a ';' at this point.
            mime_parameters.append(ValueTerminal(';', 'parameter-separator'))
            value = value[1:]
    return mime_parameters
def _find_mime_parameters(tokenlist, value):
    """Best-effort recovery of the parameter list of an invalid MIME header.

    Skips forward to the first ';', stashing whatever is skipped into
    *tokenlist*, then parses the remainder as MIME parameters.
    """
    while value and value[0] != ';':
        ch = value[0]
        if ch in PHRASE_ENDS:
            tokenlist.append(ValueTerminal(ch, 'misplaced-special'))
            value = value[1:]
        else:
            phrase, value = get_phrase(value)
            tokenlist.append(phrase)
    if not value:
        return
    tokenlist.append(ValueTerminal(';', 'parameter-separator'))
    tokenlist.append(parse_mime_parameters(value[1:]))
def parse_content_type_header(value):
    """ maintype "/" subtype *( ";" parameter )

    The maintype and substype are tokens.  Theoretically they could
    be checked against the official IANA list + x-token, but we
    don't do that.

    Returns a ContentType token; on any syntax problem a defect is
    recorded and we fall back to _find_mime_parameters to salvage the
    parameter list.
    """
    ctype = ContentType()
    # (Removed the unused 'recover = False' local left over from an
    # earlier revision of this routine.)
    if not value:
        ctype.defects.append(errors.HeaderMissingRequiredValue(
            "Missing content type specification"))
        return ctype
    try:
        token, value = get_token(value)
    except errors.HeaderParseError:
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Expected content maintype but found {!r}".format(value)))
        _find_mime_parameters(ctype, value)
        return ctype
    ctype.append(token)
    # XXX: If we really want to follow the formal grammar we should make
    # mantype and subtype specialized TokenLists here.  Probably not worth it.
    if not value or value[0] != '/':
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Invalid content type"))
        if value:
            _find_mime_parameters(ctype, value)
        return ctype
    ctype.maintype = token.value.strip().lower()
    ctype.append(ValueTerminal('/', 'content-type-separator'))
    value = value[1:]
    try:
        token, value = get_token(value)
    except errors.HeaderParseError:
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Expected content subtype but found {!r}".format(value)))
        _find_mime_parameters(ctype, value)
        return ctype
    ctype.append(token)
    ctype.subtype = token.value.strip().lower()
    if not value:
        return ctype
    if value[0] != ';':
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Only parameters are valid after content type, but "
            "found {!r}".format(value)))
        # The RFC requires that a syntactically invalid content-type be treated
        # as text/plain.  Perhaps we should postel this, but we should probably
        # only do that if we were checking the subtype value against IANA.
        del ctype.maintype, ctype.subtype
        _find_mime_parameters(ctype, value)
        return ctype
    ctype.append(ValueTerminal(';', 'parameter-separator'))
    ctype.append(parse_mime_parameters(value[1:]))
    return ctype
def parse_content_disposition_header(value):
    """ disposition-type *( ";" parameter )
    """
    header = ContentDisposition()
    if not value:
        header.defects.append(errors.HeaderMissingRequiredValue(
            "Missing content disposition"))
        return header
    try:
        tok, value = get_token(value)
    except errors.HeaderParseError:
        header.defects.append(errors.InvalidHeaderDefect(
            "Expected content disposition but found {!r}".format(value)))
        _find_mime_parameters(header, value)
        return header
    header.append(tok)
    header.content_disposition = tok.value.strip().lower()
    if not value:
        return header
    if value[0] != ';':
        # Anything other than a parameter list after the disposition type
        # is junk; record the defect but try to recover the parameters.
        header.defects.append(errors.InvalidHeaderDefect(
            "Only parameters are valid after content disposition, but "
            "found {!r}".format(value)))
        _find_mime_parameters(header, value)
        return header
    header.append(ValueTerminal(';', 'parameter-separator'))
    header.append(parse_mime_parameters(value[1:]))
    return header
def parse_content_transfer_encoding_header(value):
    """ mechanism
    """
    # We should probably validate the values, since the list is fixed.
    header = ContentTransferEncoding()
    if not value:
        header.defects.append(errors.HeaderMissingRequiredValue(
            "Missing content transfer encoding"))
        return header
    try:
        tok, value = get_token(value)
    except errors.HeaderParseError:
        header.defects.append(errors.InvalidHeaderDefect(
            "Expected content transfer encoding but found {!r}".format(value)))
    else:
        header.append(tok)
        header.cte = tok.value.strip().lower()
    if not value:
        return header
    # Anything left over is junk; record a defect per chunk consumed.
    while value:
        header.defects.append(errors.InvalidHeaderDefect(
            "Extra text after content transfer encoding"))
        if value[0] in PHRASE_ENDS:
            header.append(ValueTerminal(value[0], 'misplaced-special'))
            value = value[1:]
        else:
            tok, value = get_phrase(value)
            header.append(tok)
    return header
#
# Header folding
#
# Header folding is complex, with lots of rules and corner cases.  The
# following code does its best to obey the rules and handle the corner
# cases, but you can be sure there are a few bugs :)
#
# This folder generally canonicalizes as it goes, preferring the stringified
# version of each token.  The tokens contain information that supports the
# folder, including which tokens can be encoded in which ways.
#
# Folded text is accumulated in a simple list of strings ('lines'), each
# one of which should be less than policy.max_line_length ('maxlen').
#
def _steal_trailing_WSP_if_exists(lines):
    """Pop and return a single trailing WSP char from the last line, if any.

    Returns '' when the last line is absent, empty, or does not end in WSP.
    """
    if not (lines and lines[-1] and lines[-1][-1] in WSP):
        return ''
    stolen = lines[-1][-1]
    lines[-1] = lines[-1][:-1]
    return stolen
def _refold_parse_tree(parse_tree, *, policy):
    """Return string of contents of parse_tree folded according to RFC rules.

    Walks the token tree as a worklist, appending each token's text to the
    current output line and folding (starting a new line) at syntactic
    breaks so every line stays within policy.max_line_length.  Text not
    representable in the target charset is emitted as RFC 2047 encoded
    words where the tokens permit it.
    """
    # max_line_length 0/None means no limit, ie: infinitely long.
    maxlen = policy.max_line_length or sys.maxsize
    encoding = 'utf-8' if policy.utf8 else 'us-ascii'
    lines = ['']
    # Index on the current line where the last encoded word starts, or None.
    last_ew = None
    # Nesting count of tokens whose text must not be wrapped as encoded words.
    wrap_as_ew_blocked = 0
    want_encoding = False
    # Sentinel pushed after a no-EW token's subparts to mark where the
    # encoded-word restriction ends.
    end_ew_not_allowed = Terminal('', 'wrap_as_ew_blocked')
    parts = list(parse_tree)
    while parts:
        part = parts.pop(0)
        if part is end_ew_not_allowed:
            wrap_as_ew_blocked -= 1
            continue
        tstr = str(part)
        try:
            # Probe whether this token is representable in the target
            # charset; if not, an encoded word (or 8-bit data) is needed.
            tstr.encode(encoding)
            charset = encoding
        except UnicodeEncodeError:
            if any(isinstance(x, errors.UndecodableBytesDefect)
                   for x in part.all_defects):
                charset = 'unknown-8bit'
            else:
                # If policy.utf8 is false this should really be taken from a
                # 'charset' property on the policy.
                charset = 'utf-8'
            want_encoding = True
        if part.token_type == 'mime-parameters':
            # Mime parameter folding (using RFC2231) is extra special.
            _fold_mime_parameters(part, lines, maxlen, encoding)
            continue
        if want_encoding and not wrap_as_ew_blocked:
            if not part.as_ew_allowed:
                want_encoding = False
                last_ew = None
                if part.syntactic_break:
                    encoded_part = part.fold(policy=policy)[:-len(policy.linesep)]
                    if policy.linesep not in encoded_part:
                        # It fits on a single line
                        if len(encoded_part) > maxlen - len(lines[-1]):
                            # But not on this one, so start a new one.
                            newline = _steal_trailing_WSP_if_exists(lines)
                            # XXX what if encoded_part has no leading FWS?
                            lines.append(newline)
                        lines[-1] += encoded_part
                        continue
                # Either this is not a major syntactic break, so we don't
                # want it on a line by itself even if it fits, or it
                # doesn't fit on a line by itself. Either way, fall through
                # to unpacking the subparts and wrapping them.
            if not hasattr(part, 'encode'):
                # It's not a Terminal, do each piece individually.
                parts = list(part) + parts
            else:
                # It's a terminal, wrap it as an encoded word, possibly
                # combining it with previously encoded words if allowed.
                last_ew = _fold_as_ew(tstr, lines, maxlen, last_ew,
                                      part.ew_combine_allowed, charset)
            want_encoding = False
            continue
        if len(tstr) <= maxlen - len(lines[-1]):
            lines[-1] += tstr
            continue
        # This part is too long to fit. The RFC wants us to break at
        # "major syntactic breaks", so unless we don't consider this
        # to be one, check if it will fit on the next line by itself.
        if (part.syntactic_break and
                len(tstr) + 1 <= maxlen):
            newline = _steal_trailing_WSP_if_exists(lines)
            if newline or part.startswith_fws():
                lines.append(newline + tstr)
                last_ew = None
                continue
        if not hasattr(part, 'encode'):
            # It's not a terminal, try folding the subparts.
            newparts = list(part)
            if not part.as_ew_allowed:
                wrap_as_ew_blocked += 1
                newparts.append(end_ew_not_allowed)
            parts = newparts + parts
            continue
        if part.as_ew_allowed and not wrap_as_ew_blocked:
            # It doesn't need CTE encoding, but encode it anyway so we can
            # wrap it.
            parts.insert(0, part)
            want_encoding = True
            continue
        # We can't figure out how to wrap, it, so give up.
        newline = _steal_trailing_WSP_if_exists(lines)
        if newline or part.startswith_fws():
            lines.append(newline + tstr)
        else:
            # We can't fold it onto the next line either...
            lines[-1] += tstr
    return policy.linesep.join(lines) + policy.linesep
def _fold_as_ew(to_encode, lines, maxlen, last_ew, ew_combine_allowed, charset):
    """Fold string to_encode into lines as encoded word, combining if allowed.

    Return the new value for last_ew, or None if ew_combine_allowed is False.

    If there is already an encoded word in the last line of lines (indicated by
    a non-None value for last_ew) and ew_combine_allowed is true, decode the
    existing ew, combine it with to_encode, and re-encode. Otherwise, encode
    to_encode. In either case, split to_encode as necessary so that the
    encoded segments fit within maxlen.
    """
    if last_ew is not None and ew_combine_allowed:
        # Decode the existing encoded word on this line, splice the new text
        # onto it, and re-encode the combination below.
        to_encode = str(
            get_unstructured(lines[-1][last_ew:] + to_encode))
        lines[-1] = lines[-1][:last_ew]
    # NOTE(review): assumes to_encode is non-empty at this point — callers
    # pass a terminal's (non-empty) text; confirm before changing callers.
    if to_encode[0] in WSP:
        # We're joining this to non-encoded text, so don't encode
        # the leading blank.
        leading_wsp = to_encode[0]
        to_encode = to_encode[1:]
        if (len(lines[-1]) == maxlen):
            # Current line is already full; continue on a fresh one.
            lines.append(_steal_trailing_WSP_if_exists(lines))
        lines[-1] += leading_wsp
    trailing_wsp = ''
    if to_encode[-1] in WSP:
        # Likewise for the trailing space.
        trailing_wsp = to_encode[-1]
        to_encode = to_encode[:-1]
    new_last_ew = len(lines[-1]) if last_ew is None else last_ew
    encode_as = 'utf-8' if charset == 'us-ascii' else charset
    # The RFC2047 chrome takes up 7 characters plus the length
    # of the charset name.
    chrome_len = len(encode_as) + 7
    if (chrome_len + 1) >= maxlen:
        raise errors.HeaderParseError(
            "max_line_length is too small to fit an encoded word")
    while to_encode:
        remaining_space = maxlen - len(lines[-1])
        text_space = remaining_space - chrome_len
        if text_space <= 0:
            # No room at all on this line; start a new continuation line.
            lines.append(' ')
            continue
        to_encode_word = to_encode[:text_space]
        encoded_word = _ew.encode(to_encode_word, charset=encode_as)
        excess = len(encoded_word) - remaining_space
        while excess > 0:
            # Since the chunk to encode is guaranteed to fit into less than 100 characters,
            # shrinking it by one at a time shouldn't take long.
            to_encode_word = to_encode_word[:-1]
            encoded_word = _ew.encode(to_encode_word, charset=encode_as)
            excess = len(encoded_word) - remaining_space
        lines[-1] += encoded_word
        to_encode = to_encode[len(to_encode_word):]
        if to_encode:
            lines.append(' ')
            new_last_ew = len(lines[-1])
    lines[-1] += trailing_wsp
    return new_last_ew if ew_combine_allowed else None
def _fold_mime_parameters(part, lines, maxlen, encoding):
    """Fold TokenList 'part' into the 'lines' list as mime parameters.

    Using the decoded list of parameters and values, format them according to
    the RFC rules, including using RFC2231 encoding if the value cannot be
    expressed in 'encoding' and/or the parameter+value is too long to fit
    within 'maxlen'.

    'part' must expose a ``params`` iterable of (name, value) pairs; output
    is appended in place to 'lines'.
    """
    # Special case for RFC2231 encoding: start from decoded values and use
    # RFC2231 encoding iff needed.
    #
    # Note that the 1 and 2s being added to the length calculations are
    # accounting for the possibly-needed spaces and semicolons we'll be adding.
    #
    for name, value in part.params:
        # XXX What if this ';' puts us over maxlen the first time through the
        # loop? We should split the header value onto a newline in that case,
        # but to do that we need to recognize the need earlier or reparse the
        # header, so I'm going to ignore that bug for now. It'll only put us
        # one character over.
        if not lines[-1].rstrip().endswith(';'):
            lines[-1] += ';'
        charset = encoding
        error_handler = 'strict'
        try:
            value.encode(encoding)
            encoding_required = False
        except UnicodeEncodeError:
            encoding_required = True
            if utils._has_surrogates(value):
                charset = 'unknown-8bit'
                error_handler = 'surrogateescape'
            else:
                charset = 'utf-8'
        if encoding_required:
            encoded_value = urllib.parse.quote(
                value, safe='', errors=error_handler)
            # RFC 2231 extended parameter: name*=charset''percent-encoded.
            tstr = "{}*={}''{}".format(name, charset, encoded_value)
        else:
            tstr = '{}={}'.format(name, quote_string(value))
        if len(lines[-1]) + len(tstr) + 1 < maxlen:
            # Fits on the current line (with its separating space).
            lines[-1] = lines[-1] + ' ' + tstr
            continue
        elif len(tstr) + 2 <= maxlen:
            # Fits on a line by itself.
            lines.append(' ' + tstr)
            continue
        # We need multiple sections. We are allowed to mix encoded and
        # non-encoded sections, but we aren't going to. We'll encode them all.
        section = 0
        extra_chrome = charset + "''"
        while value:
            chrome_len = len(name) + len(str(section)) + 3 + len(extra_chrome)
            if maxlen <= chrome_len + 3:
                # We need room for the leading blank, the trailing semicolon,
                # and at least one character of the value. If we don't
                # have that, we'd be stuck, so in that case fall back to
                # the RFC standard width.
                maxlen = 78
            splitpoint = maxchars = maxlen - chrome_len - 2
            while True:
                # Shrink the slice until its percent-encoded form fits.
                partial = value[:splitpoint]
                encoded_value = urllib.parse.quote(
                    partial, safe='', errors=error_handler)
                if len(encoded_value) <= maxchars:
                    break
                splitpoint -= 1
            lines.append(" {}*{}*={}{}".format(
                name, section, extra_chrome, encoded_value))
            extra_chrome = ''
            section += 1
            value = value[splitpoint:]
            if value:
                lines[-1] += ';'
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_1065_0 |
crossvul-python_data_good_117_2 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import ciphers
from cryptography.hazmat.primitives.ciphers import modes
@utils.register_interface(ciphers.CipherContext)
@utils.register_interface(ciphers.AEADCipherContext)
@utils.register_interface(ciphers.AEADEncryptionContext)
@utils.register_interface(ciphers.AEADDecryptionContext)
class _CipherContext(object):
    """Symmetric cipher context backed by an OpenSSL ``EVP_CIPHER_CTX``.

    Wraps libcrypto's EVP interface for both encryption and decryption,
    including AEAD (GCM) tag handling.  Instances are created by the
    OpenSSL backend; ``operation`` selects the direction.
    """
    # Values for EVP_CipherInit_ex's "enc" flag: 1 = encrypt, 0 = decrypt.
    _ENCRYPT = 1
    _DECRYPT = 0
    def __init__(self, backend, cipher, mode, operation):
        """Set up an EVP context for *cipher*/*mode* in direction *operation*."""
        self._backend = backend
        self._cipher = cipher
        self._mode = mode
        self._operation = operation
        # AEAD tag: set up front when decrypting GCM, produced by finalize()
        # when encrypting.
        self._tag = None
        # Block size (bytes) used to size output buffers; stream ciphers
        # effectively have a 1-byte "block".
        if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
            self._block_size_bytes = self._cipher.block_size // 8
        else:
            self._block_size_bytes = 1
        ctx = self._backend._lib.EVP_CIPHER_CTX_new()
        # Tie the native context's lifetime to this object via ffi.gc.
        ctx = self._backend._ffi.gc(
            ctx, self._backend._lib.EVP_CIPHER_CTX_free
        )
        # Look up the backend adapter that maps (cipher, mode) to an
        # EVP_CIPHER; either a missing registry entry or a NULL EVP_CIPHER
        # means this build of OpenSSL cannot do the combination.
        registry = self._backend._cipher_registry
        try:
            adapter = registry[type(cipher), type(mode)]
        except KeyError:
            raise UnsupportedAlgorithm(
                "cipher {0} in {1} mode is not supported "
                "by this backend.".format(
                    cipher.name, mode.name if mode else mode),
                _Reasons.UNSUPPORTED_CIPHER
            )
        evp_cipher = adapter(self._backend, cipher, mode)
        if evp_cipher == self._backend._ffi.NULL:
            raise UnsupportedAlgorithm(
                "cipher {0} in {1} mode is not supported "
                "by this backend.".format(
                    cipher.name, mode.name if mode else mode),
                _Reasons.UNSUPPORTED_CIPHER
            )
        # Pick whichever IV/nonce/tweak the mode (or, for algorithms that
        # carry their own nonce, the cipher itself) supplies.
        if isinstance(mode, modes.ModeWithInitializationVector):
            iv_nonce = mode.initialization_vector
        elif isinstance(mode, modes.ModeWithTweak):
            iv_nonce = mode.tweak
        elif isinstance(mode, modes.ModeWithNonce):
            iv_nonce = mode.nonce
        elif isinstance(cipher, modes.ModeWithNonce):
            # NOTE(review): checks the *cipher* object against the mode
            # interface — presumably for nonce-carrying algorithms used with
            # mode=None; confirm against the backend's cipher registry.
            iv_nonce = cipher.nonce
        else:
            iv_nonce = self._backend._ffi.NULL
        # begin init with cipher and operation type
        res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher,
                                                   self._backend._ffi.NULL,
                                                   self._backend._ffi.NULL,
                                                   self._backend._ffi.NULL,
                                                   operation)
        self._backend.openssl_assert(res != 0)
        # set the key length to handle variable key ciphers
        res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
            ctx, len(cipher.key)
        )
        self._backend.openssl_assert(res != 0)
        if isinstance(mode, modes.GCM):
            # Tell OpenSSL the IV length before the key/IV are passed.
            res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
                ctx, self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
                len(iv_nonce), self._backend._ffi.NULL
            )
            self._backend.openssl_assert(res != 0)
            if mode.tag is not None:
                # Decryption with an up-front tag: register it now so
                # EVP_CipherFinal_ex can verify it.
                res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
                    ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
                    len(mode.tag), mode.tag
                )
                self._backend.openssl_assert(res != 0)
                self._tag = mode.tag
            elif (
                self._operation == self._DECRYPT and
                self._backend._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and
                not self._backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
            ):
                # Older OpenSSL cannot accept the tag after update() calls.
                raise NotImplementedError(
                    "delayed passing of GCM tag requires OpenSSL >= 1.0.2."
                    " To use this feature please update OpenSSL"
                )
        # pass key/iv
        res = self._backend._lib.EVP_CipherInit_ex(
            ctx,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            cipher.key,
            iv_nonce,
            operation
        )
        self._backend.openssl_assert(res != 0)
        # We purposely disable padding here as it's handled higher up in the
        # API.
        self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0)
        self._ctx = ctx
    def update(self, data):
        """Process *data* and return the resulting ciphertext/plaintext bytes."""
        buf = bytearray(len(data) + self._block_size_bytes - 1)
        n = self.update_into(data, buf)
        return bytes(buf[:n])
    def update_into(self, data, buf):
        """Process *data* writing output into *buf*; return bytes written.

        *buf* must be at least len(data) + block_size - 1 bytes, since a
        partially buffered block from a previous call may be flushed.
        """
        if len(buf) < (len(data) + self._block_size_bytes - 1):
            raise ValueError(
                "buffer must be at least {0} bytes for this "
                "payload".format(len(data) + self._block_size_bytes - 1)
            )
        buf = self._backend._ffi.cast(
            "unsigned char *", self._backend._ffi.from_buffer(buf)
        )
        outlen = self._backend._ffi.new("int *")
        res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen,
                                                  data, len(data))
        self._backend.openssl_assert(res != 0)
        return outlen[0]
    def finalize(self):
        """Finish the operation; return any remaining output bytes.

        For GCM decryption this is where tag verification happens
        (InvalidTag on mismatch); for GCM encryption the tag is captured
        into ``self._tag``.
        """
        # OpenSSL 1.0.1 on Ubuntu 12.04 (and possibly other distributions)
        # appears to have a bug where you must make at least one call to update
        # even if you are only using authenticate_additional_data or the
        # GCM tag will be wrong. An (empty) call to update resolves this
        # and is harmless for all other versions of OpenSSL.
        if isinstance(self._mode, modes.GCM):
            self.update(b"")
        if (
            self._operation == self._DECRYPT and
            isinstance(self._mode, modes.ModeWithAuthenticationTag) and
            self.tag is None
        ):
            raise ValueError(
                "Authentication tag must be provided when decrypting."
            )
        buf = self._backend._ffi.new("unsigned char[]", self._block_size_bytes)
        outlen = self._backend._ffi.new("int *")
        res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
        if res == 0:
            errors = self._backend._consume_errors()
            # GCM failure with an empty error queue means tag mismatch.
            if not errors and isinstance(self._mode, modes.GCM):
                raise InvalidTag
            # NOTE(review): assumes a queued error exists for non-GCM
            # failures — errors[0] would raise IndexError otherwise; confirm.
            self._backend.openssl_assert(
                errors[0]._lib_reason_match(
                    self._backend._lib.ERR_LIB_EVP,
                    self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
                )
            )
            raise ValueError(
                "The length of the provided data is not a multiple of "
                "the block length."
            )
        if (isinstance(self._mode, modes.GCM) and
           self._operation == self._ENCRYPT):
            tag_buf = self._backend._ffi.new(
                "unsigned char[]", self._block_size_bytes
            )
            res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
                self._ctx, self._backend._lib.EVP_CTRL_AEAD_GET_TAG,
                self._block_size_bytes, tag_buf
            )
            self._backend.openssl_assert(res != 0)
            self._tag = self._backend._ffi.buffer(tag_buf)[:]
        res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx)
        self._backend.openssl_assert(res == 1)
        return self._backend._ffi.buffer(buf)[:outlen[0]]
    def finalize_with_tag(self, tag):
        """Register the expected GCM *tag* late, then finalize (decrypt only)."""
        if (
            self._backend._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and
            not self._backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
        ):
            raise NotImplementedError(
                "finalize_with_tag requires OpenSSL >= 1.0.2. To use this "
                "method please update OpenSSL"
            )
        if len(tag) < self._mode._min_tag_length:
            raise ValueError(
                "Authentication tag must be {0} bytes or longer.".format(
                    self._mode._min_tag_length)
            )
        res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
            self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
            len(tag), tag
        )
        self._backend.openssl_assert(res != 0)
        self._tag = tag
        return self.finalize()
    def authenticate_additional_data(self, data):
        """Feed AAD (authenticated but not encrypted data) to the context."""
        outlen = self._backend._ffi.new("int *")
        # A NULL output buffer tells EVP_CipherUpdate this is AAD.
        res = self._backend._lib.EVP_CipherUpdate(
            self._ctx, self._backend._ffi.NULL, outlen, data, len(data)
        )
        self._backend.openssl_assert(res != 0)
    # Read-only public view of the AEAD tag (None until available).
    tag = utils.read_only_property("_tag")
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_117_2 |
crossvul-python_data_bad_3523_0 | import datetime
from StringIO import StringIO
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.serializers import json
from django.utils import simplejson
from django.utils.encoding import force_unicode
from tastypie.bundle import Bundle
from tastypie.exceptions import UnsupportedFormat
from tastypie.utils import format_datetime, format_date, format_time
try:
import lxml
from lxml.etree import parse as parse_xml
from lxml.etree import Element, tostring
except ImportError:
lxml = None
try:
import yaml
from django.core.serializers import pyyaml
except ImportError:
yaml = None
try:
import biplist
except ImportError:
biplist = None
class Serializer(object):
    """
    A swappable class for serialization.
    This handles most types of data as well as the following output formats::
        * json
        * jsonp
        * xml
        * yaml
        * html
        * plist (see http://explorapp.com/biplist/)
    It was designed to make changing behavior easy, either by overridding the
    various format methods (i.e. ``to_json``), by changing the
    ``formats/content_types`` options or by altering the other hook methods.
    """
    formats = ['json', 'jsonp', 'xml', 'yaml', 'html', 'plist']
    content_types = {
        'json': 'application/json',
        'jsonp': 'text/javascript',
        'xml': 'application/xml',
        'yaml': 'text/yaml',
        'html': 'text/html',
        'plist': 'application/x-plist',
    }
    def __init__(self, formats=None, content_types=None, datetime_formatting=None):
        self.supported_formats = []
        self.datetime_formatting = getattr(settings, 'TASTYPIE_DATETIME_FORMATTING', 'iso-8601')
        if formats is not None:
            self.formats = formats
        if content_types is not None:
            self.content_types = content_types
        if datetime_formatting is not None:
            self.datetime_formatting = datetime_formatting
        for format in self.formats:
            try:
                self.supported_formats.append(self.content_types[format])
            except KeyError:
                raise ImproperlyConfigured("Content type for specified type '%s' not found. Please provide it at either the class level or via the arguments." % format)
    def get_mime_for_format(self, format):
        """
        Given a format, attempts to determine the correct MIME type.
        If not available on the current ``Serializer``, returns
        ``application/json`` by default.
        """
        try:
            return self.content_types[format]
        except KeyError:
            return 'application/json'
    def format_datetime(self, data):
        """
        A hook to control how datetimes are formatted.
        Can be overridden at the ``Serializer`` level (``datetime_formatting``)
        or globally (via ``settings.TASTYPIE_DATETIME_FORMATTING``).
        Default is ``iso-8601``, which looks like "2010-12-16T03:02:14".
        """
        if self.datetime_formatting == 'rfc-2822':
            return format_datetime(data)
        return data.isoformat()
    def format_date(self, data):
        """
        A hook to control how dates are formatted.
        Can be overridden at the ``Serializer`` level (``datetime_formatting``)
        or globally (via ``settings.TASTYPIE_DATETIME_FORMATTING``).
        Default is ``iso-8601``, which looks like "2010-12-16".
        """
        if self.datetime_formatting == 'rfc-2822':
            return format_date(data)
        return data.isoformat()
    def format_time(self, data):
        """
        A hook to control how times are formatted.
        Can be overridden at the ``Serializer`` level (``datetime_formatting``)
        or globally (via ``settings.TASTYPIE_DATETIME_FORMATTING``).
        Default is ``iso-8601``, which looks like "03:02:14".
        """
        if self.datetime_formatting == 'rfc-2822':
            return format_time(data)
        return data.isoformat()
    def serialize(self, bundle, format='application/json', options=None):
        """
        Given some data and a format, calls the correct method to serialize
        the data and returns the result.
        """
        # ``options`` previously defaulted to a shared mutable ``{}``; a
        # fresh dict per call avoids the classic mutable-default pitfall.
        if options is None:
            options = {}
        desired_format = None
        for short_format, long_format in self.content_types.items():
            if format == long_format:
                if hasattr(self, "to_%s" % short_format):
                    desired_format = short_format
                    break
        if desired_format is None:
            raise UnsupportedFormat("The format indicated '%s' had no available serialization method. Please check your ``formats`` and ``content_types`` on your Serializer." % format)
        serialized = getattr(self, "to_%s" % desired_format)(bundle, options)
        return serialized
    def deserialize(self, content, format='application/json'):
        """
        Given some data and a format, calls the correct method to deserialize
        the data and returns the result.
        """
        desired_format = None
        # Strip any content-type parameters (e.g. "; charset=utf-8").
        format = format.split(';')[0]
        for short_format, long_format in self.content_types.items():
            if format == long_format:
                if hasattr(self, "from_%s" % short_format):
                    desired_format = short_format
                    break
        if desired_format is None:
            raise UnsupportedFormat("The format indicated '%s' had no available deserialization method. Please check your ``formats`` and ``content_types`` on your Serializer." % format)
        deserialized = getattr(self, "from_%s" % desired_format)(content)
        return deserialized
    def to_simple(self, data, options):
        """
        For a piece of data, attempts to recognize it and provide a simplified
        form of something complex.
        This brings complex Python data structures down to native types of the
        serialization format(s).
        """
        if isinstance(data, (list, tuple)):
            return [self.to_simple(item, options) for item in data]
        if isinstance(data, dict):
            return dict((key, self.to_simple(val, options)) for (key, val) in data.iteritems())
        elif isinstance(data, Bundle):
            return dict((key, self.to_simple(val, options)) for (key, val) in data.data.iteritems())
        elif hasattr(data, 'dehydrated_type'):
            if getattr(data, 'dehydrated_type', None) == 'related' and data.is_m2m == False:
                if data.full:
                    return self.to_simple(data.fk_resource, options)
                else:
                    return self.to_simple(data.value, options)
            elif getattr(data, 'dehydrated_type', None) == 'related' and data.is_m2m == True:
                if data.full:
                    return [self.to_simple(bundle, options) for bundle in data.m2m_bundles]
                else:
                    return [self.to_simple(val, options) for val in data.value]
            else:
                return self.to_simple(data.value, options)
        elif isinstance(data, datetime.datetime):
            return self.format_datetime(data)
        elif isinstance(data, datetime.date):
            return self.format_date(data)
        elif isinstance(data, datetime.time):
            return self.format_time(data)
        elif isinstance(data, bool):
            return data
        elif type(data) in (long, int, float):
            return data
        elif data is None:
            return None
        else:
            return force_unicode(data)
    def to_etree(self, data, options=None, name=None, depth=0):
        """
        Given some data, converts that data to an ``etree.Element`` suitable
        for use in the XML output.
        """
        if isinstance(data, (list, tuple)):
            element = Element(name or 'objects')
            if name:
                element = Element(name)
                element.set('type', 'list')
            else:
                element = Element('objects')
            for item in data:
                element.append(self.to_etree(item, options, depth=depth+1))
        elif isinstance(data, dict):
            if depth == 0:
                element = Element(name or 'response')
            else:
                element = Element(name or 'object')
                element.set('type', 'hash')
            for (key, value) in data.iteritems():
                element.append(self.to_etree(value, options, name=key, depth=depth+1))
        elif isinstance(data, Bundle):
            element = Element(name or 'object')
            for field_name, field_object in data.data.items():
                element.append(self.to_etree(field_object, options, name=field_name, depth=depth+1))
        elif hasattr(data, 'dehydrated_type'):
            if getattr(data, 'dehydrated_type', None) == 'related' and data.is_m2m == False:
                if data.full:
                    return self.to_etree(data.fk_resource, options, name, depth+1)
                else:
                    return self.to_etree(data.value, options, name, depth+1)
            elif getattr(data, 'dehydrated_type', None) == 'related' and data.is_m2m == True:
                if data.full:
                    element = Element(name or 'objects')
                    for bundle in data.m2m_bundles:
                        element.append(self.to_etree(bundle, options, bundle.resource_name, depth+1))
                else:
                    element = Element(name or 'objects')
                    for value in data.value:
                        element.append(self.to_etree(value, options, name, depth=depth+1))
            else:
                return self.to_etree(data.value, options, name)
        else:
            element = Element(name or 'value')
            simple_data = self.to_simple(data, options)
            data_type = get_type_string(simple_data)
            if data_type != 'string':
                element.set('type', get_type_string(simple_data))
            if data_type != 'null':
                element.text = force_unicode(simple_data)
        return element
    def from_etree(self, data):
        """
        Not the smartest deserializer on the planet. At the request level,
        it first tries to output the deserialized subelement called "object"
        or "objects" and falls back to deserializing based on hinted types in
        the XML element attribute "type".
        """
        if data.tag == 'request':
            # if "object" or "objects" exists, return deserialized forms.
            elements = data.getchildren()
            for element in elements:
                if element.tag in ('object', 'objects'):
                    return self.from_etree(element)
            return dict((element.tag, self.from_etree(element)) for element in elements)
        elif data.tag == 'object' or data.get('type') == 'hash':
            return dict((element.tag, self.from_etree(element)) for element in data.getchildren())
        elif data.tag == 'objects' or data.get('type') == 'list':
            return [self.from_etree(element) for element in data.getchildren()]
        else:
            type_string = data.get('type')
            if type_string in ('string', None):
                return data.text
            elif type_string == 'integer':
                return int(data.text)
            elif type_string == 'float':
                return float(data.text)
            elif type_string == 'boolean':
                if data.text == 'True':
                    return True
                else:
                    return False
            else:
                return None
    def to_json(self, data, options=None):
        """
        Given some Python data, produces JSON output.
        """
        options = options or {}
        data = self.to_simple(data, options)
        return simplejson.dumps(data, cls=json.DjangoJSONEncoder, sort_keys=True)
    def from_json(self, content):
        """
        Given some JSON data, returns a Python dictionary of the decoded data.
        """
        return simplejson.loads(content)
    def to_jsonp(self, data, options=None):
        """
        Given some Python data, produces JSON output wrapped in the provided
        callback.
        """
        options = options or {}
        return '%s(%s)' % (options['callback'], self.to_json(data, options))
    def to_xml(self, data, options=None):
        """
        Given some Python data, produces XML output.
        """
        options = options or {}
        if lxml is None:
            raise ImproperlyConfigured("Usage of the XML aspects requires lxml.")
        return tostring(self.to_etree(data, options), xml_declaration=True, encoding='utf-8')
    def from_xml(self, content):
        """
        Given some XML data, returns a Python dictionary of the decoded data.
        """
        if lxml is None:
            raise ImproperlyConfigured("Usage of the XML aspects requires lxml.")
        # SECURITY NOTE: lxml's default parser resolves DTDs/entities, so
        # crafted XML from an API client can mount entity-expansion or
        # external-entity (XXE) attacks.  Consider parsing via
        # ``defusedxml.lxml`` when this input is untrusted.
        return self.from_etree(parse_xml(StringIO(content)).getroot())
    def to_yaml(self, data, options=None):
        """
        Given some Python data, produces YAML output.
        """
        options = options or {}
        if yaml is None:
            raise ImproperlyConfigured("Usage of the YAML aspects requires yaml.")
        return yaml.dump(self.to_simple(data, options))
    def from_yaml(self, content):
        """
        Given some YAML data, returns a Python dictionary of the decoded data.
        Uses ``yaml.safe_load`` so untrusted payloads cannot construct
        arbitrary Python objects.
        """
        if yaml is None:
            raise ImproperlyConfigured("Usage of the YAML aspects requires yaml.")
        # SECURITY: ``yaml.load`` executes ``!!python/...`` tags, allowing
        # arbitrary object construction (and code execution) from
        # client-supplied request bodies.  ``safe_load`` restricts input to
        # plain YAML types, which is all ``to_yaml`` ever emits.
        return yaml.safe_load(content)
    def to_plist(self, data, options=None):
        """
        Given some Python data, produces binary plist output.
        """
        options = options or {}
        if biplist is None:
            raise ImproperlyConfigured("Usage of the plist aspects requires biplist.")
        return biplist.writePlistToString(self.to_simple(data, options))
    def from_plist(self, content):
        """
        Given some binary plist data, returns a Python dictionary of the decoded data.
        """
        if biplist is None:
            raise ImproperlyConfigured("Usage of the plist aspects requires biplist.")
        return biplist.readPlistFromString(content)
    def to_html(self, data, options=None):
        """
        Reserved for future usage.
        The desire is to provide HTML output of a resource, making an API
        available to a browser. This is on the TODO list but not currently
        implemented.
        """
        options = options or {}
        return 'Sorry, not implemented yet. Please append "?format=json" to your URL.'
    def from_html(self, content):
        """
        Reserved for future usage.
        The desire is to handle form-based (maybe Javascript?) input, making an
        API available to a browser. This is on the TODO list but not currently
        implemented.
        """
        pass
def get_type_string(data):
    """
    Translates a Python data type into a string format.
    """
    # Exact-type lookup (no subclass matching) mirrors the original chain
    # of ``type(data) == T`` comparisons.
    type_names = {
        int: 'integer',
        long: 'integer',
        float: 'float',
        bool: 'boolean',
        list: 'list',
        tuple: 'list',
        dict: 'hash',
    }
    kind = type(data)
    if kind in type_names:
        return type_names[kind]
    if data is None:
        return 'null'
    if isinstance(data, basestring):
        return 'string'
    # Any other type implicitly yields None, matching the original.
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_3523_0 |
crossvul-python_data_good_3268_2 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import tempfile
from string import ascii_letters, digits
from ansible.errors import AnsibleOptionsError
from ansible.module_utils.six import string_types
from ansible.module_utils.six.moves import configparser
from ansible.module_utils._text import to_text
from ansible.parsing.quoting import unquote
from ansible.utils.path import makedirs_safe
# Accepted "truthy" spellings for string-valued config options; values are
# lowercased before comparison (see mk_boolean below).
BOOL_TRUE = frozenset([ "true", "t", "y", "1", "yes", "on" ])
def mk_boolean(value):
    """Coerce a config value to a bool.

    A real bool is returned unchanged; any other value is stringified and
    matched case-insensitively against the accepted "true" spellings in
    BOOL_TRUE.  None and unrecognized strings therefore map to False.
    """
    if isinstance(value, bool):
        return value
    # str(None).lower() == 'none' is not in BOOL_TRUE, so None -> False.
    # The previous explicit ``value is None`` branch was dead code: its
    # result was immediately overwritten by this expression.
    return str(value).lower() in BOOL_TRUE
def shell_expand(path, expand_relative_paths=False):
    '''
    shell_expand is needed as os.path.expanduser does not work
    when path is None, which is the default for ANSIBLE_PRIVATE_KEY_FILE
    '''
    if not path:
        # Pass through None/'' untouched.
        return path
    expanded = os.path.expanduser(os.path.expandvars(path))
    if expand_relative_paths and not expanded.startswith('/'):
        # Relative paths are resolved against the loaded config file's
        # directory (when one exists), then made absolute.
        if 'CONFIG_FILE' in globals():
            expanded = os.path.join(os.path.dirname(CONFIG_FILE), expanded)
        expanded = os.path.abspath(expanded)
    return expanded
def get_config(p, section, key, env_var, default, value_type=None, expand_relative_paths=False):
    ''' return a configuration variable with casting
    :arg p: A ConfigParser object to look for the configuration in
    :arg section: A section of the ini config that should be examined for this section.
    :arg key: The config key to get this config from
    :arg env_var: An Environment variable to check for the config var. If
        this is set to None then no environment variable will be used.
    :arg default: A default value to assign to the config var if nothing else sets it.
    :kwarg value_type: The type of the value.  This can be any of the following strings:
        :boolean: sets the value to a True or False value
        :integer: Sets the value to an integer or raises a ValueType error
        :float: Sets the value to a float or raises a ValueType error
        :list: Treats the value as a comma separated list.  Split the value
            and return it as a python list.
        :none: Sets the value to None
        :path: Expands any environment variables and tilde's in the value.
        :tmp_path: Create a unique temporary directory inside of the directory
            specified by value and return its path.
        :pathlist: Treat the value as a typical PATH string.  (On POSIX, this
            means colon separated strings.)  Split the value and then expand
            each part for environment variables and tildes.
    :kwarg expand_relative_paths: for pathlist and path types, if this is set
        to True then also change any relative paths into absolute paths.  The
        default is False.
    '''
    value = _get_config(p, section, key, env_var, default)
    if value_type == 'boolean':
        value = mk_boolean(value)
    elif value:
        # Falsy values ('' etc.) are passed through without casting.
        value = _cast_config_value(value, value_type, expand_relative_paths)
    return to_text(value, errors='surrogate_or_strict', nonstring='passthru')

def _cast_config_value(value, value_type, expand_relative_paths):
    # Apply the value_type conversion documented in get_config; unknown
    # types fall through to unquoting (strings) or the raw value.
    if value_type == 'integer':
        return int(value)
    if value_type == 'float':
        return float(value)
    if value_type == 'list':
        if isinstance(value, string_types):
            return [x.strip() for x in value.split(',')]
        return value
    if value_type == 'none':
        return None if value == "None" else value
    if value_type == 'path':
        return shell_expand(value, expand_relative_paths=expand_relative_paths)
    if value_type == 'tmppath':
        value = shell_expand(value)
        if not os.path.exists(value):
            makedirs_safe(value, 0o700)
        return tempfile.mkdtemp(prefix='ansible-local-%s' % os.getpid(), dir=value)
    if value_type == 'pathlist':
        if isinstance(value, string_types):
            return [shell_expand(x, expand_relative_paths=expand_relative_paths)
                    for x in value.split(os.pathsep)]
        return value
    if isinstance(value, string_types):
        return unquote(value)
    return value
def _get_config(p, section, key, env_var, default):
    ''' helper function for get_config

    Resolution order: ini file value (if a parser was given), overridden by
    the environment variable (if set), falling back to ``default``.
    '''
    value = default
    if p is not None:
        try:
            value = p.get(section, key, raw=True)
        except configparser.Error:
            # A missing section/option (or other parse problem) just means
            # the default stands.  Previously a bare ``except`` also
            # swallowed KeyboardInterrupt/SystemExit and programming errors.
            pass
    if env_var is not None:
        env_value = os.environ.get(env_var, None)
        if env_value is not None:
            # Environment always wins over the ini file.
            value = env_value
    return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
def load_config_file():
    ''' Load Config File order(first found is used): ENV, CWD, HOME, /etc/ansible '''
    parser = configparser.ConfigParser()

    # Build the candidate list, highest priority first.
    env_path = os.getenv("ANSIBLE_CONFIG", None)
    if env_path is not None:
        env_path = os.path.expanduser(env_path)
        # ANSIBLE_CONFIG may point at a directory; look for ansible.cfg inside.
        if os.path.isdir(env_path):
            env_path += "/ansible.cfg"
    try:
        cwd_path = os.getcwd() + "/ansible.cfg"
    except OSError:
        # The current working directory may have been deleted from under us.
        cwd_path = None

    candidates = [
        env_path,
        cwd_path,
        os.path.expanduser("~/.ansible.cfg"),
        "/etc/ansible/ansible.cfg",
    ]

    # First existing candidate wins; the rest are never consulted.
    for candidate in candidates:
        if candidate is None or not os.path.exists(candidate):
            continue
        try:
            parser.read(candidate)
        except configparser.Error as e:
            raise AnsibleOptionsError("Error reading config file: \n{0}".format(e))
        return parser, candidate
    return None, ''
# Parse the first ansible.cfg found; p is None when no config file exists.
p, CONFIG_FILE = load_config_file()
# check all of these extensions when looking for yaml files for things like
# group variables -- really anything we can load
YAML_FILENAME_EXTENSIONS = [ "", ".yml", ".yaml", ".json" ]
# the default whitelist for cow stencils
DEFAULT_COW_WHITELIST = ['bud-frogs', 'bunny', 'cheese', 'daemon', 'default', 'dragon', 'elephant-in-snake', 'elephant',
                         'eyes', 'hellokitty', 'kitty', 'luke-koala', 'meow', 'milk', 'moofasa', 'moose', 'ren', 'sheep',
                         'small', 'stegosaurus', 'stimpy', 'supermilker', 'three-eyes', 'turkey', 'turtle', 'tux', 'udder',
                         'vader-koala', 'vader', 'www',]
# sections in config file
DEFAULTS='defaults'
# FIXME: add deprecation warning when these get set
#### DEPRECATED VARS ####
#
#### If --tags or --skip-tags is given multiple times on the CLI and this is
# True, merge the lists of tags together. If False, let the last argument
# overwrite any previous ones. Behaviour is overwrite through 2.2. 2.3
# overwrites but prints deprecation. 2.4 the default is to merge.
MERGE_MULTIPLE_CLI_TAGS = get_config(p, DEFAULTS, 'merge_multiple_cli_tags', 'ANSIBLE_MERGE_MULTIPLE_CLI_TAGS', True, value_type='boolean')
#### GENERALLY CONFIGURABLE THINGS ####
# Every get_config() call resolves with precedence:
#   environment variable > ansible.cfg section/key > the default given here.
DEFAULT_DEBUG = get_config(p, DEFAULTS, 'debug', 'ANSIBLE_DEBUG', False, value_type='boolean')
DEFAULT_VERBOSITY = get_config(p, DEFAULTS, 'verbosity', 'ANSIBLE_VERBOSITY', 0, value_type='integer')
DEFAULT_HOST_LIST = get_config(p, DEFAULTS,'inventory', 'ANSIBLE_INVENTORY', '/etc/ansible/hosts', value_type='path')
DEFAULT_ROLES_PATH = get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH',
                                '~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles',
                                value_type='pathlist', expand_relative_paths=True)
DEFAULT_REMOTE_TMP = get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '~/.ansible/tmp')
# 'tmppath' creates a unique ansible-local-<pid> directory under the value at import time.
DEFAULT_LOCAL_TMP = get_config(p, DEFAULTS, 'local_tmp', 'ANSIBLE_LOCAL_TEMP', '~/.ansible/tmp', value_type='tmppath')
DEFAULT_MODULE_NAME = get_config(p, DEFAULTS, 'module_name', None, 'command')
DEFAULT_FACT_PATH = get_config(p, DEFAULTS, 'fact_path', 'ANSIBLE_FACT_PATH', None, value_type='path')
DEFAULT_FORKS = get_config(p, DEFAULTS, 'forks', 'ANSIBLE_FORKS', 5, value_type='integer')
DEFAULT_MODULE_ARGS = get_config(p, DEFAULTS, 'module_args', 'ANSIBLE_MODULE_ARGS', '')
DEFAULT_MODULE_LANG = get_config(p, DEFAULTS, 'module_lang', 'ANSIBLE_MODULE_LANG', os.getenv('LANG', 'en_US.UTF-8'))
DEFAULT_MODULE_SET_LOCALE = get_config(p, DEFAULTS, 'module_set_locale','ANSIBLE_MODULE_SET_LOCALE',False, value_type='boolean')
DEFAULT_MODULE_COMPRESSION= get_config(p, DEFAULTS, 'module_compression', None, 'ZIP_DEFLATED')
DEFAULT_TIMEOUT = get_config(p, DEFAULTS, 'timeout', 'ANSIBLE_TIMEOUT', 10, value_type='integer')
DEFAULT_POLL_INTERVAL = get_config(p, DEFAULTS, 'poll_interval', 'ANSIBLE_POLL_INTERVAL', 15, value_type='integer')
DEFAULT_REMOTE_USER = get_config(p, DEFAULTS, 'remote_user', 'ANSIBLE_REMOTE_USER', None)
DEFAULT_ASK_PASS = get_config(p, DEFAULTS, 'ask_pass', 'ANSIBLE_ASK_PASS', False, value_type='boolean')
DEFAULT_PRIVATE_KEY_FILE = get_config(p, DEFAULTS, 'private_key_file', 'ANSIBLE_PRIVATE_KEY_FILE', None, value_type='path')
DEFAULT_REMOTE_PORT = get_config(p, DEFAULTS, 'remote_port', 'ANSIBLE_REMOTE_PORT', None, value_type='integer')
DEFAULT_ASK_VAULT_PASS = get_config(p, DEFAULTS, 'ask_vault_pass', 'ANSIBLE_ASK_VAULT_PASS', False, value_type='boolean')
DEFAULT_VAULT_PASSWORD_FILE = get_config(p, DEFAULTS, 'vault_password_file', 'ANSIBLE_VAULT_PASSWORD_FILE', None, value_type='path')
DEFAULT_TRANSPORT = get_config(p, DEFAULTS, 'transport', 'ANSIBLE_TRANSPORT', 'smart')
DEFAULT_SCP_IF_SSH = get_config(p, 'ssh_connection', 'scp_if_ssh', 'ANSIBLE_SCP_IF_SSH', 'smart')
DEFAULT_SFTP_BATCH_MODE = get_config(p, 'ssh_connection', 'sftp_batch_mode', 'ANSIBLE_SFTP_BATCH_MODE', True, value_type='boolean')
DEFAULT_SSH_TRANSFER_METHOD = get_config(p, 'ssh_connection', 'transfer_method', 'ANSIBLE_SSH_TRANSFER_METHOD', None)
DEFAULT_MANAGED_STR = get_config(p, DEFAULTS, 'ansible_managed', None, 'Ansible managed')
DEFAULT_SYSLOG_FACILITY = get_config(p, DEFAULTS, 'syslog_facility', 'ANSIBLE_SYSLOG_FACILITY', 'LOG_USER')
DEFAULT_KEEP_REMOTE_FILES = get_config(p, DEFAULTS, 'keep_remote_files', 'ANSIBLE_KEEP_REMOTE_FILES', False, value_type='boolean')
DEFAULT_HASH_BEHAVIOUR = get_config(p, DEFAULTS, 'hash_behaviour', 'ANSIBLE_HASH_BEHAVIOUR', 'replace')
DEFAULT_PRIVATE_ROLE_VARS = get_config(p, DEFAULTS, 'private_role_vars', 'ANSIBLE_PRIVATE_ROLE_VARS', False, value_type='boolean')
DEFAULT_JINJA2_EXTENSIONS = get_config(p, DEFAULTS, 'jinja2_extensions', 'ANSIBLE_JINJA2_EXTENSIONS', None)
DEFAULT_EXECUTABLE = get_config(p, DEFAULTS, 'executable', 'ANSIBLE_EXECUTABLE', '/bin/sh')
# .lower() is applied to the raw string value, so these are case-insensitive.
DEFAULT_GATHERING = get_config(p, DEFAULTS, 'gathering', 'ANSIBLE_GATHERING', 'implicit').lower()
DEFAULT_GATHER_SUBSET = get_config(p, DEFAULTS, 'gather_subset', 'ANSIBLE_GATHER_SUBSET', 'all').lower()
DEFAULT_GATHER_TIMEOUT = get_config(p, DEFAULTS, 'gather_timeout', 'ANSIBLE_GATHER_TIMEOUT', 10, value_type='integer')
DEFAULT_LOG_PATH = get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', '', value_type='path')
DEFAULT_FORCE_HANDLERS = get_config(p, DEFAULTS, 'force_handlers', 'ANSIBLE_FORCE_HANDLERS', False, value_type='boolean')
DEFAULT_INVENTORY_IGNORE = get_config(p, DEFAULTS, 'inventory_ignore_extensions', 'ANSIBLE_INVENTORY_IGNORE',
                                      ["~", ".orig", ".bak", ".ini", ".cfg", ".retry", ".pyc", ".pyo"], value_type='list')
DEFAULT_VAR_COMPRESSION_LEVEL = get_config(p, DEFAULTS, 'var_compression_level', 'ANSIBLE_VAR_COMPRESSION_LEVEL', 0, value_type='integer')
DEFAULT_INTERNAL_POLL_INTERVAL = get_config(p, DEFAULTS, 'internal_poll_interval', None, 0.001, value_type='float')
DEFAULT_ALLOW_UNSAFE_LOOKUPS = get_config(p, DEFAULTS, 'allow_unsafe_lookups', None, False, value_type='boolean')
ERROR_ON_MISSING_HANDLER = get_config(p, DEFAULTS, 'error_on_missing_handler', 'ANSIBLE_ERROR_ON_MISSING_HANDLER', True, value_type='boolean')
SHOW_CUSTOM_STATS = get_config(p, DEFAULTS, 'show_custom_stats', 'ANSIBLE_SHOW_CUSTOM_STATS', False, value_type='boolean')
NAMESPACE_FACTS = get_config(p, DEFAULTS, 'restrict_facts_namespace', 'ANSIBLE_RESTRICT_FACTS', False, value_type='boolean')
# static includes
DEFAULT_TASK_INCLUDES_STATIC = get_config(p, DEFAULTS, 'task_includes_static', 'ANSIBLE_TASK_INCLUDES_STATIC', False, value_type='boolean')
DEFAULT_HANDLER_INCLUDES_STATIC = get_config(p, DEFAULTS, 'handler_includes_static', 'ANSIBLE_HANDLER_INCLUDES_STATIC', False, value_type='boolean')
# disclosure
DEFAULT_NO_LOG = get_config(p, DEFAULTS, 'no_log', 'ANSIBLE_NO_LOG', False, value_type='boolean')
DEFAULT_NO_TARGET_SYSLOG = get_config(p, DEFAULTS, 'no_target_syslog', 'ANSIBLE_NO_TARGET_SYSLOG', False, value_type='boolean')
ALLOW_WORLD_READABLE_TMPFILES = get_config(p, DEFAULTS, 'allow_world_readable_tmpfiles', None, False, value_type='boolean')
# selinux
DEFAULT_SELINUX_SPECIAL_FS = get_config(p, 'selinux', 'special_context_filesystems', None, 'fuse, nfs, vboxsf, ramfs, 9p', value_type='list')
DEFAULT_LIBVIRT_LXC_NOSECLABEL = get_config(p, 'selinux', 'libvirt_lxc_noseclabel', 'LIBVIRT_LXC_NOSECLABEL', False, value_type='boolean')
### PRIVILEGE ESCALATION ###
# Backwards Compat
DEFAULT_SU = get_config(p, DEFAULTS, 'su', 'ANSIBLE_SU', False, value_type='boolean')
DEFAULT_SU_USER = get_config(p, DEFAULTS, 'su_user', 'ANSIBLE_SU_USER', 'root')
DEFAULT_SU_EXE = get_config(p, DEFAULTS, 'su_exe', 'ANSIBLE_SU_EXE', None)
DEFAULT_SU_FLAGS = get_config(p, DEFAULTS, 'su_flags', 'ANSIBLE_SU_FLAGS', None)
DEFAULT_ASK_SU_PASS = get_config(p, DEFAULTS, 'ask_su_pass', 'ANSIBLE_ASK_SU_PASS', False, value_type='boolean')
DEFAULT_SUDO = get_config(p, DEFAULTS, 'sudo', 'ANSIBLE_SUDO', False, value_type='boolean')
DEFAULT_SUDO_USER = get_config(p, DEFAULTS, 'sudo_user', 'ANSIBLE_SUDO_USER', 'root')
DEFAULT_SUDO_EXE = get_config(p, DEFAULTS, 'sudo_exe', 'ANSIBLE_SUDO_EXE', None)
DEFAULT_SUDO_FLAGS = get_config(p, DEFAULTS, 'sudo_flags', 'ANSIBLE_SUDO_FLAGS', '-H -S -n')
DEFAULT_ASK_SUDO_PASS = get_config(p, DEFAULTS, 'ask_sudo_pass', 'ANSIBLE_ASK_SUDO_PASS', False, value_type='boolean')
# Become
# Per-method strings emitted on authentication failure.
BECOME_ERROR_STRINGS = {
    'sudo': 'Sorry, try again.',
    'su': 'Authentication failure',
    'pbrun': '',
    'pfexec': '',
    'doas': 'Permission denied',
    'dzdo': '',
    'ksu': 'Password incorrect'
} # FIXME: deal with i18n
# Per-method strings emitted when a required password was not supplied.
BECOME_MISSING_STRINGS = {
    'sudo': 'sorry, a password is required to run sudo',
    'su': '',
    'pbrun': '',
    'pfexec': '',
    'doas': 'Authorization required',
    'dzdo': '',
    'ksu': 'No password given'
} # FIXME: deal with i18n
BECOME_METHODS = ['sudo','su','pbrun','pfexec','doas','dzdo','ksu','runas']
BECOME_ALLOW_SAME_USER = get_config(p, 'privilege_escalation', 'become_allow_same_user', 'ANSIBLE_BECOME_ALLOW_SAME_USER', False, value_type='boolean')
# Default method honours the deprecated DEFAULT_SUDO/DEFAULT_SU toggles above.
DEFAULT_BECOME_METHOD = get_config(p, 'privilege_escalation', 'become_method', 'ANSIBLE_BECOME_METHOD',
                                   'sudo' if DEFAULT_SUDO else 'su' if DEFAULT_SU else 'sudo').lower()
DEFAULT_BECOME = get_config(p, 'privilege_escalation', 'become', 'ANSIBLE_BECOME',False, value_type='boolean')
DEFAULT_BECOME_USER = get_config(p, 'privilege_escalation', 'become_user', 'ANSIBLE_BECOME_USER', 'root')
DEFAULT_BECOME_EXE = get_config(p, 'privilege_escalation', 'become_exe', 'ANSIBLE_BECOME_EXE', None)
DEFAULT_BECOME_FLAGS = get_config(p, 'privilege_escalation', 'become_flags', 'ANSIBLE_BECOME_FLAGS', None)
DEFAULT_BECOME_ASK_PASS = get_config(p, 'privilege_escalation', 'become_ask_pass', 'ANSIBLE_BECOME_ASK_PASS', False, value_type='boolean')
# PLUGINS
# Modules that can optimize with_items loops into a single call. Currently
# these modules must (1) take a "name" or "pkg" parameter that is a list. If
# the module takes both, bad things could happen.
# In the future we should probably generalize this even further
# (mapping of param: squash field)
DEFAULT_SQUASH_ACTIONS = get_config(p, DEFAULTS, 'squash_actions', 'ANSIBLE_SQUASH_ACTIONS',
                                    "apk, apt, dnf, homebrew, openbsd_pkg, pacman, pkgng, yum, zypper", value_type='list')
# paths
# Each search path is user dir first, then the system-wide location;
# 'pathlist' splits on os.pathsep and expands ~ and env vars per entry.
DEFAULT_ACTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'action_plugins', 'ANSIBLE_ACTION_PLUGINS',
                                        '~/.ansible/plugins/action:/usr/share/ansible/plugins/action', value_type='pathlist')
DEFAULT_CACHE_PLUGIN_PATH = get_config(p, DEFAULTS, 'cache_plugins', 'ANSIBLE_CACHE_PLUGINS',
                                       '~/.ansible/plugins/cache:/usr/share/ansible/plugins/cache', value_type='pathlist')
DEFAULT_CALLBACK_PLUGIN_PATH = get_config(p, DEFAULTS, 'callback_plugins', 'ANSIBLE_CALLBACK_PLUGINS',
                                          '~/.ansible/plugins/callback:/usr/share/ansible/plugins/callback', value_type='pathlist')
DEFAULT_CONNECTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'connection_plugins', 'ANSIBLE_CONNECTION_PLUGINS',
                                            '~/.ansible/plugins/connection:/usr/share/ansible/plugins/connection', value_type='pathlist')
DEFAULT_LOOKUP_PLUGIN_PATH = get_config(p, DEFAULTS, 'lookup_plugins', 'ANSIBLE_LOOKUP_PLUGINS',
                                        '~/.ansible/plugins/lookup:/usr/share/ansible/plugins/lookup', value_type='pathlist')
DEFAULT_MODULE_PATH = get_config(p, DEFAULTS, 'library', 'ANSIBLE_LIBRARY',
                                 '~/.ansible/plugins/modules:/usr/share/ansible/plugins/modules', value_type='pathlist')
DEFAULT_MODULE_UTILS_PATH = get_config(p, DEFAULTS, 'module_utils', 'ANSIBLE_MODULE_UTILS',
                                       '~/.ansible/plugins/module_utils:/usr/share/ansible/plugins/module_utils', value_type='pathlist')
DEFAULT_INVENTORY_PLUGIN_PATH = get_config(p, DEFAULTS, 'inventory_plugins', 'ANSIBLE_INVENTORY_PLUGINS',
                                           '~/.ansible/plugins/inventory:/usr/share/ansible/plugins/inventory', value_type='pathlist')
DEFAULT_VARS_PLUGIN_PATH = get_config(p, DEFAULTS, 'vars_plugins', 'ANSIBLE_VARS_PLUGINS',
                                      '~/.ansible/plugins/vars:/usr/share/ansible/plugins/vars', value_type='pathlist')
DEFAULT_FILTER_PLUGIN_PATH = get_config(p, DEFAULTS, 'filter_plugins', 'ANSIBLE_FILTER_PLUGINS',
                                        '~/.ansible/plugins/filter:/usr/share/ansible/plugins/filter', value_type='pathlist')
DEFAULT_TEST_PLUGIN_PATH = get_config(p, DEFAULTS, 'test_plugins', 'ANSIBLE_TEST_PLUGINS',
                                      '~/.ansible/plugins/test:/usr/share/ansible/plugins/test', value_type='pathlist')
DEFAULT_STRATEGY_PLUGIN_PATH = get_config(p, DEFAULTS, 'strategy_plugins', 'ANSIBLE_STRATEGY_PLUGINS',
                                          '~/.ansible/plugins/strategy:/usr/share/ansible/plugins/strategy', value_type='pathlist')
NETWORK_GROUP_MODULES = get_config(p, DEFAULTS, 'network_group_modules','NETWORK_GROUP_MODULES', ['eos', 'nxos', 'ios', 'iosxr', 'junos',
                                   'vyos', 'sros', 'dellos9', 'dellos10', 'dellos6'],
                                   value_type='list')
DEFAULT_STRATEGY = get_config(p, DEFAULTS, 'strategy', 'ANSIBLE_STRATEGY', 'linear')
DEFAULT_STDOUT_CALLBACK = get_config(p, DEFAULTS, 'stdout_callback', 'ANSIBLE_STDOUT_CALLBACK', 'default')
# cache
CACHE_PLUGIN = get_config(p, DEFAULTS, 'fact_caching', 'ANSIBLE_CACHE_PLUGIN', 'memory')
CACHE_PLUGIN_CONNECTION = get_config(p, DEFAULTS, 'fact_caching_connection', 'ANSIBLE_CACHE_PLUGIN_CONNECTION', None)
CACHE_PLUGIN_PREFIX = get_config(p, DEFAULTS, 'fact_caching_prefix', 'ANSIBLE_CACHE_PLUGIN_PREFIX', 'ansible_facts')
# Default fact-cache TTL: 24 hours, expressed in seconds.
CACHE_PLUGIN_TIMEOUT = get_config(p, DEFAULTS, 'fact_caching_timeout', 'ANSIBLE_CACHE_PLUGIN_TIMEOUT', 24 * 60 * 60, value_type='integer')
# Display
ANSIBLE_FORCE_COLOR = get_config(p, DEFAULTS, 'force_color', 'ANSIBLE_FORCE_COLOR', None, value_type='boolean')
ANSIBLE_NOCOLOR = get_config(p, DEFAULTS, 'nocolor', 'ANSIBLE_NOCOLOR', None, value_type='boolean')
ANSIBLE_NOCOWS = get_config(p, DEFAULTS, 'nocows', 'ANSIBLE_NOCOWS', None, value_type='boolean')
ANSIBLE_COW_SELECTION = get_config(p, DEFAULTS, 'cow_selection', 'ANSIBLE_COW_SELECTION', 'default')
ANSIBLE_COW_WHITELIST = get_config(p, DEFAULTS, 'cow_whitelist', 'ANSIBLE_COW_WHITELIST', DEFAULT_COW_WHITELIST, value_type='list')
DISPLAY_SKIPPED_HOSTS = get_config(p, DEFAULTS, 'display_skipped_hosts', 'DISPLAY_SKIPPED_HOSTS', True, value_type='boolean')
DEFAULT_UNDEFINED_VAR_BEHAVIOR = get_config(p, DEFAULTS, 'error_on_undefined_vars', 'ANSIBLE_ERROR_ON_UNDEFINED_VARS', True, value_type='boolean')
HOST_KEY_CHECKING = get_config(p, DEFAULTS, 'host_key_checking', 'ANSIBLE_HOST_KEY_CHECKING', True, value_type='boolean')
SYSTEM_WARNINGS = get_config(p, DEFAULTS, 'system_warnings', 'ANSIBLE_SYSTEM_WARNINGS', True, value_type='boolean')
DEPRECATION_WARNINGS = get_config(p, DEFAULTS, 'deprecation_warnings', 'ANSIBLE_DEPRECATION_WARNINGS', True, value_type='boolean')
DEFAULT_CALLABLE_WHITELIST = get_config(p, DEFAULTS, 'callable_whitelist', 'ANSIBLE_CALLABLE_WHITELIST', [], value_type='list')
COMMAND_WARNINGS = get_config(p, DEFAULTS, 'command_warnings', 'ANSIBLE_COMMAND_WARNINGS', True, value_type='boolean')
DEFAULT_LOAD_CALLBACK_PLUGINS = get_config(p, DEFAULTS, 'bin_ansible_callbacks', 'ANSIBLE_LOAD_CALLBACK_PLUGINS', False, value_type='boolean')
DEFAULT_CALLBACK_WHITELIST = get_config(p, DEFAULTS, 'callback_whitelist', 'ANSIBLE_CALLBACK_WHITELIST', [], value_type='list')
RETRY_FILES_ENABLED = get_config(p, DEFAULTS, 'retry_files_enabled', 'ANSIBLE_RETRY_FILES_ENABLED', True, value_type='boolean')
RETRY_FILES_SAVE_PATH = get_config(p, DEFAULTS, 'retry_files_save_path', 'ANSIBLE_RETRY_FILES_SAVE_PATH', None, value_type='path')
DEFAULT_NULL_REPRESENTATION = get_config(p, DEFAULTS, 'null_representation', 'ANSIBLE_NULL_REPRESENTATION', None, value_type='none')
DISPLAY_ARGS_TO_STDOUT = get_config(p, DEFAULTS, 'display_args_to_stdout', 'ANSIBLE_DISPLAY_ARGS_TO_STDOUT', False, value_type='boolean')
# 1 MiB cap on files considered for diff output.
MAX_FILE_SIZE_FOR_DIFF = get_config(p, DEFAULTS, 'max_diff_size', 'ANSIBLE_MAX_DIFF_SIZE', 1024*1024, value_type='integer')
# CONNECTION RELATED
USE_PERSISTENT_CONNECTIONS = get_config(p, DEFAULTS, 'use_persistent_connections', 'ANSIBLE_USE_PERSISTENT_CONNECTIONS', False, value_type='boolean')
ANSIBLE_SSH_ARGS = get_config(p, 'ssh_connection', 'ssh_args', 'ANSIBLE_SSH_ARGS', '-C -o ControlMaster=auto -o ControlPersist=60s')
### WARNING: Someone might be tempted to switch this from percent-formatting
# to .format() in the future. be sure to read this:
# http://lucumr.pocoo.org/2016/12/29/careful-with-str-format/ and understand
# that it may be a security risk to do so.
ANSIBLE_SSH_CONTROL_PATH = get_config(p, 'ssh_connection', 'control_path', 'ANSIBLE_SSH_CONTROL_PATH', None)
ANSIBLE_SSH_CONTROL_PATH_DIR = get_config(p, 'ssh_connection', 'control_path_dir', 'ANSIBLE_SSH_CONTROL_PATH_DIR', u'~/.ansible/cp')
ANSIBLE_SSH_PIPELINING = get_config(p, 'ssh_connection', 'pipelining', 'ANSIBLE_SSH_PIPELINING', False, value_type='boolean')
ANSIBLE_SSH_RETRIES = get_config(p, 'ssh_connection', 'retries', 'ANSIBLE_SSH_RETRIES', 0, value_type='integer')
ANSIBLE_SSH_EXECUTABLE = get_config(p, 'ssh_connection', 'ssh_executable', 'ANSIBLE_SSH_EXECUTABLE', 'ssh')
PARAMIKO_RECORD_HOST_KEYS = get_config(p, 'paramiko_connection', 'record_host_keys', 'ANSIBLE_PARAMIKO_RECORD_HOST_KEYS', True, value_type='boolean')
PARAMIKO_HOST_KEY_AUTO_ADD = get_config(p, 'paramiko_connection', 'host_key_auto_add', 'ANSIBLE_PARAMIKO_HOST_KEY_AUTO_ADD', False, value_type='boolean')
PARAMIKO_PROXY_COMMAND = get_config(p, 'paramiko_connection', 'proxy_command', 'ANSIBLE_PARAMIKO_PROXY_COMMAND', None)
PARAMIKO_LOOK_FOR_KEYS = get_config(p, 'paramiko_connection', 'look_for_keys', 'ANSIBLE_PARAMIKO_LOOK_FOR_KEYS', True, value_type='boolean')
PERSISTENT_CONNECT_TIMEOUT = get_config(p, 'persistent_connection', 'connect_timeout', 'ANSIBLE_PERSISTENT_CONNECT_TIMEOUT', 30, value_type='integer')
PERSISTENT_CONNECT_RETRIES = get_config(p, 'persistent_connection', 'connect_retries', 'ANSIBLE_PERSISTENT_CONNECT_RETRIES', 30, value_type='integer')
PERSISTENT_CONNECT_INTERVAL = get_config(p, 'persistent_connection', 'connect_interval', 'ANSIBLE_PERSISTENT_CONNECT_INTERVAL', 1, value_type='integer')
# obsolete -- will be formally removed
ACCELERATE_PORT = get_config(p, 'accelerate', 'accelerate_port', 'ACCELERATE_PORT', 5099, value_type='integer')
ACCELERATE_TIMEOUT = get_config(p, 'accelerate', 'accelerate_timeout', 'ACCELERATE_TIMEOUT', 30, value_type='integer')
ACCELERATE_CONNECT_TIMEOUT = get_config(p, 'accelerate', 'accelerate_connect_timeout', 'ACCELERATE_CONNECT_TIMEOUT', 1.0, value_type='float')
ACCELERATE_DAEMON_TIMEOUT = get_config(p, 'accelerate', 'accelerate_daemon_timeout', 'ACCELERATE_DAEMON_TIMEOUT', 30, value_type='integer')
ACCELERATE_KEYS_DIR = get_config(p, 'accelerate', 'accelerate_keys_dir', 'ACCELERATE_KEYS_DIR', '~/.fireball.keys')
ACCELERATE_KEYS_DIR_PERMS = get_config(p, 'accelerate', 'accelerate_keys_dir_perms', 'ACCELERATE_KEYS_DIR_PERMS', '700')
ACCELERATE_KEYS_FILE_PERMS = get_config(p, 'accelerate', 'accelerate_keys_file_perms', 'ACCELERATE_KEYS_FILE_PERMS', '600')
ACCELERATE_MULTI_KEY = get_config(p, 'accelerate', 'accelerate_multi_key', 'ACCELERATE_MULTI_KEY', False, value_type='boolean')
PARAMIKO_PTY = get_config(p, 'paramiko_connection', 'pty', 'ANSIBLE_PARAMIKO_PTY', True, value_type='boolean')
# galaxy related
GALAXY_SERVER = get_config(p, 'galaxy', 'server', 'ANSIBLE_GALAXY_SERVER', 'https://galaxy.ansible.com')
GALAXY_IGNORE_CERTS = get_config(p, 'galaxy', 'ignore_certs', 'ANSIBLE_GALAXY_IGNORE', False, value_type='boolean')
# this can be configured to blacklist SCMS but cannot add new ones unless the code is also updated
GALAXY_SCMS = get_config(p, 'galaxy', 'scms', 'ANSIBLE_GALAXY_SCMS', 'git, hg', value_type='list')
GALAXY_ROLE_SKELETON = get_config(p, 'galaxy', 'role_skeleton', 'ANSIBLE_GALAXY_ROLE_SKELETON', None, value_type='path')
GALAXY_ROLE_SKELETON_IGNORE = get_config(p, 'galaxy', 'role_skeleton_ignore', 'ANSIBLE_GALAXY_ROLE_SKELETON_IGNORE', ['^.git$', '^.*/.git_keep$'],
                                         value_type='list')
# Jinja2 filters whose (string) output must never be re-typed by ansible.
STRING_TYPE_FILTERS = get_config(p, 'jinja2', 'dont_type_filters', 'ANSIBLE_STRING_TYPE_FILTERS',
                                 ['string', 'to_json', 'to_nice_json', 'to_yaml', 'ppretty', 'json'], value_type='list' )
# colors
COLOR_HIGHLIGHT = get_config(p, 'colors', 'highlight', 'ANSIBLE_COLOR_HIGHLIGHT', 'white')
COLOR_VERBOSE = get_config(p, 'colors', 'verbose', 'ANSIBLE_COLOR_VERBOSE', 'blue')
COLOR_WARN = get_config(p, 'colors', 'warn', 'ANSIBLE_COLOR_WARN', 'bright purple')
COLOR_ERROR = get_config(p, 'colors', 'error', 'ANSIBLE_COLOR_ERROR', 'red')
COLOR_DEBUG = get_config(p, 'colors', 'debug', 'ANSIBLE_COLOR_DEBUG', 'dark gray')
COLOR_DEPRECATE = get_config(p, 'colors', 'deprecate', 'ANSIBLE_COLOR_DEPRECATE', 'purple')
COLOR_SKIP = get_config(p, 'colors', 'skip', 'ANSIBLE_COLOR_SKIP', 'cyan')
COLOR_UNREACHABLE = get_config(p, 'colors', 'unreachable', 'ANSIBLE_COLOR_UNREACHABLE', 'bright red')
COLOR_OK = get_config(p, 'colors', 'ok', 'ANSIBLE_COLOR_OK', 'green')
COLOR_CHANGED = get_config(p, 'colors', 'changed', 'ANSIBLE_COLOR_CHANGED', 'yellow')
COLOR_DIFF_ADD = get_config(p, 'colors', 'diff_add', 'ANSIBLE_COLOR_DIFF_ADD', 'green')
COLOR_DIFF_REMOVE = get_config(p, 'colors', 'diff_remove', 'ANSIBLE_COLOR_DIFF_REMOVE', 'red')
COLOR_DIFF_LINES = get_config(p, 'colors', 'diff_lines', 'ANSIBLE_COLOR_DIFF_LINES', 'cyan')
# diff
DIFF_CONTEXT = get_config(p, 'diff', 'context', 'ANSIBLE_DIFF_CONTEXT', 3, value_type='integer')
# FIX: value_type was 'bool', which get_config() does not recognize (it only
# handles 'boolean'), so a cfg/env value like "False" stayed a truthy string.
DIFF_ALWAYS = get_config(p, 'diff', 'always', 'ANSIBLE_DIFF_ALWAYS', False, value_type='boolean')
# non-configurable things
MODULE_REQUIRE_ARGS = ['command', 'win_command', 'shell', 'win_shell', 'raw', 'script']
MODULE_NO_JSON = ['command', 'win_command', 'shell', 'win_shell', 'raw']
DEFAULT_BECOME_PASS = None
DEFAULT_PASSWORD_CHARS = to_text(ascii_letters + digits + ".,:-_", errors='strict') # characters included in auto-generated passwords
DEFAULT_SUDO_PASS = None
DEFAULT_REMOTE_PASS = None
DEFAULT_SUBSET = None
DEFAULT_SU_PASS = None
VAULT_VERSION_MIN = 1.0
VAULT_VERSION_MAX = 1.0
TREE_DIR = None
LOCALHOST = frozenset(['127.0.0.1', 'localhost', '::1'])
# module search
BLACKLIST_EXTS = ('.pyc', '.swp', '.bak', '~', '.rpm', '.md', '.txt')
IGNORE_FILES = ["COPYING", "CONTRIBUTING", "LICENSE", "README", "VERSION", "GUIDELINES"]
INTERNAL_RESULT_KEYS = ['add_host', 'add_group']
RESTRICTED_RESULT_KEYS = ['ansible_rsync_path', 'ansible_playbook_python']
crossvul-python_data_bad_1232_1 | ###
# Copyright (c) 2002-2004, Jeremiah Fincher
# Copyright (c) 2008-2009, James McCoy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from __future__ import division
import re
import math
import cmath
import types
import string
import supybot.utils as utils
from supybot.commands import *
import supybot.utils.minisix as minisix
import supybot.callbacks as callbacks
from supybot.i18n import PluginInternationalization, internationalizeDocstring
# Gettext-style helper: _() marks strings in this plugin for translation.
_ = PluginInternationalization('Math')
from .local import convertcore
# wrap() spec tuple for a base argument (an int no greater than 36).
# NOTE(review): appears unused in this file -- the 'base' command defines
# its own inline specs below; confirm against other callers before removing.
baseArg = ('int', 'base', lambda i: i <= 36)
class Math(callbacks.Plugin):
"""Provides commands to work with math, such as a calculator and
a unit converter."""
@internationalizeDocstring
def base(self, irc, msg, args, frm, to, number):
"""<fromBase> [<toBase>] <number>
Converts <number> from base <fromBase> to base <toBase>.
If <toBase> is left out, it converts to decimal.
"""
if not number:
number = str(to)
to = 10
try:
irc.reply(self._convertBaseToBase(number, to, frm))
except ValueError:
irc.error(_('Invalid <number> for base %s: %s') % (frm, number))
base = wrap(base, [('int', 'base', lambda i: 2 <= i <= 36),
optional(('int', 'base', lambda i: 2 <= i <= 36), 10),
additional('something')])
def _convertDecimalToBase(self, number, base):
"""Convert a decimal number to another base; returns a string."""
if number == 0:
return '0'
elif number < 0:
negative = True
number = -number
else:
negative = False
digits = []
while number != 0:
digit = number % base
if digit >= 10:
digit = string.ascii_uppercase[digit - 10]
else:
digit = str(digit)
digits.append(digit)
number = number // base
digits.reverse()
return '-'*negative + ''.join(digits)
def _convertBaseToBase(self, number, toBase, fromBase):
"""Convert a number from any base, 2 through 36, to any other
base, 2 through 36. Returns a string."""
number = minisix.long(str(number), fromBase)
if toBase == 10:
return str(number)
return self._convertDecimalToBase(number, toBase)
_mathEnv = {'__builtins__': types.ModuleType('__builtins__'), 'i': 1j}
_mathEnv.update(math.__dict__)
_mathEnv.update(cmath.__dict__)
def _sqrt(x):
if isinstance(x, complex) or x < 0:
return cmath.sqrt(x)
else:
return math.sqrt(x)
def _cbrt(x):
return math.pow(x, 1.0/3)
def _factorial(x):
if x<=10000:
return float(math.factorial(x))
else:
raise Exception('factorial argument too large')
_mathEnv['sqrt'] = _sqrt
_mathEnv['cbrt'] = _cbrt
_mathEnv['abs'] = abs
_mathEnv['max'] = max
_mathEnv['min'] = min
_mathEnv['round'] = lambda x, y=0: round(x, int(y))
_mathSafeEnv = dict([(x,y) for x,y in _mathEnv.items()])
_mathSafeEnv['factorial'] = _factorial
_mathRe = re.compile(r'((?:(?<![A-Fa-f\d)])-)?'
r'(?:0x[A-Fa-f\d]+|'
r'0[0-7]+|'
r'\d+\.\d+|'
r'\.\d+|'
r'\d+\.|'
r'\d+))')
def _floatToString(self, x):
if -1e-10 < x < 1e-10:
return '0'
elif -1e-10 < int(x) - x < 1e-10:
return str(int(x))
else:
return str(x)
def _complexToString(self, x):
realS = self._floatToString(x.real)
imagS = self._floatToString(x.imag)
if imagS == '0':
return realS
elif imagS == '1':
imagS = '+i'
elif imagS == '-1':
imagS = '-i'
elif x.imag < 0:
imagS = '%si' % imagS
else:
imagS = '+%si' % imagS
if realS == '0' and imagS == '0':
return '0'
elif realS == '0':
return imagS.lstrip('+')
elif imagS == '0':
return realS
else:
return '%s%s' % (realS, imagS)
_calc_match_forbidden_chars = re.compile('[_\[\]]')
_calc_remover = utils.str.MultipleRemover('_[] \t')
###
# So this is how the 'calc' command works:
# First, we make a nice little safe environment for evaluation; basically,
# the names in the 'math' and 'cmath' modules. Then, we remove the ability
# of a random user to get ints evaluated: this means we have to turn all
# int literals (even octal numbers and hexadecimal numbers) into floats.
# Then we delete all square brackets, underscores, and whitespace, so no
# one can do list comprehensions or call __...__ functions.
###
@internationalizeDocstring
def calc(self, irc, msg, args, text):
"""<math expression>
Returns the value of the evaluated <math expression>. The syntax is
Python syntax; the type of arithmetic is floating point. Floating
point arithmetic is used in order to prevent a user from being able to
crash to the bot with something like '10**10**10**10'. One consequence
is that large values such as '10**24' might not be exact.
"""
try:
text = str(text)
except UnicodeEncodeError:
irc.error(_("There's no reason you should have fancy non-ASCII "
"characters in your mathematical expression. "
"Please remove them."))
return
if self._calc_match_forbidden_chars.match(text):
# Note: this is important to keep this to forbid usage of
# __builtins__
irc.error(_('There\'s really no reason why you should have '
'underscores or brackets in your mathematical '
'expression. Please remove them.'))
return
text = self._calc_remover(text)
if 'lambda' in text:
irc.error(_('You can\'t use lambda in this command.'))
return
text = text.lower()
def handleMatch(m):
s = m.group(1)
if s.startswith('0x'):
i = int(s, 16)
elif s.startswith('0') and '.' not in s:
try:
i = int(s, 8)
except ValueError:
i = int(s)
else:
i = float(s)
x = complex(i)
if x.imag == 0:
x = x.real
# Need to use string-formatting here instead of str() because
# use of str() on large numbers loses information:
# str(float(33333333333333)) => '3.33333333333e+13'
# float('3.33333333333e+13') => 33333333333300.0
return '%.16f' % x
return str(x)
text = self._mathRe.sub(handleMatch, text)
try:
self.log.info('evaluating %q from %s', text, msg.prefix)
x = complex(eval(text, self._mathSafeEnv, self._mathSafeEnv))
irc.reply(self._complexToString(x))
except OverflowError:
maxFloat = math.ldexp(0.9999999999999999, 1024)
irc.error(_('The answer exceeded %s or so.') % maxFloat)
except TypeError:
irc.error(_('Something in there wasn\'t a valid number.'))
except NameError as e:
irc.error(_('%s is not a defined function.') % str(e).split()[1])
except Exception as e:
irc.error(str(e))
calc = wrap(calc, ['text'])
@internationalizeDocstring
def icalc(self, irc, msg, args, text):
"""<math expression>
This is the same as the calc command except that it allows integer
math, and can thus cause the bot to suck up CPU. Hence it requires
the 'trusted' capability to use.
"""
if self._calc_match_forbidden_chars.match(text):
# Note: this is important to keep this to forbid usage of
# __builtins__
irc.error(_('There\'s really no reason why you should have '
'underscores or brackets in your mathematical '
'expression. Please remove them.'))
return
# This removes spaces, too, but we'll leave the removal of _[] for
# safety's sake.
text = self._calc_remover(text)
if 'lambda' in text:
irc.error(_('You can\'t use lambda in this command.'))
return
text = text.replace('lambda', '')
try:
self.log.info('evaluating %q from %s', text, msg.prefix)
irc.reply(str(eval(text, self._mathEnv, self._mathEnv)))
except OverflowError:
maxFloat = math.ldexp(0.9999999999999999, 1024)
irc.error(_('The answer exceeded %s or so.') % maxFloat)
except TypeError:
irc.error(_('Something in there wasn\'t a valid number.'))
except NameError as e:
irc.error(_('%s is not a defined function.') % str(e).split()[1])
except Exception as e:
irc.error(utils.exnToString(e))
icalc = wrap(icalc, [('checkCapability', 'trusted'), 'text'])
_rpnEnv = {
'dup': lambda s: s.extend([s.pop()]*2),
'swap': lambda s: s.extend([s.pop(), s.pop()])
}
def rpn(self, irc, msg, args):
"""<rpn math expression>
Returns the value of an RPN expression.
"""
stack = []
for arg in args:
try:
x = complex(arg)
if x == abs(x):
x = abs(x)
stack.append(x)
except ValueError: # Not a float.
if arg in self._mathSafeEnv:
f = self._mathSafeEnv[arg]
if callable(f):
called = False
arguments = []
while not called and stack:
arguments.append(stack.pop())
try:
stack.append(f(*arguments))
called = True
except TypeError:
pass
if not called:
irc.error(_('Not enough arguments for %s') % arg)
return
else:
stack.append(f)
elif arg in self._rpnEnv:
self._rpnEnv[arg](stack)
else:
arg2 = stack.pop()
arg1 = stack.pop()
s = '%s%s%s' % (arg1, arg, arg2)
try:
stack.append(eval(s, self._mathSafeEnv, self._mathSafeEnv))
except SyntaxError:
irc.error(format(_('%q is not a defined function.'),
arg))
return
if len(stack) == 1:
irc.reply(str(self._complexToString(complex(stack[0]))))
else:
s = ', '.join(map(self._complexToString, list(map(complex, stack))))
irc.reply(_('Stack: [%s]') % s)
@internationalizeDocstring
def convert(self, irc, msg, args, number, unit1, unit2):
    """[<number>] <unit> to <other unit>

    Converts from <unit> to <other unit>. If number isn't given, it
    defaults to 1. For unit information, see 'units' command.
    """
    # Precision of the user's input: number of digits after the decimal
    # point (0 when the input had none).
    digits = len(str(number).partition('.')[2])
    try:
        result = convertcore.convert(number, unit1, unit2)
        if isinstance(result, float):
            # Count the zeros immediately after the decimal point so
            # very small results keep a significant digit.
            decimals = str(result).split('.')[1]
            zeros = len(decimals) - len(decimals.lstrip('0'))
            # Let's add one signifiant digit. Physicists would not like
            # that, but common people usually do not give extra zeros...
            # (for example, with '32 C to F', an extra digit would be
            # expected).
            result = round(result, digits + 1 + zeros)
            result = self._floatToString(result)
        irc.reply(str(result))
    except convertcore.UnitDataError as ude:
        irc.error(str(ude))
convert = wrap(convert, [optional('float', 1.0),'something','to','text'])
@internationalizeDocstring
def units(self, irc, msg, args, type):
    """ [<type>]

    With no arguments, returns a list of measurement types, which can be
    passed as arguments. When called with a type as an argument, returns
    the units of that type.
    """
    # convertcore does all the work; this command just relays its listing.
    listing = convertcore.units(type)
    irc.reply(listing)
units = wrap(units, [additional('text')])
# Supybot's plugin loader requires the module-level name ``Class`` to point
# at the plugin class defined above.
Class = Math

# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| ./CrossVul/dataset_final_sorted/CWE-20/py/bad_1232_1 |
crossvul-python_data_good_872_0 | # -*- test-case-name: twisted.web.test.test_newclient -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
An U{HTTP 1.1<http://www.w3.org/Protocols/rfc2616/rfc2616.html>} client.
The way to use the functionality provided by this module is to:
- Connect a L{HTTP11ClientProtocol} to an HTTP server
- Create a L{Request} with the appropriate data
- Pass the request to L{HTTP11ClientProtocol.request}
- The returned Deferred will fire with a L{Response} object
- Create a L{IProtocol} provider which can handle the response body
- Connect it to the response with L{Response.deliverBody}
- When the protocol's C{connectionLost} method is called, the response is
complete. See L{Response.deliverBody} for details.
Various other classes in this module support this usage:
- HTTPParser is the basic HTTP parser. It can handle the parts of HTTP which
are symmetric between requests and responses.
- HTTPClientParser extends HTTPParser to handle response-specific parts of
HTTP. One instance is created for each request to parse the corresponding
response.
"""
from __future__ import division, absolute_import
__metaclass__ = type
import re
from zope.interface import implementer
from twisted.python.compat import networkString
from twisted.python.components import proxyForInterface
from twisted.python.reflect import fullyQualifiedName
from twisted.python.failure import Failure
from twisted.internet.interfaces import IConsumer, IPushProducer
from twisted.internet.error import ConnectionDone
from twisted.internet.defer import Deferred, succeed, fail, maybeDeferred
from twisted.internet.defer import CancelledError
from twisted.internet.protocol import Protocol
from twisted.protocols.basic import LineReceiver
from twisted.web.iweb import UNKNOWN_LENGTH, IResponse, IClientRequest
from twisted.web.http_headers import Headers
from twisted.web.http import NO_CONTENT, NOT_MODIFIED
from twisted.web.http import _DataLoss, PotentialDataLoss
from twisted.web.http import _IdentityTransferDecoder, _ChunkedTransferDecoder
from twisted.logger import Logger
# States HTTPParser can be in
STATUS = u'STATUS'    # waiting for the status/request line
HEADER = u'HEADER'    # reading header lines
BODY = u'BODY'        # delivering bytes to a body decoder
DONE = u'DONE'        # message fully parsed

# Module-level logger used by free functions with no per-instance logger.
_moduleLog = Logger()
class BadHeaders(Exception):
    """
    Headers passed to L{Request} were in some way invalid.
    """



class ExcessWrite(Exception):
    """
    The body L{IBodyProducer} for a request tried to write data after
    indicating it had finished writing data.
    """
class ParseError(Exception):
    """
    Some received data could not be parsed.

    @ivar data: The string which could not be parsed.
    """
    def __init__(self, reason, data):
        # Keep both pieces in args (base-class behavior) and also expose
        # the unparsable input directly.
        super(ParseError, self).__init__(reason, data)
        self.data = data
class BadResponseVersion(ParseError):
    """
    The version string in a status line was unparsable.
    """
class _WrapperException(Exception):
    """
    L{_WrapperException} is the base exception type for exceptions which
    include one or more other exceptions as the low-level causes.

    @ivar reasons: A L{list} of one or more L{Failure} instances encountered
        during an HTTP request.  See subclass documentation for more details.
    """
    def __init__(self, reasons):
        # The underlying causes are both stored in args and exposed as an
        # attribute for convenient inspection.
        super(_WrapperException, self).__init__(reasons)
        self.reasons = reasons
class RequestGenerationFailed(_WrapperException):
    """
    There was an error while creating the bytes which make up a request.

    @ivar reasons: A C{list} of one or more L{Failure} instances giving the
        reasons the request generation was considered to have failed.
    """



class RequestTransmissionFailed(_WrapperException):
    """
    There was an error while sending the bytes which make up a request.

    @ivar reasons: A C{list} of one or more L{Failure} instances giving the
        reasons the request transmission was considered to have failed.
    """



class ConnectionAborted(Exception):
    """
    The connection was explicitly aborted by application code.
    """



class WrongBodyLength(Exception):
    """
    An L{IBodyProducer} declared the number of bytes it was going to
    produce (via its C{length} attribute) and then produced a different number
    of bytes.
    """



class ResponseDone(Exception):
    """
    L{ResponseDone} may be passed to L{IProtocol.connectionLost} on the
    protocol passed to L{Response.deliverBody} and indicates that the entire
    response has been delivered.
    """
class ResponseFailed(_WrapperException):
    """
    L{ResponseFailed} indicates that all of the response to a request was not
    received for some reason.

    @ivar reasons: A C{list} of one or more L{Failure} instances giving the
        reasons the response was considered to have failed.

    @ivar response: If specified, the L{Response} received from the server (and
        in particular the status code and the headers).
    """
    def __init__(self, reasons, response=None):
        # The partial response (if any headers arrived) is kept so callers
        # can inspect the status code even though the body was lost.
        _WrapperException.__init__(self, reasons)
        self.response = response
class ResponseNeverReceived(ResponseFailed):
    """
    A L{ResponseFailed} that knows no response bytes at all have been received.
    """



class RequestNotSent(Exception):
    """
    L{RequestNotSent} indicates that an attempt was made to issue a request but
    for reasons unrelated to the details of the request itself, the request
    could not be sent.  For example, this may indicate that an attempt was made
    to send a request using a protocol which is no longer connected to a
    server.
    """
def _callAppFunction(function):
    """
    Invoke C{function} and never let an exception escape; any failure is
    logged together with the fully qualified name of its source.

    @return: L{None}
    """
    try:
        function()
    except BaseException:
        # Equivalent to a bare ``except:`` — application callbacks must not
        # be able to break protocol bookkeeping.
        _moduleLog.failure(
            u"Unexpected exception from {name}",
            name=fullyQualifiedName(function)
        )
class HTTPParser(LineReceiver):
    """
    L{HTTPParser} handles the parsing side of HTTP processing. With a suitable
    subclass, it can parse either the client side or the server side of the
    connection.

    @ivar headers: All of the non-connection control message headers yet
        received.

    @ivar state: State indicator for the response parsing state machine.  One
        of C{STATUS}, C{HEADER}, C{BODY}, C{DONE}.

    @ivar _partialHeader: L{None} or a C{list} of the lines of a multiline
        header while that header is being received.
    """
    # NOTE: According to HTTP spec, we're supposed to eat the
    # 'Proxy-Authenticate' and 'Proxy-Authorization' headers also, but that
    # doesn't sound like a good idea to me, because it makes it impossible to
    # have a non-authenticating transparent proxy in front of an authenticating
    # proxy. An authenticating proxy can eat them itself. -jknight
    #
    # Further, quoting
    # http://homepages.tesco.net/J.deBoynePollard/FGA/web-proxy-connection-header.html
    # regarding the 'Proxy-Connection' header:
    #
    #    The Proxy-Connection: header is a mistake in how some web browsers
    #    use HTTP. Its name is the result of a false analogy. It is not a
    #    standard part of the protocol. There is a different standard
    #    protocol mechanism for doing what it does. And its existence
    #    imposes a requirement upon HTTP servers such that no proxy HTTP
    #    server can be standards-conforming in practice.
    #
    # -exarkun

    # Some servers (like http://news.ycombinator.com/) return status lines and
    # HTTP headers delimited by \n instead of \r\n.
    delimiter = b'\n'

    # Hop-by-hop headers: routed to connHeaders instead of headers.
    CONNECTION_CONTROL_HEADERS = set([
        b'content-length', b'connection', b'keep-alive', b'te',
        b'trailers', b'transfer-encoding', b'upgrade',
        b'proxy-connection'])

    def connectionMade(self):
        # Entity headers and connection-control (hop-by-hop) headers are
        # accumulated separately; parsing always begins at the status line.
        self.headers = Headers()
        self.connHeaders = Headers()
        self.state = STATUS
        self._partialHeader = None

    def switchToBodyMode(self, decoder):
        """
        Switch to body parsing mode - interpret any more bytes delivered as
        part of the message body and deliver them to the given decoder.
        """
        if self.state == BODY:
            raise RuntimeError(u"already in body mode")

        self.bodyDecoder = decoder
        self.state = BODY
        self.setRawMode()

    def lineReceived(self, line):
        """
        Handle one line from a response.
        """
        # Handle the normal CR LF case.
        # (delimiter is b'\n', so a well-formed line still carries the CR.)
        if line[-1:] == b'\r':
            line = line[:-1]

        if self.state == STATUS:
            self.statusReceived(line)
            self.state = HEADER
        elif self.state == HEADER:
            if not line or line[0] not in b' \t':
                # A non-continuation line terminates any header currently
                # being accumulated across multiple lines.
                if self._partialHeader is not None:
                    header = b''.join(self._partialHeader)
                    name, value = header.split(b':', 1)
                    value = value.strip()
                    self.headerReceived(name, value)
                if not line:
                    # Empty line means the header section is over.
                    self.allHeadersReceived()
                else:
                    # Line not beginning with LWS is another header.
                    self._partialHeader = [line]
            else:
                # A line beginning with LWS is a continuation of a header
                # begun on a previous line.
                self._partialHeader.append(line)

    def rawDataReceived(self, data):
        """
        Pass data from the message body to the body decoder object.
        """
        self.bodyDecoder.dataReceived(data)

    def isConnectionControlHeader(self, name):
        """
        Return C{True} if the given lower-cased name is the name of a
        connection control header (rather than an entity header).

        According to RFC 2616, section 14.10, the tokens in the Connection
        header are probably relevant here.  However, I am not sure what the
        practical consequences of either implementing or ignoring that are.
        So I leave it unimplemented for the time being.
        """
        return name in self.CONNECTION_CONTROL_HEADERS

    def statusReceived(self, status):
        """
        Callback invoked whenever the first line of a new message is received.
        Override this.

        @param status: The first line of an HTTP request or response message
            without trailing I{CR LF}.
        @type status: C{bytes}
        """

    def headerReceived(self, name, value):
        """
        Store the given header in C{self.headers}.
        """
        name = name.lower()
        if self.isConnectionControlHeader(name):
            headers = self.connHeaders
        else:
            headers = self.headers
        headers.addRawHeader(name, value)

    def allHeadersReceived(self):
        """
        Callback invoked after the last header is passed to C{headerReceived}.
        Override this to change to the C{BODY} or C{DONE} state.
        """
        self.switchToBodyMode(None)
class HTTPClientParser(HTTPParser):
    """
    An HTTP parser which only handles HTTP responses.

    @ivar request: The request with which the expected response is associated.
    @type request: L{Request}

    @ivar NO_BODY_CODES: A C{set} of response codes which B{MUST NOT} have a
        body.

    @ivar finisher: A callable to invoke when this response is fully parsed.

    @ivar _responseDeferred: A L{Deferred} which will be called back with the
        response when all headers in the response have been received.
        Thereafter, L{None}.

    @ivar _everReceivedData: C{True} if any bytes have been received.
    """
    NO_BODY_CODES = set([NO_CONTENT, NOT_MODIFIED])

    # Supported Transfer-Encoding values mapped to their decoder factories.
    _transferDecoders = {
        b'chunked': _ChunkedTransferDecoder,
    }

    bodyDecoder = None

    _log = Logger()

    def __init__(self, request, finisher):
        self.request = request
        self.finisher = finisher
        self._responseDeferred = Deferred()
        self._everReceivedData = False

    def dataReceived(self, data):
        """
        Override so that we know if any response has been received.
        """
        # Distinguishes ResponseNeverReceived from ResponseFailed later.
        self._everReceivedData = True
        HTTPParser.dataReceived(self, data)

    def parseVersion(self, strversion):
        """
        Parse version strings of the form Protocol '/' Major '.' Minor. E.g.
        b'HTTP/1.1'.  Returns (protocol, major, minor).  Will raise ValueError
        on bad syntax.
        """
        try:
            proto, strnumber = strversion.split(b'/')
            major, minor = strnumber.split(b'.')
            major, minor = int(major), int(minor)
        except ValueError as e:
            raise BadResponseVersion(str(e), strversion)
        if major < 0 or minor < 0:
            raise BadResponseVersion(u"version may not be negative",
                strversion)
        return (proto, major, minor)

    def statusReceived(self, status):
        """
        Parse the status line into its components and create a response object
        to keep track of this response's state.
        """
        parts = status.split(b' ', 2)
        if len(parts) == 2:
            # Some broken servers omit the required `phrase` portion of
            # `status-line`.  One such server identified as
            # "cloudflare-nginx".  Others fail to identify themselves
            # entirely.  Fill in an empty phrase for such cases.
            version, codeBytes = parts
            phrase = b""
        elif len(parts) == 3:
            version, codeBytes, phrase = parts
        else:
            raise ParseError(u"wrong number of parts", status)

        try:
            statusCode = int(codeBytes)
        except ValueError:
            raise ParseError(u"non-integer status code", status)

        self.response = Response._construct(
            self.parseVersion(version),
            statusCode,
            phrase,
            self.headers,
            self.transport,
            self.request,
        )

    def _finished(self, rest):
        """
        Called to indicate that an entire response has been received.  No more
        bytes will be interpreted by this L{HTTPClientParser}.  Extra bytes are
        passed up and the state of this L{HTTPClientParser} is set to I{DONE}.

        @param rest: A C{bytes} giving any extra bytes delivered to this
            L{HTTPClientParser} which are not part of the response being
            parsed.
        """
        self.state = DONE
        self.finisher(rest)

    def isConnectionControlHeader(self, name):
        """
        Content-Length in the response to a HEAD request is an entity header,
        not a connection control header.
        """
        if self.request.method == b'HEAD' and name == b'content-length':
            return False
        return HTTPParser.isConnectionControlHeader(self, name)

    def allHeadersReceived(self):
        """
        Figure out how long the response body is going to be by examining
        headers and stuff.
        """
        if 100 <= self.response.code < 200:
            # RFC 7231 Section 6.2 says that if we receive a 1XX status code
            # and aren't expecting it, we MAY ignore it.  That's what we're
            # going to do.  We reset the parser here, but we leave
            # _everReceivedData in its True state because we have, in fact,
            # received data.
            self._log.info(
                "Ignoring unexpected {code} response",
                code=self.response.code
            )
            self.connectionMade()
            del self.response
            return
        if (self.response.code in self.NO_BODY_CODES
            or self.request.method == b'HEAD'):
            self.response.length = 0
            # The order of the next two lines might be of interest when adding
            # support for pipelining.
            self._finished(self.clearLineBuffer())
            self.response._bodyDataFinished()
        else:
            transferEncodingHeaders = self.connHeaders.getRawHeaders(
                b'transfer-encoding')
            if transferEncodingHeaders:

                # This could be a KeyError.  However, that would mean we do not
                # know how to decode the response body, so failing the request
                # is as good a behavior as any.  Perhaps someday we will want
                # to normalize/document/test this specifically, but failing
                # seems fine to me for now.
                transferDecoder = self._transferDecoders[transferEncodingHeaders[0].lower()]

                # If anyone ever invents a transfer encoding other than
                # chunked (yea right), and that transfer encoding can predict
                # the length of the response body, it might be sensible to
                # allow the transfer decoder to set the response object's
                # length attribute.
            else:
                contentLengthHeaders = self.connHeaders.getRawHeaders(
                    b'content-length')
                if contentLengthHeaders is None:
                    contentLength = None
                elif len(contentLengthHeaders) == 1:
                    contentLength = int(contentLengthHeaders[0])
                    self.response.length = contentLength
                else:
                    # "HTTP Message Splitting" or "HTTP Response Smuggling"
                    # potentially happening.  Or it's just a buggy server.
                    raise ValueError(u"Too many Content-Length headers; "
                                     u"response is invalid")

                if contentLength == 0:
                    self._finished(self.clearLineBuffer())
                    transferDecoder = None
                else:
                    transferDecoder = lambda x, y: _IdentityTransferDecoder(
                        contentLength, x, y)

            if transferDecoder is None:
                self.response._bodyDataFinished()
            else:
                # Make sure as little data as possible from the response body
                # gets delivered to the response object until the response
                # object actually indicates it is ready to handle bytes
                # (probably because an application gave it a way to interpret
                # them).
                self.transport.pauseProducing()
                self.switchToBodyMode(transferDecoder(
                    self.response._bodyDataReceived,
                    self._finished))

        # This must be last.  If it were first, then application code might
        # change some state (for example, registering a protocol to receive the
        # response body).  Then the pauseProducing above would be wrong since
        # the response is ready for bytes and nothing else would ever resume
        # the transport.
        self._responseDeferred.callback(self.response)
        del self._responseDeferred

    def connectionLost(self, reason):
        if self.bodyDecoder is not None:
            try:
                try:
                    self.bodyDecoder.noMoreData()
                except PotentialDataLoss:
                    self.response._bodyDataFinished(Failure())
                except _DataLoss:
                    self.response._bodyDataFinished(
                        Failure(ResponseFailed([reason, Failure()],
                                               self.response)))
                else:
                    self.response._bodyDataFinished()
            except:
                # Handle exceptions from both the except suites and the else
                # suite.  Those functions really shouldn't raise exceptions,
                # but maybe there's some buggy application code somewhere
                # making things difficult.
                self._log.failure('')
        elif self.state != DONE:
            # Connection died before the body phase even began: decide
            # between "response failed" and "no response ever arrived".
            if self._everReceivedData:
                exceptionClass = ResponseFailed
            else:
                exceptionClass = ResponseNeverReceived
            self._responseDeferred.errback(Failure(exceptionClass([reason])))
            del self._responseDeferred
# Matches an RFC 7230 "token" (the grammar for an HTTP method): one or more
# tchar bytes.  NOTE: the hyphen is placed LAST in the character class so it
# is a literal.  Previously it sat between b"+" and b".", forming the range
# [+-.] which also matched b"," (0x2C) -- a delimiter that is explicitly not
# a valid token character (see _ensureValidMethod's docstring).
_VALID_METHOD = re.compile(
    br"\A[%s]+\Z" % (
        bytes().join(
            (
                b"!", b"#", b"$", b"%", b"&", b"'", b"*",
                b"+", b".", b"^", b"_", b"`", b"|", b"~",
                b"\x30-\x39",
                b"\x41-\x5a",
                b"\x61-\x7A",
                b"-",  # literal hyphen; must stay last
            ),
        ),
    ),
)
def _ensureValidMethod(method):
    """
    An HTTP method is an HTTP token, which consists of any visible
    ASCII character that is not a delimiter (i.e. one of
    C{"(),/:;<=>?@[\\]{}}.)

    @param method: the method to check
    @type method: L{bytes}

    @return: the method if it is valid
    @rtype: L{bytes}

    @raise ValueError: if the method is not valid

    @see: U{https://tools.ietf.org/html/rfc7230#section-3.1.1},
        U{https://tools.ietf.org/html/rfc7230#section-3.2.6},
        U{https://tools.ietf.org/html/rfc5234#appendix-B.1}
    """
    # Guard-clause form: reject first, then hand the validated value back.
    if _VALID_METHOD.match(method) is None:
        raise ValueError("Invalid method {!r}".format(method))
    return method
# Printable US-ASCII only: rejects control bytes (0x00-0x20, 0x7F) and any
# high-bit byte.  See _ensureValidURI below.
_VALID_URI = re.compile(br'\A[\x21-\x7e]+\Z')
def _ensureValidURI(uri):
    """
    A valid URI cannot contain control characters (i.e., characters
    between 0-32, inclusive and 127) or non-ASCII characters (i.e.,
    characters with values between 128-255, inclusive).

    @param uri: the URI to check
    @type uri: L{bytes}

    @return: the URI if it is valid
    @rtype: L{bytes}

    @raise ValueError: if the URI is not valid

    @see: U{https://tools.ietf.org/html/rfc3986#section-3.3},
        U{https://tools.ietf.org/html/rfc3986#appendix-A},
        U{https://tools.ietf.org/html/rfc5234#appendix-B.1}
    """
    # Guard-clause form: reject anything outside printable ASCII up front.
    if _VALID_URI.match(uri) is None:
        raise ValueError("Invalid URI {!r}".format(uri))
    return uri
@implementer(IClientRequest)
class Request:
    """
    A L{Request} instance describes an HTTP request to be sent to an HTTP
    server.

    @ivar method: See L{__init__}.
    @ivar uri: See L{__init__}.
    @ivar headers: See L{__init__}.
    @ivar bodyProducer: See L{__init__}.
    @ivar persistent: See L{__init__}.

    @ivar _parsedURI: Parsed I{URI} for the request, or L{None}.
    @type _parsedURI: L{twisted.web.client.URI} or L{None}
    """
    _log = Logger()

    def __init__(self, method, uri, headers, bodyProducer, persistent=False):
        """
        @param method: The HTTP method for this request, ex: b'GET', b'HEAD',
            b'POST', etc.
        @type method: L{bytes}

        @param uri: The relative URI of the resource to request.  For example,
            C{b'/foo/bar?baz=quux'}.
        @type uri: L{bytes}

        @param headers: Headers to be sent to the server.  It is important to
            note that this object does not create any implicit headers.  So it
            is up to the HTTP Client to add required headers such as 'Host'.
        @type headers: L{twisted.web.http_headers.Headers}

        @param bodyProducer: L{None} or an L{IBodyProducer} provider which
            produces the content body to send to the remote HTTP server.

        @param persistent: Set to C{True} when you use HTTP persistent
            connection, defaults to C{False}.
        @type persistent: L{bool}
        """
        # Validate up front so a malformed method/URI can never reach the
        # wire (request splitting / CRLF injection defense).
        self.method = _ensureValidMethod(method)
        self.uri = _ensureValidURI(uri)
        self.headers = headers
        self.bodyProducer = bodyProducer
        self.persistent = persistent
        self._parsedURI = None

    @classmethod
    def _construct(cls, method, uri, headers, bodyProducer, persistent=False,
                   parsedURI=None):
        """
        Private constructor.

        @param method: See L{__init__}.
        @param uri: See L{__init__}.
        @param headers: See L{__init__}.
        @param bodyProducer: See L{__init__}.
        @param persistent: See L{__init__}.
        @param parsedURI: See L{Request._parsedURI}.

        @return: L{Request} instance.
        """
        request = cls(method, uri, headers, bodyProducer, persistent)
        request._parsedURI = parsedURI
        return request

    @property
    def absoluteURI(self):
        """
        The absolute URI of the request as C{bytes}, or L{None} if the
        absolute URI cannot be determined.
        """
        return getattr(self._parsedURI, 'toBytes', lambda: None)()

    def _writeHeaders(self, transport, TEorCL):
        """
        Write the request line and all headers to C{transport}.

        @param TEorCL: The body-framing header line (C{Transfer-Encoding}
            or C{Content-Length}, with trailing CRLF) chosen by the caller,
            or L{None} when no framing header should be sent.
        """
        hosts = self.headers.getRawHeaders(b'host', ())
        if len(hosts) != 1:
            raise BadHeaders(u"Exactly one Host header required")

        # In the future, having the protocol version be a parameter to this
        # method would probably be good.  It would be nice if this method
        # weren't limited to issuing HTTP/1.1 requests.
        requestLines = []
        requestLines.append(
            b' '.join(
                [
                    # Re-validate here as well: attribute may have been
                    # reassigned after __init__.
                    _ensureValidMethod(self.method),
                    _ensureValidURI(self.uri),
                    b'HTTP/1.1\r\n',
                ]
            ),
        )
        if not self.persistent:
            requestLines.append(b'Connection: close\r\n')
        if TEorCL is not None:
            requestLines.append(TEorCL)
        for name, values in self.headers.getAllRawHeaders():
            requestLines.extend([name + b': ' + v + b'\r\n' for v in values])
        requestLines.append(b'\r\n')
        transport.writeSequence(requestLines)

    def _writeToBodyProducerChunked(self, transport):
        """
        Write this request to the given transport using chunked
        transfer-encoding to frame the body.

        @param transport: See L{writeTo}.
        @return: See L{writeTo}.
        """
        self._writeHeaders(transport, b'Transfer-Encoding: chunked\r\n')
        encoder = ChunkedEncoder(transport)
        encoder.registerProducer(self.bodyProducer, True)
        d = self.bodyProducer.startProducing(encoder)

        def cbProduced(ignored):
            encoder.unregisterProducer()
        def ebProduced(err):
            encoder._allowNoMoreWrites()
            # Don't call the encoder's unregisterProducer because it will
            # write a zero-length chunk.  This would indicate to the server
            # that the request body is complete.  There was an error, though,
            # so we don't want to do that.
            transport.unregisterProducer()
            return err
        d.addCallbacks(cbProduced, ebProduced)
        return d

    def _writeToBodyProducerContentLength(self, transport):
        """
        Write this request to the given transport using content-length to
        frame the body.

        @param transport: See L{writeTo}.
        @return: See L{writeTo}.
        """
        self._writeHeaders(
            transport,
            networkString(
                'Content-Length: %d\r\n' % (self.bodyProducer.length,)))

        # This Deferred is used to signal an error in the data written to the
        # encoder below.  It can only errback and it will only do so before
        # too many bytes have been written to the encoder and before the
        # producer Deferred fires.
        finishedConsuming = Deferred()

        # This makes sure the producer writes the correct number of bytes for
        # the request body.
        encoder = LengthEnforcingConsumer(
            self.bodyProducer, transport, finishedConsuming)

        transport.registerProducer(self.bodyProducer, True)

        finishedProducing = self.bodyProducer.startProducing(encoder)

        def combine(consuming, producing):
            # This Deferred is returned and will be fired when the first of
            # consuming or producing fires.  If it's cancelled, forward that
            # cancellation to the producer.
            def cancelConsuming(ign):
                finishedProducing.cancel()
            ultimate = Deferred(cancelConsuming)

            # Keep track of what has happened so far.  This initially
            # contains None, then an integer uniquely identifying what
            # sequence of events happened.  See the callbacks and errbacks
            # defined below for the meaning of each value.
            state = [None]

            def ebConsuming(err):
                if state == [None]:
                    # The consuming Deferred failed first.  This means the
                    # overall writeTo Deferred is going to errback now.  The
                    # producing Deferred should not fire later (because the
                    # consumer should have called stopProducing on the
                    # producer), but if it does, a callback will be ignored
                    # and an errback will be logged.
                    state[0] = 1
                    ultimate.errback(err)
                else:
                    # The consuming Deferred errbacked after the producing
                    # Deferred fired.  This really shouldn't ever happen.
                    # If it does, I goofed.  Log the error anyway, just so
                    # there's a chance someone might notice and complain.
                    self._log.failure(
                        u"Buggy state machine in {request}/[{state}]: "
                        u"ebConsuming called",
                        failure=err,
                        request=repr(self),
                        state=state[0]
                    )

            def cbProducing(result):
                if state == [None]:
                    # The producing Deferred succeeded first.  Nothing will
                    # ever happen to the consuming Deferred.  Tell the
                    # encoder we're done so it can check what the producer
                    # wrote and make sure it was right.
                    state[0] = 2
                    try:
                        encoder._noMoreWritesExpected()
                    except:
                        # Fail the overall writeTo Deferred - something the
                        # producer did was wrong.
                        ultimate.errback()
                    else:
                        # Success - succeed the overall writeTo Deferred.
                        ultimate.callback(None)
                # Otherwise, the consuming Deferred already errbacked.  The
                # producing Deferred wasn't supposed to fire, but it did
                # anyway.  It's buggy, but there's not really anything to be
                # done about it.  Just ignore this result.

            def ebProducing(err):
                if state == [None]:
                    # The producing Deferred failed first.  This means the
                    # overall writeTo Deferred is going to errback now.
                    # Tell the encoder that we're done so it knows to reject
                    # further writes from the producer (which should not
                    # happen, but the producer may be buggy).
                    state[0] = 3
                    encoder._allowNoMoreWrites()
                    ultimate.errback(err)
                else:
                    # The producing Deferred failed after the consuming
                    # Deferred failed.  It shouldn't have, so it's buggy.
                    # Log the exception in case anyone who can fix the code
                    # is watching.
                    self._log.failure(u"Producer is buggy", failure=err)

            consuming.addErrback(ebConsuming)
            producing.addCallbacks(cbProducing, ebProducing)

            return ultimate

        d = combine(finishedConsuming, finishedProducing)
        def f(passthrough):
            # Regardless of what happens with the overall Deferred, once it
            # fires, the producer registered way up above the definition of
            # combine should be unregistered.
            transport.unregisterProducer()
            return passthrough
        d.addBoth(f)
        return d

    def _writeToEmptyBodyContentLength(self, transport):
        """
        Write this request to the given transport using content-length to
        frame the (empty) body.

        @param transport: See L{writeTo}.
        @return: See L{writeTo}.
        """
        self._writeHeaders(transport, b"Content-Length: 0\r\n")
        return succeed(None)

    def writeTo(self, transport):
        """
        Format this L{Request} as an HTTP/1.1 request and write it to the
        given transport.  If bodyProducer is not None, it will be associated
        with an L{IConsumer}.

        @param transport: The transport to which to write.
        @type transport: L{twisted.internet.interfaces.ITransport} provider

        @return: A L{Deferred} which fires with L{None} when the request has
            been completely written to the transport or with a L{Failure} if
            there is any problem generating the request bytes.
        """
        if self.bodyProducer is None:
            # If the method semantics anticipate a body, include a
            # Content-Length even if it is 0.
            # https://tools.ietf.org/html/rfc7230#section-3.3.2
            if self.method in (b"PUT", b"POST"):
                # NOTE(review): this branch returns None rather than the
                # Deferred from _writeToEmptyBodyContentLength; the write is
                # synchronous so callers appear to tolerate it -- confirm.
                self._writeToEmptyBodyContentLength(transport)
            else:
                self._writeHeaders(transport, None)
        elif self.bodyProducer.length is UNKNOWN_LENGTH:
            return self._writeToBodyProducerChunked(transport)
        else:
            return self._writeToBodyProducerContentLength(transport)

    def stopWriting(self):
        """
        Stop writing this request to the transport.  This can only be called
        after C{writeTo} and before the L{Deferred} returned by C{writeTo}
        fires.  It should cancel any asynchronous task started by C{writeTo}.
        The L{Deferred} returned by C{writeTo} need not be fired if this
        method is called.
        """
        # If bodyProducer is None, then the Deferred returned by writeTo has
        # fired already and this method cannot be called.
        _callAppFunction(self.bodyProducer.stopProducing)
class LengthEnforcingConsumer:
    """
    An L{IConsumer} proxy which enforces an exact length requirement on the
    total data written to it.

    @ivar _length: The number of bytes remaining to be written.

    @ivar _producer: The L{IBodyProducer} which is writing to this
        consumer.

    @ivar _consumer: The consumer to which at most C{_length} bytes will be
        forwarded.

    @ivar _finished: A L{Deferred} which will be fired with a L{Failure} if
        too many bytes are written to this consumer.
    """
    def __init__(self, producer, consumer, finished):
        self._length = producer.length
        self._producer = producer
        self._consumer = consumer
        self._finished = finished

    def _allowNoMoreWrites(self):
        """
        Indicate that no additional writes are allowed.  Attempts to write
        after calling this method will be met with an exception.
        """
        # A None _finished doubles as the "writing is over" flag.
        self._finished = None

    def write(self, bytes):
        """
        Write C{bytes} to the underlying consumer unless
        C{_noMoreWritesExpected} has been called or there are/have been too
        many bytes.
        """
        if self._finished is None:
            # No writes are supposed to happen any more.  Try to convince the
            # calling code to stop calling this method by calling its
            # stopProducing method and then throwing an exception at it.
            # This exception isn't documented as part of the API because
            # you're never supposed to expect it: only buggy code will ever
            # receive it.
            self._producer.stopProducing()
            raise ExcessWrite()

        remaining = self._length - len(bytes)
        if remaining >= 0:
            self._length = remaining
            self._consumer.write(bytes)
        else:
            # No synchronous exception is raised in *this* error path because
            # we still have _finished which we can use to report the error to
            # a better place than the direct caller of this method (some
            # arbitrary application code).
            _callAppFunction(self._producer.stopProducing)
            self._finished.errback(WrongBodyLength(u"too many bytes written"))
            self._allowNoMoreWrites()

    def _noMoreWritesExpected(self):
        """
        Called to indicate no more bytes will be written to this consumer.
        Check to see that the correct number have been written.

        @raise WrongBodyLength: If not enough bytes have been written.
        """
        if self._finished is None:
            # Already finished (or already failed); nothing to verify.
            return
        self._allowNoMoreWrites()
        if self._length:
            raise WrongBodyLength(u"too few bytes written")
def makeStatefulDispatcher(name, template):
    """
    Build a method that forwards each call to a per-state handler.

    The returned function looks up C{'_%s_%s' % (name, self._state)} on the
    instance and invokes it with the original arguments, so behavior follows
    the instance's C{_state} attribute.

    @param name: A string which is used to construct the name of the
        subsidiary method to invoke.

    @param template: A function object whose docstring is copied onto the
        returned dispatcher.

    @return: The dispatcher function.
    """
    def dispatcher(self, *args, **kwargs):
        handler = getattr(self, '_%s_%s' % (name, self._state), None)
        if handler is None:
            raise RuntimeError(
                u"%r has no %s method in state %s" % (self, name, self._state))
        return handler(*args, **kwargs)
    dispatcher.__doc__ = template.__doc__
    return dispatcher
# This proxy class is used only in the private constructor of the Response
# class below, in order to prevent users relying on any property of the
# concrete request object: they can only use what is provided by
# IClientRequest.
_ClientRequestProxy = proxyForInterface(IClientRequest)
@implementer(IResponse)
class Response:
    """
    A L{Response} instance describes an HTTP response received from an HTTP
    server.

    L{Response} should not be subclassed or instantiated.

    @ivar _transport: See L{__init__}.

    @ivar _bodyProtocol: The L{IProtocol} provider to which the body is
        delivered.  L{None} before one has been registered with
        C{deliverBody}.

    @ivar _bodyBuffer: A C{list} of the strings passed to C{bodyDataReceived}
        before C{deliverBody} is called.  L{None} afterwards.

    @ivar _state: Indicates what state this L{Response} instance is in,
        particularly with respect to delivering bytes from the response body
        to an application-supplied protocol object.  This may be one of
        C{'INITIAL'}, C{'CONNECTED'}, C{'DEFERRED_CLOSE'}, or C{'FINISHED'},
        with the following meanings:

          - INITIAL: This is the state L{Response} objects start in.  No
            protocol has yet been provided and the underlying transport may
            still have bytes to deliver to it.

          - DEFERRED_CLOSE: If the underlying transport indicates all bytes
            have been delivered but no application-provided protocol is yet
            available, the L{Response} moves to this state.  Data is
            buffered and waiting for a protocol to be delivered to.

          - CONNECTED: If a protocol is provided when the state is INITIAL,
            the L{Response} moves to this state.  Any buffered data is
            delivered and any data which arrives from the transport
            subsequently is given directly to the protocol.

          - FINISHED: If a protocol is provided in the DEFERRED_CLOSE state,
            the L{Response} moves to this state after delivering all
            buffered data to the protocol.  Otherwise, if the L{Response} is
            in the CONNECTED state, if the transport indicates there is no
            more data, the L{Response} moves to this state.  Nothing else
            can happen once the L{Response} is in this state.
    @type _state: C{str}
    """
    length = UNKNOWN_LENGTH

    _bodyProtocol = None
    _bodyFinished = False

    def __init__(self, version, code, phrase, headers, _transport):
        """
        @param version: HTTP version components protocol, major, minor. E.g.
            C{(b'HTTP', 1, 1)} to mean C{b'HTTP/1.1'}.

        @param code: HTTP status code.
        @type code: L{int}

        @param phrase: HTTP reason phrase, intended to give a short description
            of the HTTP status code.

        @param headers: HTTP response headers.
        @type headers: L{twisted.web.http_headers.Headers}

        @param _transport: The transport which is delivering this response.
        """
        self.version = version
        self.code = code
        self.phrase = phrase
        self.headers = headers
        self._transport = _transport
        self._bodyBuffer = []
        self._state = 'INITIAL'
        self.request = None
        self.previousResponse = None

    @classmethod
    def _construct(cls, version, code, phrase, headers, _transport, request):
        """
        Private constructor.

        @param version: See L{__init__}.
        @param code: See L{__init__}.
        @param phrase: See L{__init__}.
        @param headers: See L{__init__}.
        @param _transport: See L{__init__}.
        @param request: See L{IResponse.request}.

        @return: L{Response} instance.
        """
        response = Response(version, code, phrase, headers, _transport)
        # Wrap the request in a proxy so callers can only rely on the
        # IClientRequest interface, not the concrete request object.
        response.request = _ClientRequestProxy(request)
        return response

    def setPreviousResponse(self, previousResponse):
        self.previousResponse = previousResponse

    def deliverBody(self, protocol):
        """
        Dispatch the given L{IProtocol} depending on the current state of the
        response.
        """
    deliverBody = makeStatefulDispatcher('deliverBody', deliverBody)

    def _deliverBody_INITIAL(self, protocol):
        """
        Deliver any buffered data to C{protocol} and prepare to deliver any
        future data to it.  Move to the C{'CONNECTED'} state.
        """
        protocol.makeConnection(self._transport)
        self._bodyProtocol = protocol
        for data in self._bodyBuffer:
            self._bodyProtocol.dataReceived(data)
        self._bodyBuffer = None

        self._state = 'CONNECTED'

        # Now that there's a protocol to consume the body, resume the
        # transport.  It was previously paused by HTTPClientParser to avoid
        # reading too much data before it could be handled.  We need to do this
        # after we transition our state as it may recursively lead to more data
        # being delivered, or even the body completing.
        self._transport.resumeProducing()

    def _deliverBody_CONNECTED(self, protocol):
        """
        It is invalid to attempt to deliver data to a protocol when it is
        already being delivered to another protocol.
        """
        raise RuntimeError(
            u"Response already has protocol %r, cannot deliverBody "
            u"again" % (self._bodyProtocol,))

    def _deliverBody_DEFERRED_CLOSE(self, protocol):
        """
        Deliver any buffered data to C{protocol} and then disconnect the
        protocol.  Move to the C{'FINISHED'} state.
        """
        # Unlike _deliverBody_INITIAL, there is no need to resume the
        # transport here because all of the response data has been received
        # already.  Some higher level code may want to resume the transport if
        # that code expects further data to be received over it.
        protocol.makeConnection(self._transport)

        for data in self._bodyBuffer:
            protocol.dataReceived(data)
        self._bodyBuffer = None
        protocol.connectionLost(self._reason)
        self._state = 'FINISHED'

    def _deliverBody_FINISHED(self, protocol):
        """
        It is invalid to attempt to deliver data to a protocol after the
        response body has been delivered to another protocol.
        """
        raise RuntimeError(
            u"Response already finished, cannot deliverBody now.")

    def _bodyDataReceived(self, data):
        """
        Called by HTTPClientParser with chunks of data from the response body.
        They will be buffered or delivered to the protocol passed to
        deliverBody.
        """
    _bodyDataReceived = makeStatefulDispatcher('bodyDataReceived',
                                               _bodyDataReceived)

    def _bodyDataReceived_INITIAL(self, data):
        """
        Buffer any data received for later delivery to a protocol passed to
        C{deliverBody}.

        Little or no data should be buffered by this method, since the
        transport has been paused and will not be resumed until a protocol
        is supplied.
        """
        self._bodyBuffer.append(data)

    def _bodyDataReceived_CONNECTED(self, data):
        """
        Deliver any data received to the protocol to which this L{Response}
        is connected.
        """
        self._bodyProtocol.dataReceived(data)

    def _bodyDataReceived_DEFERRED_CLOSE(self, data):
        """
        It is invalid for data to be delivered after it has been indicated
        that the response body has been completely delivered.
        """
        raise RuntimeError(u"Cannot receive body data after _bodyDataFinished")

    def _bodyDataReceived_FINISHED(self, data):
        """
        It is invalid for data to be delivered after the response body has
        been delivered to a protocol.
        """
        raise RuntimeError(u"Cannot receive body data after "
                           u"protocol disconnected")

    def _bodyDataFinished(self, reason=None):
        """
        Called by HTTPClientParser when no more body data is available.  If the
        optional reason is supplied, this indicates a problem or potential
        problem receiving all of the response body.
        """
    _bodyDataFinished = makeStatefulDispatcher('bodyDataFinished',
                                               _bodyDataFinished)

    def _bodyDataFinished_INITIAL(self, reason=None):
        """
        Move to the C{'DEFERRED_CLOSE'} state to wait for a protocol to
        which to deliver the response body.
        """
        self._state = 'DEFERRED_CLOSE'
        if reason is None:
            reason = Failure(ResponseDone(u"Response body fully received"))
        self._reason = reason

    def _bodyDataFinished_CONNECTED(self, reason=None):
        """
        Disconnect the protocol and move to the C{'FINISHED'} state.
        """
        if reason is None:
            reason = Failure(ResponseDone(u"Response body fully received"))
        self._bodyProtocol.connectionLost(reason)
        self._bodyProtocol = None
        self._state = 'FINISHED'

    def _bodyDataFinished_DEFERRED_CLOSE(self, reason=None):
        """
        It is invalid to attempt to notify the L{Response} of the end of the
        response body data more than once.

        The C{reason} parameter is accepted (and ignored) so that calling
        C{_bodyDataFinished(reason)} in this state raises the intended
        L{RuntimeError} rather than a confusing L{TypeError} from the
        stateful dispatcher's argument forwarding.
        """
        raise RuntimeError(u"Cannot finish body data more than once")

    def _bodyDataFinished_FINISHED(self, reason=None):
        """
        It is invalid to attempt to notify the L{Response} of the end of the
        response body data more than once.

        C{reason} is accepted (and ignored) for the same dispatcher-forwarding
        reason as in C{_bodyDataFinished_DEFERRED_CLOSE}.
        """
        raise RuntimeError(u"Cannot finish body data after "
                           u"protocol disconnected")
@implementer(IConsumer)
class ChunkedEncoder:
    """
    Helper object which exposes L{IConsumer} on top of
    L{HTTP11ClientProtocol} for streaming request bodies to the server,
    framing each write as an HTTP/1.1 chunk.
    """
    def __init__(self, transport):
        self.transport = transport

    def _allowNoMoreWrites(self):
        """
        Indicate that no additional writes are allowed.  Attempts to write
        after calling this method will be met with an exception.
        """
        # Dropping the transport reference is what makes write() fail.
        self.transport = None

    def registerProducer(self, producer, streaming):
        """
        Register the given producer with C{self.transport}.
        """
        self.transport.registerProducer(producer, streaming)

    def write(self, data):
        """
        Write the given request body bytes to the transport using chunked
        encoding.

        @type data: C{bytes}
        """
        if self.transport is None:
            raise ExcessWrite()
        # Chunk framing: hex length, CRLF, payload, CRLF.
        size_prefix = networkString("%x\r\n" % len(data))
        self.transport.writeSequence((size_prefix, data, b"\r\n"))

    def unregisterProducer(self):
        """
        Indicate that the request body is complete and finish the request.
        """
        # A zero-length chunk terminates the chunked body.
        self.write(b'')
        self.transport.unregisterProducer()
        self._allowNoMoreWrites()
@implementer(IPushProducer)
class TransportProxyProducer:
    """
    An L{twisted.internet.interfaces.IPushProducer} implementation which
    wraps another such thing and forwards calls to it until it is told to
    stop.

    @ivar _producer: The wrapped L{twisted.internet.interfaces.IPushProducer}
        provider, or L{None} after this proxy has been stopped.
    """
    # LineReceiver uses this undocumented attribute of transports to decide
    # when to stop calling lineReceived or rawDataReceived (if it finds it to
    # be true, it doesn't bother to deliver any more data).  Set disconnecting
    # to False here and never change it to true so that all data is always
    # delivered to us and so that LineReceiver doesn't fail with an
    # AttributeError.
    disconnecting = False

    def __init__(self, producer):
        self._producer = producer

    def stopProxying(self):
        """
        Stop forwarding calls of L{twisted.internet.interfaces.IPushProducer}
        methods to the underlying L{twisted.internet.interfaces.IPushProducer}
        provider.
        """
        self._producer = None

    def stopProducing(self):
        """
        Proxy the stoppage to the underlying producer, unless this proxy has
        been stopped.
        """
        wrapped = self._producer
        if wrapped is not None:
            wrapped.stopProducing()

    def resumeProducing(self):
        """
        Proxy the resumption to the underlying producer, unless this proxy
        has been stopped.
        """
        wrapped = self._producer
        if wrapped is not None:
            wrapped.resumeProducing()

    def pauseProducing(self):
        """
        Proxy the pause to the underlying producer, unless this proxy has
        been stopped.
        """
        wrapped = self._producer
        if wrapped is not None:
            wrapped.pauseProducing()

    def loseConnection(self):
        """
        Proxy the request to lose the connection to the underlying producer,
        unless this proxy has been stopped.
        """
        wrapped = self._producer
        if wrapped is not None:
            wrapped.loseConnection()
class HTTP11ClientProtocol(Protocol):
    """
    L{HTTP11ClientProtocol} is an implementation of the HTTP 1.1 client
    protocol. It supports as few features as possible.

    @ivar _parser: After a request is issued, the L{HTTPClientParser} to
        which received data making up the response to that request is
        delivered.

    @ivar _finishedRequest: After a request is issued, the L{Deferred} which
        will fire when a L{Response} object corresponding to that request is
        available.  This allows L{HTTP11ClientProtocol} to fail the request
        if there is a connection or parsing problem.

    @ivar _currentRequest: After a request is issued, the L{Request}
        instance used to make that request.  This allows
        L{HTTP11ClientProtocol} to stop request generation if necessary (for
        example, if the connection is lost).

    @ivar _transportProxy: After a request is issued, the
        L{TransportProxyProducer} to which C{_parser} is connected.  This
        allows C{_parser} to pause and resume the transport in a way which
        L{HTTP11ClientProtocol} can exert some control over.

    @ivar _responseDeferred: After a request is issued, the L{Deferred} from
        C{_parser} which will fire with a L{Response} when one has been
        received.  This is eventually chained with C{_finishedRequest}, but
        only in certain cases to avoid double firing that Deferred.

    @ivar _state: Indicates what state this L{HTTP11ClientProtocol} instance
        is in with respect to transmission of a request and reception of a
        response.  This may be one of the following strings:

          - QUIESCENT: This is the state L{HTTP11ClientProtocol} instances
            start in.  Nothing is happening: no request is being sent and no
            response is being received or expected.

          - TRANSMITTING: When a request is made (via L{request}), the
            instance moves to this state.  L{Request.writeTo} has been used
            to start to send a request but it has not yet finished.

          - TRANSMITTING_AFTER_RECEIVING_RESPONSE: The server has returned a
            complete response but the request has not yet been fully sent
            yet.  The instance will remain in this state until the request
            is fully sent.

          - GENERATION_FAILED: There was an error while the request.  The
            request was not fully sent to the network.

          - WAITING: The request was fully sent to the network.  The
            instance is now waiting for the response to be fully received.

          - ABORTING: Application code has requested that the HTTP connection
            be aborted.

          - CONNECTION_LOST: The connection has been lost.
    @type _state: C{str}

    @ivar _abortDeferreds: A list of C{Deferred} instances that will fire when
        the connection is lost.
    """
    _state = 'QUIESCENT'
    _parser = None
    _finishedRequest = None
    _currentRequest = None
    _transportProxy = None
    _responseDeferred = None

    _log = Logger()

    def __init__(self, quiescentCallback=lambda c: None):
        # quiescentCallback is invoked with this protocol each time the
        # connection returns to the QUIESCENT state, e.g. so a connection
        # pool can reclaim it for reuse.
        self._quiescentCallback = quiescentCallback
        self._abortDeferreds = []

    @property
    def state(self):
        # Read-only public view of the internal state machine's state.
        return self._state

    def request(self, request):
        """
        Issue C{request} over C{self.transport} and return a L{Deferred} which
        will fire with a L{Response} instance or an error.

        @param request: The object defining the parameters of the request to
            issue.
        @type request: L{Request}

        @rtype: L{Deferred}
        @return: The deferred may errback with L{RequestGenerationFailed} if
            the request was not fully written to the transport due to a local
            error.  It may errback with L{RequestTransmissionFailed} if it was
            not fully written to the transport due to a network error.  It may
            errback with L{ResponseFailed} if the request was sent (not
            necessarily received) but some or all of the response was lost.  It
            may errback with L{RequestNotSent} if it is not possible to send
            any more requests using this L{HTTP11ClientProtocol}.
        """
        # Only one request may be outstanding on this connection at a time.
        if self._state != 'QUIESCENT':
            return fail(RequestNotSent())

        self._state = 'TRANSMITTING'
        _requestDeferred = maybeDeferred(request.writeTo, self.transport)

        def cancelRequest(ign):
            # Explicitly cancel the request's deferred if it's still trying to
            # write when this request is cancelled.
            if self._state in (
                    'TRANSMITTING', 'TRANSMITTING_AFTER_RECEIVING_RESPONSE'):
                _requestDeferred.cancel()
            else:
                self.transport.abortConnection()
                self._disconnectParser(Failure(CancelledError()))
        self._finishedRequest = Deferred(cancelRequest)

        # Keep track of the Request object in case we need to call stopWriting
        # on it.
        self._currentRequest = request

        self._transportProxy = TransportProxyProducer(self.transport)
        self._parser = HTTPClientParser(request, self._finishResponse)
        self._parser.makeConnection(self._transportProxy)
        self._responseDeferred = self._parser._responseDeferred

        def cbRequestWritten(ignored):
            if self._state == 'TRANSMITTING':
                self._state = 'WAITING'
                self._responseDeferred.chainDeferred(self._finishedRequest)

        def ebRequestWriting(err):
            if self._state == 'TRANSMITTING':
                self._state = 'GENERATION_FAILED'
                self.transport.abortConnection()
                self._finishedRequest.errback(
                    Failure(RequestGenerationFailed([err])))
            else:
                self._log.failure(
                    u'Error writing request, but not in valid state '
                    u'to finalize request: {state}',
                    failure=err,
                    state=self._state
                )

        _requestDeferred.addCallbacks(cbRequestWritten, ebRequestWriting)
        return self._finishedRequest

    def _finishResponse(self, rest):
        """
        Called by an L{HTTPClientParser} to indicate that it has parsed a
        complete response.

        @param rest: A C{bytes} giving any trailing bytes which were given to
            the L{HTTPClientParser} which were not part of the response it
            was parsing.
        """
    _finishResponse = makeStatefulDispatcher('finishResponse', _finishResponse)

    def _finishResponse_WAITING(self, rest):
        # Currently the rest parameter is ignored. Don't forget to use it if
        # we ever add support for pipelining. And maybe check what trailers
        # mean.
        if self._state == 'WAITING':
            self._state = 'QUIESCENT'
        else:
            # The server sent the entire response before we could send the
            # whole request. That sucks. Oh well. Fire the request()
            # Deferred with the response. But first, make sure that if the
            # request does ever finish being written that it won't try to fire
            # that Deferred.
            self._state = 'TRANSMITTING_AFTER_RECEIVING_RESPONSE'
            self._responseDeferred.chainDeferred(self._finishedRequest)

        # This will happen if we're being called due to connection being lost;
        # if so, no need to disconnect parser again, or to call
        # _quiescentCallback.
        if self._parser is None:
            return

        reason = ConnectionDone(u"synthetic!")
        connHeaders = self._parser.connHeaders.getRawHeaders(b'connection', ())
        if ((b'close' in connHeaders) or self._state != "QUIESCENT" or
                not self._currentRequest.persistent):
            # The connection cannot be reused: either the server asked for it
            # to be closed, the request isn't fully sent yet, or persistence
            # was disabled for this request.
            self._giveUp(Failure(reason))
        else:
            # Just in case we had paused the transport, resume it before
            # considering it quiescent again.
            self.transport.resumeProducing()

            # We call the quiescent callback first, to ensure connection gets
            # added back to connection pool before we finish the request.
            try:
                self._quiescentCallback(self)
            except:
                # If callback throws exception, just log it and disconnect;
                # keeping persistent connections around is an optimisation:
                self._log.failure('')
                self.transport.loseConnection()
            self._disconnectParser(reason)

    _finishResponse_TRANSMITTING = _finishResponse_WAITING

    def _disconnectParser(self, reason):
        """
        If there is still a parser, call its C{connectionLost} method with the
        given reason.  If there is not, do nothing.

        @type reason: L{Failure}
        """
        if self._parser is not None:
            parser = self._parser
            self._parser = None
            self._currentRequest = None
            self._finishedRequest = None
            self._responseDeferred = None

            # The parser is no longer allowed to do anything to the real
            # transport. Stop proxying from the parser's transport to the real
            # transport before telling the parser it's done so that it can't do
            # anything.
            self._transportProxy.stopProxying()
            self._transportProxy = None
            parser.connectionLost(reason)

    def _giveUp(self, reason):
        """
        Lose the underlying connection and disconnect the parser with the given
        L{Failure}.

        Use this method instead of calling the transport's loseConnection
        method directly otherwise random things will break.
        """
        self.transport.loseConnection()
        self._disconnectParser(reason)

    def dataReceived(self, bytes):
        """
        Deliver received bytes to the response parser; if the parser raises
        for any reason, give up on the connection and propagate the failure.
        """
        try:
            self._parser.dataReceived(bytes)
        except:
            # Bare except is deliberate: Failure() captures whatever
            # exception is currently being handled.
            self._giveUp(Failure())

    def connectionLost(self, reason):
        """
        The underlying transport went away.  If appropriate, notify the parser
        object.
        """
    connectionLost = makeStatefulDispatcher('connectionLost', connectionLost)

    def _connectionLost_QUIESCENT(self, reason):
        """
        Nothing is currently happening.  Move to the C{'CONNECTION_LOST'}
        state but otherwise do nothing.
        """
        self._state = 'CONNECTION_LOST'

    def _connectionLost_GENERATION_FAILED(self, reason):
        """
        The connection was in an inconsistent state.  Move to the
        C{'CONNECTION_LOST'} state but otherwise do nothing.
        """
        self._state = 'CONNECTION_LOST'

    def _connectionLost_TRANSMITTING(self, reason):
        """
        Fail the L{Deferred} for the current request, notify the request
        object that it does not need to continue transmitting itself, and
        move to the C{'CONNECTION_LOST'} state.
        """
        self._state = 'CONNECTION_LOST'
        self._finishedRequest.errback(
            Failure(RequestTransmissionFailed([reason])))
        del self._finishedRequest

        # Tell the request that it should stop bothering now.
        self._currentRequest.stopWriting()

    def _connectionLost_TRANSMITTING_AFTER_RECEIVING_RESPONSE(self, reason):
        """
        Move to the C{'CONNECTION_LOST'} state.
        """
        self._state = 'CONNECTION_LOST'

    def _connectionLost_WAITING(self, reason):
        """
        Disconnect the response parser so that it can propagate the event as
        necessary (for example, to call an application protocol's
        C{connectionLost} method, or to fail a request L{Deferred}) and move
        to the C{'CONNECTION_LOST'} state.
        """
        self._disconnectParser(reason)
        self._state = 'CONNECTION_LOST'

    def _connectionLost_ABORTING(self, reason):
        """
        Disconnect the response parser with a L{ConnectionAborted} failure, and
        move to the C{'CONNECTION_LOST'} state.
        """
        self._disconnectParser(Failure(ConnectionAborted()))
        self._state = 'CONNECTION_LOST'
        # Fire everything waiting on abort() completion.
        for d in self._abortDeferreds:
            d.callback(None)
        self._abortDeferreds = []

    def abort(self):
        """
        Close the connection and cause all outstanding L{request} L{Deferred}s
        to fire with an error.
        """
        if self._state == "CONNECTION_LOST":
            return succeed(None)
        self.transport.loseConnection()
        self._state = 'ABORTING'
        d = Deferred()
        self._abortDeferreds.append(d)
        return d
# --- end of twisted.web._newclient excerpt (CrossVul: CWE-20/py/good_872_0) ---
# (dataset placeholder: CWE-20/py/bad_3500_0 content unavailable — "404: Not Found")
"""Serve files directly from the ContentsManager."""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import mimetypes
import json
import base64
from tornado import web
from notebook.base.handlers import IPythonHandler
class FilesHandler(IPythonHandler):
    """serve files via ContentsManager"""

    @web.authenticated
    def get(self, path):
        """Serve the file at ``path`` from the ContentsManager.

        Refuses hidden files with a 404.  Optionally forces a download via
        the ``?download`` query argument, guesses a Content-Type from the
        file name, and writes the file contents according to the model's
        ``format`` (``base64``, ``json``, or raw text).
        """
        cm = self.contents_manager
        if cm.is_hidden(path):
            self.log.info("Refusing to serve hidden file, via 404 Error")
            raise web.HTTPError(404)

        path = path.strip('/')
        if '/' in path:
            _, name = path.rsplit('/', 1)
        else:
            name = path
        model = cm.get(path, type='file')

        if self.get_argument("download", False):
            self.set_header('Content-Disposition','attachment; filename="%s"' % name)

        # get mimetype from filename
        if name.endswith('.ipynb'):
            self.set_header('Content-Type', 'application/json')
        else:
            cur_mime = mimetypes.guess_type(name)[0]
            if cur_mime is not None:
                self.set_header('Content-Type', cur_mime)

        if model['format'] == 'base64':
            b64_bytes = model['content'].encode('ascii')
            # base64.decodestring was deprecated in Python 3.1 and removed
            # in 3.9; decodebytes is the supported equivalent.
            self.write(base64.decodebytes(b64_bytes))
        elif model['format'] == 'json':
            self.write(json.dumps(model['content']))
        else:
            self.write(model['content'])
        self.flush()

default_handlers = [
    (r"/files/(.*)", FilesHandler),
]
# (dataset placeholder: CWE-20/py/bad_1232_0 content unavailable — "404: Not Found")
# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
# Copyright 2017 - 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer, reactor
from twisted.python.failure import Failure
from synapse.api.constants import EventTypes, Membership, MAX_DEPTH
from synapse.api.errors import AuthError, Codes, SynapseError
from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.events.utils import serialize_event
from synapse.events.validator import EventValidator
from synapse.types import (
UserID, RoomAlias, RoomStreamToken,
)
from synapse.util.async import run_on_reactor, ReadWriteLock, Limiter
from synapse.util.logcontext import preserve_fn, run_in_background
from synapse.util.metrics import measure_func
from synapse.util.frozenutils import frozendict_json_encoder
from synapse.util.stringutils import random_string
from synapse.visibility import filter_events_for_client
from synapse.replication.http.send_event import send_event_to_master
from ._base import BaseHandler
from canonicaljson import encode_canonical_json
import logging
import simplejson
logger = logging.getLogger(__name__)
class PurgeStatus(object):
    """Object tracking the status of a purge request

    This class contains information on the progress of a purge request, for
    return by get_purge_status.

    Attributes:
        status (int): Tracks whether this request has completed. One of
            STATUS_{ACTIVE,COMPLETE,FAILED}
    """
    STATUS_ACTIVE = 0
    STATUS_COMPLETE = 1
    STATUS_FAILED = 2

    # Human-readable form of each numeric status, used by asdict().
    STATUS_TEXT = {
        STATUS_ACTIVE: "active",
        STATUS_COMPLETE: "complete",
        STATUS_FAILED: "failed",
    }

    def __init__(self):
        # Purges begin in the active state.
        self.status = PurgeStatus.STATUS_ACTIVE

    def asdict(self):
        """Return a JSON-serialisable summary of this purge's status."""
        status_text = PurgeStatus.STATUS_TEXT[self.status]
        return {"status": status_text}
class MessageHandler(BaseHandler):
def __init__(self, hs):
super(MessageHandler, self).__init__(hs)
self.hs = hs
self.state = hs.get_state_handler()
self.clock = hs.get_clock()
self.pagination_lock = ReadWriteLock()
self._purges_in_progress_by_room = set()
# map from purge id to PurgeStatus
self._purges_by_id = {}
def start_purge_history(self, room_id, topological_ordering,
delete_local_events=False):
"""Start off a history purge on a room.
Args:
room_id (str): The room to purge from
topological_ordering (int): minimum topo ordering to preserve
delete_local_events (bool): True to delete local events as well as
remote ones
Returns:
str: unique ID for this purge transaction.
"""
if room_id in self._purges_in_progress_by_room:
raise SynapseError(
400,
"History purge already in progress for %s" % (room_id, ),
)
purge_id = random_string(16)
# we log the purge_id here so that it can be tied back to the
# request id in the log lines.
logger.info("[purge] starting purge_id %s", purge_id)
self._purges_by_id[purge_id] = PurgeStatus()
run_in_background(
self._purge_history,
purge_id, room_id, topological_ordering, delete_local_events,
)
return purge_id
@defer.inlineCallbacks
def _purge_history(self, purge_id, room_id, topological_ordering,
delete_local_events):
"""Carry out a history purge on a room.
Args:
purge_id (str): The id for this purge
room_id (str): The room to purge from
topological_ordering (int): minimum topo ordering to preserve
delete_local_events (bool): True to delete local events as well as
remote ones
Returns:
Deferred
"""
self._purges_in_progress_by_room.add(room_id)
try:
with (yield self.pagination_lock.write(room_id)):
yield self.store.purge_history(
room_id, topological_ordering, delete_local_events,
)
logger.info("[purge] complete")
self._purges_by_id[purge_id].status = PurgeStatus.STATUS_COMPLETE
except Exception:
logger.error("[purge] failed: %s", Failure().getTraceback().rstrip())
self._purges_by_id[purge_id].status = PurgeStatus.STATUS_FAILED
finally:
self._purges_in_progress_by_room.discard(room_id)
# remove the purge from the list 24 hours after it completes
def clear_purge():
del self._purges_by_id[purge_id]
reactor.callLater(24 * 3600, clear_purge)
def get_purge_status(self, purge_id):
"""Get the current status of an active purge
Args:
purge_id (str): purge_id returned by start_purge_history
Returns:
PurgeStatus|None
"""
return self._purges_by_id.get(purge_id)
@defer.inlineCallbacks
def get_messages(self, requester, room_id=None, pagin_config=None,
as_client_event=True, event_filter=None):
"""Get messages in a room.
Args:
requester (Requester): The user requesting messages.
room_id (str): The room they want messages from.
pagin_config (synapse.api.streams.PaginationConfig): The pagination
config rules to apply, if any.
as_client_event (bool): True to get events in client-server format.
event_filter (Filter): Filter to apply to results or None
Returns:
dict: Pagination API results
"""
user_id = requester.user.to_string()
if pagin_config.from_token:
room_token = pagin_config.from_token.room_key
else:
pagin_config.from_token = (
yield self.hs.get_event_sources().get_current_token_for_room(
room_id=room_id
)
)
room_token = pagin_config.from_token.room_key
room_token = RoomStreamToken.parse(room_token)
pagin_config.from_token = pagin_config.from_token.copy_and_replace(
"room_key", str(room_token)
)
source_config = pagin_config.get_source_config("room")
with (yield self.pagination_lock.read(room_id)):
membership, member_event_id = yield self._check_in_room_or_world_readable(
room_id, user_id
)
if source_config.direction == 'b':
# if we're going backwards, we might need to backfill. This
# requires that we have a topo token.
if room_token.topological:
max_topo = room_token.topological
else:
max_topo = yield self.store.get_max_topological_token(
room_id, room_token.stream
)
if membership == Membership.LEAVE:
# If they have left the room then clamp the token to be before
# they left the room, to save the effort of loading from the
# database.
leave_token = yield self.store.get_topological_token_for_event(
member_event_id
)
leave_token = RoomStreamToken.parse(leave_token)
if leave_token.topological < max_topo:
source_config.from_key = str(leave_token)
yield self.hs.get_handlers().federation_handler.maybe_backfill(
room_id, max_topo
)
events, next_key = yield self.store.paginate_room_events(
room_id=room_id,
from_key=source_config.from_key,
to_key=source_config.to_key,
direction=source_config.direction,
limit=source_config.limit,
event_filter=event_filter,
)
next_token = pagin_config.from_token.copy_and_replace(
"room_key", next_key
)
if not events:
defer.returnValue({
"chunk": [],
"start": pagin_config.from_token.to_string(),
"end": next_token.to_string(),
})
if event_filter:
events = event_filter.filter(events)
events = yield filter_events_for_client(
self.store,
user_id,
events,
is_peeking=(member_event_id is None),
)
time_now = self.clock.time_msec()
chunk = {
"chunk": [
serialize_event(e, time_now, as_client_event)
for e in events
],
"start": pagin_config.from_token.to_string(),
"end": next_token.to_string(),
}
defer.returnValue(chunk)
@defer.inlineCallbacks
def get_room_data(self, user_id=None, room_id=None,
event_type=None, state_key="", is_guest=False):
""" Get data from a room.
Args:
event : The room path event
Returns:
The path data content.
Raises:
SynapseError if something went wrong.
"""
membership, membership_event_id = yield self._check_in_room_or_world_readable(
room_id, user_id
)
if membership == Membership.JOIN:
data = yield self.state_handler.get_current_state(
room_id, event_type, state_key
)
elif membership == Membership.LEAVE:
key = (event_type, state_key)
room_state = yield self.store.get_state_for_events(
[membership_event_id], [key]
)
data = room_state[membership_event_id].get(key)
defer.returnValue(data)
@defer.inlineCallbacks
def _check_in_room_or_world_readable(self, room_id, user_id):
try:
# check_user_was_in_room will return the most recent membership
# event for the user if:
# * The user is a non-guest user, and was ever in the room
# * The user is a guest user, and has joined the room
# else it will throw.
member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
defer.returnValue((member_event.membership, member_event.event_id))
return
except AuthError:
visibility = yield self.state_handler.get_current_state(
room_id, EventTypes.RoomHistoryVisibility, ""
)
if (
visibility and
visibility.content["history_visibility"] == "world_readable"
):
defer.returnValue((Membership.JOIN, None))
return
raise AuthError(
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
)
@defer.inlineCallbacks
def get_state_events(self, user_id, room_id, is_guest=False):
"""Retrieve all state events for a given room. If the user is
joined to the room then return the current state. If the user has
left the room return the state events from when they left.
Args:
user_id(str): The user requesting state events.
room_id(str): The room ID to get all state events from.
Returns:
A list of dicts representing state events. [{}, {}, {}]
"""
membership, membership_event_id = yield self._check_in_room_or_world_readable(
room_id, user_id
)
if membership == Membership.JOIN:
room_state = yield self.state_handler.get_current_state(room_id)
elif membership == Membership.LEAVE:
room_state = yield self.store.get_state_for_events(
[membership_event_id], None
)
room_state = room_state[membership_event_id]
now = self.clock.time_msec()
defer.returnValue(
[serialize_event(c, now) for c in room_state.values()]
)
    @defer.inlineCallbacks
    def get_joined_members(self, requester, room_id):
        """Get all the joined members in the room and their profile information.

        If the user has left the room return the state events from when they left.

        Args:
            requester(Requester): The user requesting state events.
            room_id(str): The room ID to get all state events from.
        Returns:
            A dict of user_id to profile info
        """
        user_id = requester.user.to_string()
        if not requester.app_service:
            # We check AS auth after fetching the room membership, as it
            # requires us to pull out all joined members anyway.
            membership, _ = yield self._check_in_room_or_world_readable(
                room_id, user_id
            )
            if membership != Membership.JOIN:
                raise NotImplementedError(
                    "Getting joined members after leaving is not implemented"
                )

        users_with_profile = yield self.state.get_current_user_in_room(room_id)

        # If this is an AS, double check that they are allowed to see the members.
        # This can either be because the AS user is in the room or because there
        # is a user in the room that the AS is "interested in"
        if requester.app_service and user_id not in users_with_profile:
            for uid in users_with_profile:
                if requester.app_service.is_interested_in_user(uid):
                    break
            else:
                # Loop fell through, AS has no interested users in room
                raise AuthError(403, "Appservice not in room")

        defer.returnValue({
            user_id: {
                "avatar_url": profile.avatar_url,
                "display_name": profile.display_name,
            }
            for user_id, profile in users_with_profile.iteritems()
        })
class EventCreationHandler(object):
    """Creates, authorises, persists and distributes events for local clients."""

    def __init__(self, hs):
        self.hs = hs
        self.auth = hs.get_auth()
        self.store = hs.get_datastore()
        self.state = hs.get_state_handler()
        self.clock = hs.get_clock()
        self.validator = EventValidator()
        self.profile_handler = hs.get_profile_handler()
        self.event_builder_factory = hs.get_event_builder_factory()
        self.server_name = hs.hostname
        self.ratelimiter = hs.get_ratelimiter()
        self.notifier = hs.get_notifier()
        self.config = hs.config

        self.http_client = hs.get_simple_http_client()

        # This is only used to get at ratelimit function, and maybe_kick_guest_users
        self.base_handler = BaseHandler(hs)

        self.pusher_pool = hs.get_pusherpool()

        # We arbitrarily limit concurrent event creation for a room to 5.
        # This is to stop us from diverging history *too* much.
        self.limiter = Limiter(max_count=5)

        self.action_generator = hs.get_action_generator()

        self.spam_checker = hs.get_spam_checker()

    @defer.inlineCallbacks
    def create_event(self, requester, event_dict, token_id=None, txn_id=None,
                     prev_events_and_hashes=None):
        """
        Given a dict from a client, create a new event.

        Creates an FrozenEvent object, filling out auth_events, prev_events,
        etc.

        Adds display names to Join membership events.

        Args:
            requester
            event_dict (dict): An entire event
            token_id (str)
            txn_id (str)
            prev_events_and_hashes (list[(str, dict[str, str], int)]|None):
                the forward extremities to use as the prev_events for the
                new event. For each event, a tuple of (event_id, hashes, depth)
                where *hashes* is a map from algorithm to hash.

                If None, they will be requested from the database.

        Returns:
            Tuple of created event (FrozenEvent), Context
        """
        builder = self.event_builder_factory.new(event_dict)

        self.validator.validate_new(builder)

        if builder.type == EventTypes.Member:
            membership = builder.content.get("membership", None)
            target = UserID.from_string(builder.state_key)

            if membership in {Membership.JOIN, Membership.INVITE}:
                # If event doesn't include a display name, add one.
                profile = self.profile_handler
                content = builder.content

                try:
                    if "displayname" not in content:
                        content["displayname"] = yield profile.get_displayname(target)
                    if "avatar_url" not in content:
                        content["avatar_url"] = yield profile.get_avatar_url(target)
                except Exception as e:
                    # Best-effort: a missing profile must not block the join.
                    logger.info(
                        "Failed to get profile information for %r: %s",
                        target, e
                    )

        if token_id is not None:
            builder.internal_metadata.token_id = token_id

        if txn_id is not None:
            builder.internal_metadata.txn_id = txn_id

        event, context = yield self.create_new_client_event(
            builder=builder,
            requester=requester,
            prev_events_and_hashes=prev_events_and_hashes,
        )

        defer.returnValue((event, context))

    @defer.inlineCallbacks
    def send_nonmember_event(self, requester, event, context, ratelimit=True):
        """
        Persists and notifies local clients and federation of an event.

        Args:
            event (FrozenEvent) the event to send.
            context (Context) the context of the event.
            ratelimit (bool): Whether to rate limit this send.
            is_guest (bool): Whether the sender is a guest.
        """
        if event.type == EventTypes.Member:
            raise SynapseError(
                500,
                "Tried to send member event through non-member codepath"
            )

        user = UserID.from_string(event.sender)

        assert self.hs.is_mine(user), "User must be our own: %s" % (user,)

        if event.is_state():
            # If an identical state event is already the current state, return
            # it instead of persisting a duplicate.
            prev_state = yield self.deduplicate_state_event(event, context)
            if prev_state is not None:
                defer.returnValue(prev_state)

        yield self.handle_new_client_event(
            requester=requester,
            event=event,
            context=context,
            ratelimit=ratelimit,
        )

    @defer.inlineCallbacks
    def deduplicate_state_event(self, event, context):
        """
        Checks whether event is in the latest resolved state in context.

        If so, returns the version of the event in context.
        Otherwise, returns None.
        """
        prev_event_id = context.prev_state_ids.get((event.type, event.state_key))
        prev_event = yield self.store.get_event(prev_event_id, allow_none=True)
        if not prev_event:
            return

        if prev_event and event.user_id == prev_event.user_id:
            # Compare canonical JSON so key ordering differences don't matter.
            prev_content = encode_canonical_json(prev_event.content)
            next_content = encode_canonical_json(event.content)
            if prev_content == next_content:
                defer.returnValue(prev_event)
        return

    @defer.inlineCallbacks
    def create_and_send_nonmember_event(
        self,
        requester,
        event_dict,
        ratelimit=True,
        txn_id=None
    ):
        """
        Creates an event, then sends it.

        See self.create_event and self.send_nonmember_event.
        """

        # We limit the number of concurrent event sends in a room so that we
        # don't fork the DAG too much. If we don't limit then we can end up in
        # a situation where event persistence can't keep up, causing
        # extremities to pile up, which in turn leads to state resolution
        # taking longer.
        with (yield self.limiter.queue(event_dict["room_id"])):
            event, context = yield self.create_event(
                requester,
                event_dict,
                token_id=requester.access_token_id,
                txn_id=txn_id
            )

            spam_error = self.spam_checker.check_event_for_spam(event)
            if spam_error:
                # Spam checkers may return True or a custom message string.
                if not isinstance(spam_error, basestring):
                    spam_error = "Spam is not permitted here"
                raise SynapseError(
                    403, spam_error, Codes.FORBIDDEN
                )

            yield self.send_nonmember_event(
                requester,
                event,
                context,
                ratelimit=ratelimit,
            )
        defer.returnValue(event)

    @measure_func("create_new_client_event")
    @defer.inlineCallbacks
    def create_new_client_event(self, builder, requester=None,
                                prev_events_and_hashes=None):
        """Create a new event for a local client

        Args:
            builder (EventBuilder):

            requester (synapse.types.Requester|None):

            prev_events_and_hashes (list[(str, dict[str, str], int)]|None):
                the forward extremities to use as the prev_events for the
                new event. For each event, a tuple of (event_id, hashes, depth)
                where *hashes* is a map from algorithm to hash.

                If None, they will be requested from the database.

        Returns:
            Deferred[(synapse.events.EventBase, synapse.events.snapshot.EventContext)]
        """

        if prev_events_and_hashes is not None:
            assert len(prev_events_and_hashes) <= 10, \
                "Attempting to create an event with %i prev_events" % (
                    len(prev_events_and_hashes),
                )
        else:
            prev_events_and_hashes = \
                yield self.store.get_prev_events_for_room(builder.room_id)

        if prev_events_and_hashes:
            depth = max([d for _, _, d in prev_events_and_hashes]) + 1
            # we cap depth of generated events, to ensure that they are not
            # rejected by other servers (and so that they can be persisted in
            # the db)
            depth = min(depth, MAX_DEPTH)
        else:
            depth = 1

        prev_events = [
            (event_id, prev_hashes)
            for event_id, prev_hashes, _ in prev_events_and_hashes
        ]

        builder.prev_events = prev_events
        builder.depth = depth

        context = yield self.state.compute_event_context(builder)
        if requester:
            context.app_service = requester.app_service

        if builder.is_state():
            builder.prev_state = yield self.store.add_event_hashes(
                context.prev_state_events
            )

        yield self.auth.add_auth_events(builder, context)

        signing_key = self.hs.config.signing_key[0]
        add_hashes_and_signatures(
            builder, self.server_name, signing_key
        )

        event = builder.build()

        logger.debug(
            "Created event %s with state: %s",
            event.event_id, context.prev_state_ids,
        )

        defer.returnValue(
            (event, context,)
        )

    @measure_func("handle_new_client_event")
    @defer.inlineCallbacks
    def handle_new_client_event(
        self,
        requester,
        event,
        context,
        ratelimit=True,
        extra_users=[],
    ):
        """Processes a new event. This includes checking auth, persisting it,
        notifying users, sending to remote servers, etc.

        If called from a worker will hit out to the master process for final
        processing.

        Args:
            requester (Requester)
            event (FrozenEvent)
            context (EventContext)
            ratelimit (bool)
            extra_users (list(UserID)): Any extra users to notify about event
        """
        # NOTE(review): ``extra_users=[]`` is a mutable default argument. It is
        # only passed through, never mutated here, so this is benign -- but a
        # ``None`` default would be safer; confirm callers before changing.

        try:
            yield self.auth.check_from_context(event, context)
        except AuthError as err:
            logger.warn("Denying new event %r because %s", event, err)
            raise err

        # Ensure that we can round trip before trying to persist in db
        try:
            dump = frozendict_json_encoder.encode(event.content)
            simplejson.loads(dump)
        except Exception:
            logger.exception("Failed to encode content: %r", event.content)
            raise

        yield self.action_generator.handle_push_actions_for_event(
            event, context
        )

        try:
            # If we're a worker we need to hit out to the master.
            if self.config.worker_app:
                yield send_event_to_master(
                    self.http_client,
                    host=self.config.worker_replication_host,
                    port=self.config.worker_replication_http_port,
                    requester=requester,
                    event=event,
                    context=context,
                    ratelimit=ratelimit,
                    extra_users=extra_users,
                )
                return

            yield self.persist_and_notify_client_event(
                requester,
                event,
                context,
                ratelimit=ratelimit,
                extra_users=extra_users,
            )
        except:  # noqa: E722, as we reraise the exception this is fine.
            # Ensure that we actually remove the entries in the push actions
            # staging area, if we calculated them.
            preserve_fn(self.store.remove_push_actions_from_staging)(event.event_id)
            raise

    @defer.inlineCallbacks
    def persist_and_notify_client_event(
        self,
        requester,
        event,
        context,
        ratelimit=True,
        extra_users=[],
    ):
        """Called when we have fully built the event, have already
        calculated the push actions for the event, and checked auth.

        This should only be run on master.
        """
        assert not self.config.worker_app

        if ratelimit:
            yield self.base_handler.ratelimit(requester)

        yield self.base_handler.maybe_kick_guest_users(event, context)

        if event.type == EventTypes.CanonicalAlias:
            # Check the alias is actually valid (at this time at least)
            room_alias_str = event.content.get("alias", None)
            if room_alias_str:
                room_alias = RoomAlias.from_string(room_alias_str)
                directory_handler = self.hs.get_handlers().directory_handler
                mapping = yield directory_handler.get_association(room_alias)

                if mapping["room_id"] != event.room_id:
                    raise SynapseError(
                        400,
                        "Room alias %s does not point to the room" % (
                            room_alias_str,
                        )
                    )

        federation_handler = self.hs.get_handlers().federation_handler

        if event.type == EventTypes.Member:
            if event.content["membership"] == Membership.INVITE:
                def is_inviter_member_event(e):
                    return (
                        e.type == EventTypes.Member and
                        e.sender == event.sender
                    )

                # Bundle a stripped-down copy of interesting room state into
                # the invite so the invitee's client can render it.
                state_to_include_ids = [
                    e_id
                    for k, e_id in context.current_state_ids.iteritems()
                    if k[0] in self.hs.config.room_invite_state_types
                    or k == (EventTypes.Member, event.sender)
                ]

                state_to_include = yield self.store.get_events(state_to_include_ids)

                event.unsigned["invite_room_state"] = [
                    {
                        "type": e.type,
                        "state_key": e.state_key,
                        "content": e.content,
                        "sender": e.sender,
                    }
                    for e in state_to_include.itervalues()
                ]

                invitee = UserID.from_string(event.state_key)
                if not self.hs.is_mine(invitee):
                    # TODO: Can we add signature from remote server in a nicer
                    # way? If we have been invited by a remote server, we need
                    # to get them to sign the event.

                    returned_invite = yield federation_handler.send_invite(
                        invitee.domain,
                        event,
                    )

                    event.unsigned.pop("room_state", None)

                    # TODO: Make sure the signatures actually are correct.
                    event.signatures.update(
                        returned_invite.signatures
                    )

        if event.type == EventTypes.Redaction:
            # Verify the redacter has permission over the redacted event.
            auth_events_ids = yield self.auth.compute_auth_events(
                event, context.prev_state_ids, for_verification=True,
            )
            auth_events = yield self.store.get_events(auth_events_ids)
            auth_events = {
                (e.type, e.state_key): e for e in auth_events.values()
            }
            if self.auth.check_redaction(event, auth_events=auth_events):
                original_event = yield self.store.get_event(
                    event.redacts,
                    check_redacted=False,
                    get_prev_content=False,
                    allow_rejected=False,
                    allow_none=False
                )
                if event.user_id != original_event.user_id:
                    raise AuthError(
                        403,
                        "You don't have permission to redact events"
                    )

        if event.type == EventTypes.Create and context.prev_state_ids:
            raise AuthError(
                403,
                "Changing the room create event is forbidden",
            )

        (event_stream_id, max_stream_id) = yield self.store.persist_event(
            event, context=context
        )

        # this intentionally does not yield: we don't care about the result
        # and don't need to wait for it.
        preserve_fn(self.pusher_pool.on_new_notifications)(
            event_stream_id, max_stream_id
        )

        @defer.inlineCallbacks
        def _notify():
            yield run_on_reactor()
            self.notifier.on_new_room_event(
                event, event_stream_id, max_stream_id,
                extra_users=extra_users
            )

        preserve_fn(_notify)()

        if event.type == EventTypes.Message:
            presence = self.hs.get_presence_handler()
            # We don't want to block sending messages on any presence code. This
            # matters as sometimes presence code can take a while.
            preserve_fn(presence.bump_presence_active_time)(requester.user)
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_100_2 |
crossvul-python_data_good_1740_0 | """Serve files directly from the ContentsManager."""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import mimetypes
import json
import base64
from tornado import web
from notebook.base.handlers import IPythonHandler
class FilesHandler(IPythonHandler):
    """Serve files via the ContentsManager.

    Refuses to serve hidden files and sets a best-effort Content-Type based on
    the filename extension and the content model's format.
    """

    @web.authenticated
    def get(self, path):
        """Serve the file at ``path``; with ``?download=...`` force an attachment."""
        cm = self.contents_manager
        if cm.is_hidden(path):
            self.log.info("Refusing to serve hidden file, via 404 Error")
            raise web.HTTPError(404)

        path = path.strip('/')
        if '/' in path:
            _, name = path.rsplit('/', 1)
        else:
            name = path
        model = cm.get(path, type='file')

        if self.get_argument("download", False):
            # SECURITY: escape backslashes and double quotes so a crafted
            # filename cannot break out of the quoted-string and inject extra
            # Content-Disposition parameters (RFC 6266 quoted-string rules).
            escaped_name = name.replace('\\', '\\\\').replace('"', '\\"')
            self.set_header(
                'Content-Disposition', 'attachment; filename="%s"' % escaped_name
            )

        # get mimetype from filename
        if name.endswith('.ipynb'):
            self.set_header('Content-Type', 'application/json')
        else:
            cur_mime = mimetypes.guess_type(name)[0]
            if cur_mime is not None:
                self.set_header('Content-Type', cur_mime)
            else:
                # Unknown extension: fall back based on the content encoding.
                if model['format'] == 'base64':
                    self.set_header('Content-Type', 'application/octet-stream')
                else:
                    self.set_header('Content-Type', 'text/plain')

        if model['format'] == 'base64':
            b64_bytes = model['content'].encode('ascii')
            self.write(base64.decodestring(b64_bytes))
        elif model['format'] == 'json':
            self.write(json.dumps(model['content']))
        else:
            self.write(model['content'])
        self.flush()
# URL-pattern to handler mapping picked up by the notebook web application.
default_handlers = [
    (r"/files/(.*)", FilesHandler),
]
crossvul-python_data_good_652_0 | from __future__ import unicode_literals
from itertools import chain
import re
import string
import six
from six.moves.urllib.parse import urlparse
from xml.sax.saxutils import unescape
import html5lib
from html5lib.constants import (
entities,
namespaces,
prefixes,
tokenTypes,
)
try:
from html5lib.constants import ReparseException
except ImportError:
# html5lib-python 1.0 changed the name
from html5lib.constants import _ReparseException as ReparseException
from html5lib.filters.base import Filter
from html5lib.filters import sanitizer
from html5lib.serializer import HTMLSerializer
from html5lib._tokenizer import HTMLTokenizer
from html5lib._trie import Trie
from bleach.utils import alphabetize_attributes, force_unicode
#: Map of entity name to expanded entity
ENTITIES = entities

#: Trie of html entity string -> character representation
ENTITIES_TRIE = Trie(ENTITIES)

#: List of allowed tags
ALLOWED_TAGS = [
    'a',
    'abbr',
    'acronym',
    'b',
    'blockquote',
    'code',
    'em',
    'i',
    'li',
    'ol',
    'strong',
    'ul',
]

#: Map of allowed attributes by tag
ALLOWED_ATTRIBUTES = {
    'a': ['href', 'title'],
    'abbr': ['title'],
    'acronym': ['title'],
}

#: List of allowed styles
ALLOWED_STYLES = []

#: List of allowed protocols
ALLOWED_PROTOCOLS = ['http', 'https', 'mailto']

#: Regexp (with a capturing group) used to split text on "&" so candidate
#: entities can be examined individually
AMP_SPLIT_RE = re.compile('(&)')

#: Invisible characters--0 to and including 31 except 9 (tab), 10 (lf), and 13 (cr)
INVISIBLE_CHARACTERS = ''.join([chr(c) for c in chain(range(0, 9), range(11, 13), range(14, 32))])

#: Regexp for characters that are invisible
INVISIBLE_CHARACTERS_RE = re.compile(
    '[' + INVISIBLE_CHARACTERS + ']',
    re.UNICODE
)

#: String to replace invisible characters with. This can be a character, a
#: string, or even a function that takes a Python re matchobj
INVISIBLE_REPLACEMENT_CHAR = '?'
def convert_entity(value):
    """Convert an entity (minus the & and ; part) into what it represents

    This handles numeric, hex, and text entities.

    :arg value: the string (minus the ``&`` and ``;`` part) to convert

    :returns: unicode character

    """
    if value[0] != '#':
        # Named entity: look the expansion up directly.
        return ENTITIES[value]

    # Numeric entity: hex when marked with x/X, decimal otherwise.
    if value[1] in ('x', 'X'):
        codepoint = int(value[2:], 16)
    else:
        codepoint = int(value[1:], 10)
    return six.unichr(codepoint)
def convert_entities(text):
    """Converts all found entities in the text

    :arg text: the text to convert entities in

    :returns: unicode text with converted entities

    """
    if '&' not in text:
        return text

    new_text = []
    for part in next_possible_entity(text):
        if not part:
            continue

        if part.startswith('&'):
            entity = match_entity(part)
            if entity is not None:
                new_text.append(convert_entity(entity))
                # Skip past the entity: +2 accounts for the "&" and ";" that
                # match_entity strips off.
                remainder = part[len(entity) + 2:]
                # BUGFIX: guard on ``remainder`` (previously ``if part:``,
                # which is always truthy here, so empty strings were appended;
                # this now matches BleachSanitizerFilter.sanitize_characters).
                if remainder:
                    new_text.append(remainder)
                continue

        new_text.append(part)

    return u''.join(new_text)
class BleachHTMLTokenizer(HTMLTokenizer):
    """Tokenizer that leaves HTML entities in the stream unconverted."""

    def consumeEntity(self, allowedChar=None, fromAttribute=False):
        # We don't want to consume and convert entities, so this overrides the
        # html5lib tokenizer's consumeEntity so that it's now a no-op.
        #
        # However, when that gets called, it's consumed an &, so we put that in
        # the stream.
        if fromAttribute:
            # Re-append the "&" to the value of the attribute being built.
            self.currentToken['data'][-1][1] += '&'

        else:
            self.tokenQueue.append({"type": tokenTypes['Characters'], "data": '&'})
class BleachHTMLParser(html5lib.HTMLParser):
    """HTML parser wired up to BleachHTMLTokenizer so entities pass through."""

    def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs):
        # Override HTMLParser so we can swap out the tokenizer for our own.
        self.innerHTMLMode = innerHTML
        self.container = container
        self.scripting = scripting
        self.tokenizer = BleachHTMLTokenizer(stream, parser=self, **kwargs)
        self.reset()

        try:
            self.mainLoop()
        except ReparseException:
            # The tree builder asked for a reparse; start over once.
            self.reset()
            self.mainLoop()
class Cleaner(object):
    """Cleaner for cleaning HTML fragments of malicious content

    This cleaner is a security-focused function whose sole purpose is to remove
    malicious content from a string such that it can be displayed as content in
    a web page.

    This cleaner is not designed to use to transform content to be used in
    non-web-page contexts.

    To use::

        from bleach.sanitizer import Cleaner

        cleaner = Cleaner()

        for text in all_the_yucky_things:
            sanitized = cleaner.clean(text)

    """

    def __init__(self, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES,
                 styles=ALLOWED_STYLES, protocols=ALLOWED_PROTOCOLS, strip=False,
                 strip_comments=True, filters=None):
        """Initializes a Cleaner

        :arg list tags: allowed list of tags; defaults to
            ``bleach.sanitizer.ALLOWED_TAGS``

        :arg dict attributes: allowed attributes; can be a callable, list or dict;
            defaults to ``bleach.sanitizer.ALLOWED_ATTRIBUTES``

        :arg list styles: allowed list of css styles; defaults to
            ``bleach.sanitizer.ALLOWED_STYLES``

        :arg list protocols: allowed list of protocols for links; defaults
            to ``bleach.sanitizer.ALLOWED_PROTOCOLS``

        :arg bool strip: whether or not to strip disallowed elements

        :arg bool strip_comments: whether or not to strip HTML comments

        :arg list filters: list of html5lib Filter classes to pass streamed content through

            .. seealso:: http://html5lib.readthedocs.io/en/latest/movingparts.html#filters

            .. Warning::

               Using filters changes the output of ``bleach.Cleaner.clean``.
               Make sure the way the filters change the output are secure.

        """
        self.tags = tags
        self.attributes = attributes
        self.styles = styles
        self.protocols = protocols
        self.strip = strip
        self.strip_comments = strip_comments
        self.filters = filters or []

        self.parser = BleachHTMLParser(namespaceHTMLElements=False)
        self.walker = html5lib.getTreeWalker('etree')
        self.serializer = BleachHTMLSerializer(
            quote_attr_values='always',
            omit_optional_tags=False,
            escape_lt_in_attrs=True,

            # We want to leave entities as they are without escaping or
            # resolving or expanding
            resolve_entities=False,

            # Bleach has its own sanitizer, so don't use the html5lib one
            sanitize=False,

            # Bleach sanitizer alphabetizes already, so don't use the html5lib one
            alphabetical_attributes=False,
        )

    def clean(self, text):
        """Cleans text and returns sanitized result as unicode

        :arg str text: text to be cleaned

        :returns: sanitized text as unicode

        :raises TypeError: if ``text`` is not a text type

        """
        if not isinstance(text, six.string_types):
            message = "argument cannot be of '{name}' type, must be of text type".format(
                name=text.__class__.__name__)
            raise TypeError(message)

        if not text:
            return u''

        text = force_unicode(text)

        dom = self.parser.parseFragment(text)
        filtered = BleachSanitizerFilter(
            source=self.walker(dom),

            # Bleach-sanitizer-specific things
            attributes=self.attributes,
            strip_disallowed_elements=self.strip,
            strip_html_comments=self.strip_comments,

            # html5lib-sanitizer things
            allowed_elements=self.tags,
            allowed_css_properties=self.styles,
            allowed_protocols=self.protocols,
            allowed_svg_properties=[],
        )

        # Apply any filters after the BleachSanitizerFilter
        for filter_class in self.filters:
            filtered = filter_class(source=filtered)

        return self.serializer.render(filtered)
def attribute_filter_factory(attributes):
    """Generates attribute filter function for the given attributes value

    The attributes value can take one of several shapes. This returns a filter
    function appropriate to the attributes value. One nice thing about this is
    that there's less if/then shenanigans in the ``allow_token`` method.

    """
    # Already a filter function: hand it straight back.
    if callable(attributes):
        return attributes

    if isinstance(attributes, dict):
        def _attr_filter(tag, attr, value):
            # Consult the tag-specific entry first, then the "*" wildcard.
            for lookup in (tag, '*'):
                if lookup not in attributes:
                    continue
                allowed = attributes[lookup]
                if callable(allowed):
                    return allowed(tag, attr, value)
                if attr in allowed:
                    return True
            return False

        return _attr_filter

    if isinstance(attributes, list):
        def _attr_filter(tag, attr, value):
            # Flat list: the same attributes are allowed on every tag.
            return attr in attributes

        return _attr_filter

    raise ValueError('attributes needs to be a callable, a list or a dict')
def match_entity(stream):
    """Returns first entity in stream or None if no entity exists

    Note: For Bleach purposes, entities must start with a "&" and end with
    a ";".

    :arg stream: the character stream

    :returns: ``None`` or the entity string without "&" or ";"

    """
    # Nix the & at the beginning
    if stream[0] != '&':
        raise ValueError('Stream should begin with "&"')
    stream = stream[1:]

    stream = list(stream)
    possible_entity = ''
    # Characters that terminate an entity candidate before a ";" is seen.
    end_characters = '<&=;' + string.whitespace

    # Handle number entities
    if stream and stream[0] == '#':
        possible_entity = '#'
        stream.pop(0)

        if stream and stream[0] in ('x', 'X'):
            allowed = '0123456789abcdefABCDEF'
            possible_entity += stream.pop(0)
        else:
            allowed = '0123456789'

        # FIXME(willkg): Do we want to make sure these are valid number
        # entities? This doesn't do that currently.
        while stream and stream[0] not in end_characters:
            c = stream.pop(0)
            if c not in allowed:
                break
            possible_entity += c

        # Only a match when something was accumulated and a ";" follows.
        if possible_entity and stream and stream[0] == ';':
            return possible_entity
        return None

    # Handle character entities
    while stream and stream[0] not in end_characters:
        c = stream.pop(0)
        # NOTE(review): the trie prefix check runs against possible_entity
        # *before* the just-popped character is appended, so each character is
        # validated on the following iteration -- confirm against upstream
        # bleach before changing this ordering.
        if not ENTITIES_TRIE.has_keys_with_prefix(possible_entity):
            break
        possible_entity += c

    if possible_entity and stream and stream[0] == ';':
        return possible_entity
    return None
def next_possible_entity(text):
    """Takes a text and generates a list of possible entities

    :arg text: the text to look at

    :returns: generator where each part (except the first) starts with an
        "&"

    """
    # Splitting on a capturing group interleaves the "&" separators at odd
    # indices; the text that followed each "&" sits at the even indices >= 2.
    parts = AMP_SPLIT_RE.split(text)
    yield parts[0]
    for chunk in parts[2::2]:
        yield '&' + chunk
class BleachSanitizerFilter(sanitizer.Filter):
"""html5lib Filter that sanitizes text
This filter can be used anywhere html5lib filters can be used.
"""
def __init__(self, source, attributes=ALLOWED_ATTRIBUTES,
strip_disallowed_elements=False, strip_html_comments=True,
**kwargs):
"""Creates a BleachSanitizerFilter instance
:arg Treewalker source: stream
:arg list tags: allowed list of tags; defaults to
``bleach.sanitizer.ALLOWED_TAGS``
:arg dict attributes: allowed attributes; can be a callable, list or dict;
defaults to ``bleach.sanitizer.ALLOWED_ATTRIBUTES``
:arg list styles: allowed list of css styles; defaults to
``bleach.sanitizer.ALLOWED_STYLES``
:arg list protocols: allowed list of protocols for links; defaults
to ``bleach.sanitizer.ALLOWED_PROTOCOLS``
:arg bool strip_disallowed_elements: whether or not to strip disallowed
elements
:arg bool strip_html_comments: whether or not to strip HTML comments
"""
self.attr_filter = attribute_filter_factory(attributes)
self.strip_disallowed_elements = strip_disallowed_elements
self.strip_html_comments = strip_html_comments
return super(BleachSanitizerFilter, self).__init__(source, **kwargs)
def __iter__(self):
for token in Filter.__iter__(self):
ret = self.sanitize_token(token)
if not ret:
continue
if isinstance(ret, list):
for subtoken in ret:
yield subtoken
else:
yield ret
def sanitize_token(self, token):
"""Sanitize a token either by HTML-encoding or dropping.
Unlike sanitizer.Filter, allowed_attributes can be a dict of {'tag':
['attribute', 'pairs'], 'tag': callable}.
Here callable is a function with two arguments of attribute name and
value. It should return true of false.
Also gives the option to strip tags instead of encoding.
:arg dict token: token to sanitize
:returns: token or list of tokens
"""
token_type = token['type']
if token_type in ['StartTag', 'EndTag', 'EmptyTag']:
if token['name'] in self.allowed_elements:
return self.allow_token(token)
elif self.strip_disallowed_elements:
return None
else:
if 'data' in token:
# Alphabetize the attributes before calling .disallowed_token()
# so that the resulting string is stable
token['data'] = alphabetize_attributes(token['data'])
return self.disallowed_token(token)
elif token_type == 'Comment':
if not self.strip_html_comments:
return token
else:
return None
elif token_type == 'Characters':
return self.sanitize_characters(token)
else:
return token
def sanitize_characters(self, token):
"""Handles Characters tokens
Our overridden tokenizer doesn't do anything with entities. However,
that means that the serializer will convert all ``&`` in Characters
tokens to ``&``.
Since we don't want that, we extract entities here and convert them to
Entity tokens so the serializer will let them be.
:arg token: the Characters token to work on
:returns: a list of tokens
"""
data = token.get('data', '')
if not data:
return token
data = INVISIBLE_CHARACTERS_RE.sub(INVISIBLE_REPLACEMENT_CHAR, data)
token['data'] = data
# If there isn't a & in the data, we can return now
if '&' not in data:
return token
new_tokens = []
# For each possible entity that starts with a "&", we try to extract an
# actual entity and re-tokenize accordingly
for part in next_possible_entity(data):
if not part:
continue
if part.startswith('&'):
entity = match_entity(part)
if entity is not None:
new_tokens.append({'type': 'Entity', 'name': entity})
# Length of the entity plus 2--one for & at the beginning
# and and one for ; at the end
remainder = part[len(entity) + 2:]
if remainder:
new_tokens.append({'type': 'Characters', 'data': remainder})
continue
new_tokens.append({'type': 'Characters', 'data': part})
return new_tokens
def sanitize_uri_value(self, value, allowed_protocols):
"""Checks a uri value to see if it's allowed
:arg value: the uri value to sanitize
:arg allowed_protocols: list of allowed protocols
:returns: allowed value or None
"""
# NOTE(willkg): This transforms the value into one that's easier to
# match and verify, but shouldn't get returned since it's vastly
# different than the original value.
# Convert all character entities in the value
new_value = convert_entities(value)
# Nix backtick, space characters, and control characters
new_value = re.sub(
"[`\000-\040\177-\240\s]+",
'',
new_value
)
# Remove REPLACEMENT characters
new_value = new_value.replace('\ufffd', '')
# Lowercase it--this breaks the value, but makes it easier to match
# against
new_value = new_value.lower()
# Drop attributes with uri values that have protocols that aren't
# allowed
parsed = urlparse(new_value)
if parsed.scheme:
# If urlparse found a scheme, check that
if parsed.scheme in allowed_protocols:
return value
else:
# Allow uris that are just an anchor
if new_value.startswith('#'):
return value
# Handle protocols that urlparse doesn't recognize like "myprotocol"
if ':' in new_value and new_value.split(':')[0] in allowed_protocols:
return value
# If there's no protocol/scheme specified, then assume it's "http"
# and see if that's allowed
if 'http' in allowed_protocols:
return value
return None
def allow_token(self, token):
"""Handles the case where we're allowing the tag"""
if 'data' in token:
# Loop through all the attributes and drop the ones that are not
# allowed, are unsafe or break other rules. Additionally, fix
# attribute values that need fixing.
#
# At the end of this loop, we have the final set of attributes
# we're keeping.
attrs = {}
for namespaced_name, val in token['data'].items():
namespace, name = namespaced_name
# Drop attributes that are not explicitly allowed
#
# NOTE(willkg): We pass in the attribute name--not a namespaced
# name.
if not self.attr_filter(token['name'], name, val):
continue
# Drop attributes with uri values that use a disallowed protocol
# Sanitize attributes with uri values
if namespaced_name in self.attr_val_is_uri:
new_value = self.sanitize_uri_value(val, self.allowed_protocols)
if new_value is None:
continue
val = new_value
# Drop values in svg attrs with non-local IRIs
if namespaced_name in self.svg_attr_val_allows_ref:
new_val = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)',
' ',
unescape(val))
new_val = new_val.strip()
if not new_val:
continue
else:
# Replace the val with the unescaped version because
# it's a iri
val = new_val
# Drop href and xlink:href attr for svg elements with non-local IRIs
if (None, token['name']) in self.svg_allow_local_href:
if namespaced_name in [(None, 'href'), (namespaces['xlink'], 'href')]:
if re.search(r'^\s*[^#\s]', val):
continue
# If it's a style attribute, sanitize it
if namespaced_name == (None, u'style'):
val = self.sanitize_css(val)
# At this point, we want to keep the attribute, so add it in
attrs[namespaced_name] = val
token['data'] = alphabetize_attributes(attrs)
return token
def disallowed_token(self, token):
token_type = token["type"]
if token_type == "EndTag":
token["data"] = "</%s>" % token["name"]
elif token["data"]:
assert token_type in ("StartTag", "EmptyTag")
attrs = []
for (ns, name), v in token["data"].items():
attrs.append(' %s="%s"' % (
name if ns is None else "%s:%s" % (prefixes[ns], name),
# NOTE(willkg): HTMLSerializer escapes attribute values
# already, so if we do it here (like HTMLSerializer does),
# then we end up double-escaping.
v)
)
token["data"] = "<%s%s>" % (token["name"], ''.join(attrs))
else:
token["data"] = "<%s>" % token["name"]
if token.get("selfClosing"):
token["data"] = token["data"][:-1] + "/>"
token["type"] = "Characters"
del token["name"]
return token
def sanitize_css(self, style):
    """Sanitizes css in style tags.

    Strips ``url(...)`` references, rejects the whole value if it contains
    anything outside a conservative character whitelist ("gauntlet"), and
    then keeps only ``prop: value`` pairs whose property name appears in
    ``self.allowed_css_properties`` or ``self.allowed_svg_properties``.

    Returns the cleaned style string, or ``''`` if the value is rejected.

    FIX: all regex patterns are now raw strings. The previous non-raw
    literals (e.g. ``'url\\s*...'``) relied on invalid string escape
    sequences, which raise DeprecationWarning/SyntaxWarning on modern
    Python and are slated to become errors. The compiled patterns are
    byte-for-byte identical.
    """
    # disallow urls
    style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)

    # gauntlet
    # Validate the css in the style tag and if it's not valid, then drop
    # the whole thing.
    parts = style.split(';')
    gauntlet = re.compile(
        r"""^([-/:,#%.'"\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'\s*|"[\s\w]+"|\([\d,%\.\s]+\))*$"""
    )

    for part in parts:
        if not gauntlet.match(part):
            return ''

    if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):
        return ''

    clean = []
    for prop, value in re.findall(r'([-\w]+)\s*:\s*([^:;]*)', style):
        if not value:
            continue

        if prop.lower() in self.allowed_css_properties:
            clean.append(prop + ': ' + value + ';')

        elif prop.lower() in self.allowed_svg_properties:
            clean.append(prop + ': ' + value + ';')

    return ' '.join(clean)
class BleachHTMLSerializer(HTMLSerializer):
    """Wraps the HTMLSerializer and fixes ampersand escaping in attributes.

    HTMLSerializer escapes every ``&`` in attribute values to ``&amp;``;
    this wrapper undoes that and then re-escapes only the *bare* ampersands
    (those that do not start a recognized character entity).
    """

    def escape_base_amp(self, stoken):
        """Escapes bare & in HTML attribute values.

        FIX: both ``replace`` calls had been mangled into no-ops
        (``replace('&', '&')``). Restored the intended transforms:
        first undo the serializer's escaping (``&amp;`` -> ``&``), then
        escape bare ampersands (``&`` -> ``&amp;``) in non-entity parts.
        """
        # First, undo what the HTMLSerializer did
        stoken = stoken.replace('&amp;', '&')

        # Then, escape any bare &
        for part in next_possible_entity(stoken):
            if not part:
                continue

            if part.startswith('&'):
                entity = match_entity(part)
                if entity is not None:
                    # A real entity: emit it verbatim.
                    yield '&' + entity + ';'

                    # Length of the entity plus 2--one for & at the beginning
                    # and one for ; at the end
                    part = part[len(entity) + 2:]
                    if part:
                        yield part
                    continue

            yield part.replace('&', '&amp;')

    def serialize(self, treewalker, encoding=None):
        """Wrap HTMLSerializer.serialize and escape bare & in attributes.

        Tracks a tiny amount of state over the token stream: once inside a
        tag and immediately after an ``=``, the next non-quote token is an
        attribute value and gets run through :meth:`escape_base_amp`.
        """
        in_tag = False
        after_equals = False

        for stoken in super(BleachHTMLSerializer, self).serialize(treewalker, encoding):
            if in_tag:
                if stoken == '>':
                    in_tag = False

                elif after_equals:
                    if stoken != '"':
                        # Attribute value token: fix its ampersands.
                        for part in self.escape_base_amp(stoken):
                            yield part

                        after_equals = False
                        continue

                elif stoken == '=':
                    after_equals = True

                yield stoken

            else:
                if stoken.startswith('<'):
                    in_tag = True

                yield stoken
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_652_0 |
crossvul-python_data_good_3500_1 | import os
from mercurial import ui, hg
import Bcfg2.Server.Plugin
# for debugging output only
import logging
logger = logging.getLogger('Bcfg2.Plugins.Mercurial')
class Hg(Bcfg2.Server.Plugin.Plugin,
         Bcfg2.Server.Plugin.Version):
    """Mercurial is a version plugin for dealing with Bcfg2 repository."""
    name = 'Mercurial'
    __version__ = '$Id$'
    __author__ = 'bcfg-dev@mcs.anl.gov'
    experimental = True

    def __init__(self, core, datastore):
        """Initialize the plugin and verify ``<datastore>/.hg`` exists.

        Raises PluginInitError if the datastore is not a Mercurial
        working copy (or its history cannot be read).
        """
        Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
        Bcfg2.Server.Plugin.Version.__init__(self)
        self.core = core
        self.datastore = datastore

        # path to hg directory for Bcfg2 repo
        hg_dir = "%s/.hg" % datastore

        # Read changeset from bcfg2 repo
        if os.path.isdir(hg_dir):
            self.get_revision()
        else:
            logger.error("%s is not present." % hg_dir)
            raise Bcfg2.Server.Plugin.PluginInitError

        logger.debug("Initialized hg plugin with hg directory = %s" % hg_dir)

    def get_revision(self):
        """Read hg revision information for the Bcfg2 repository.

        Returns the numeric revision of the repository tip.
        """
        try:
            repo_path = "%s/" % self.datastore
            repo = hg.repository(ui.ui(), repo_path)
            tip = repo.changelog.tip()
            revision = repo.changelog.rev(tip)
        # FIX: was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; narrow to Exception so shutdown signals
        # propagate while mercurial failures are still handled.
        except Exception:
            logger.error("Failed to read hg repository; disabling mercurial support")
            raise Bcfg2.Server.Plugin.PluginInitError
        return revision
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3500_1 |
crossvul-python_data_good_3723_0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Forms used for Horizon's auth mechanisms.
"""
import logging
from django import shortcuts
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.http import same_origin
from django.utils.translation import ugettext as _
from keystoneclient import exceptions as keystone_exceptions
from horizon import api
from horizon import base
from horizon import exceptions
from horizon import forms
from horizon import users
LOG = logging.getLogger(__name__)
def _set_session_data(request, token):
request.session['serviceCatalog'] = token.serviceCatalog
request.session['tenant'] = token.tenant['name']
request.session['tenant_id'] = token.tenant['id']
request.session['token'] = token.id
request.session['user_name'] = token.user['name']
request.session['user_id'] = token.user['id']
request.session['roles'] = token.user['roles']
class Login(forms.SelfHandlingForm):
    """ Form used for logging in a user.

    Handles authentication with Keystone, choosing a tenant, and fetching
    a scoped token for that tenant. Redirects to the URL returned
    by :meth:`horizon.get_user_home` if successful.

    Subclass of :class:`~horizon.forms.SelfHandlingForm`.
    """
    region = forms.ChoiceField(label=_("Region"), required=False)
    username = forms.CharField(label=_("User Name"))
    password = forms.CharField(label=_("Password"),
                               widget=forms.PasswordInput(render_value=False))

    def __init__(self, *args, **kwargs):
        """Populate the region choices; hide the field when only one exists."""
        super(Login, self).__init__(*args, **kwargs)
        # FIXME(gabriel): When we switch to region-only settings, we can
        # remove this default region business.
        default_region = (settings.OPENSTACK_KEYSTONE_URL, "Default Region")
        regions = getattr(settings, 'AVAILABLE_REGIONS', [default_region])
        self.fields['region'].choices = regions
        if len(regions) == 1:
            self.fields['region'].initial = default_region[0]
            self.fields['region'].widget = forms.widgets.HiddenInput()

    def handle(self, request, data):
        """Authenticate against Keystone and redirect on success.

        Security-sensitive ordering: the session is flushed if it belongs
        to a different user, the session key is always cycled (session
        fixation defense), and the ``next`` redirect target is discarded
        unless it shares this request's origin (open-redirect defense).
        Do not reorder these steps.
        """
        if 'user_name' in request.session:
            if request.session['user_name'] != data['username']:
                # To avoid reusing another user's session, create a
                # new, empty session if the existing session
                # corresponds to a different authenticated user.
                request.session.flush()
        # Always cycle the session key when viewing the login form to
        # prevent session fixation
        request.session.cycle_key()

        # For now we'll allow fallback to OPENSTACK_KEYSTONE_URL if the
        # form post doesn't include a region.
        endpoint = data.get('region', None) or settings.OPENSTACK_KEYSTONE_URL
        region_name = dict(self.fields['region'].choices)[endpoint]
        request.session['region_endpoint'] = endpoint
        request.session['region_name'] = region_name

        redirect_to = request.REQUEST.get(REDIRECT_FIELD_NAME, None)

        # Make sure the requested redirect matches the protocol,
        # domain, and port of this request
        if redirect_to and not same_origin(
                request.build_absolute_uri(redirect_to),
                request.build_absolute_uri()):
            redirect_to = None

        if data.get('tenant', None):
            # A tenant was supplied directly: get a token scoped to it.
            try:
                token = api.token_create(request,
                                         data.get('tenant'),
                                         data['username'],
                                         data['password'])
                tenants = api.tenant_list_for_token(request, token.id)
            # NOTE(review): bare except — escalate=True re-raises after
            # handling, so execution does not fall through with ``token``
            # unbound; still, ``except Exception`` would be safer. Confirm.
            except:
                msg = _('Unable to authenticate for that project.')
                exceptions.handle(request,
                                  message=msg,
                                  escalate=True)
            _set_session_data(request, token)
            user = users.get_user_from_request(request)
            redirect = redirect_to or base.Horizon.get_user_home(user)
            return shortcuts.redirect(redirect)

        elif data.get('username', None):
            # No tenant chosen: authenticate unscoped, then pick a tenant.
            try:
                unscoped_token = api.token_create(request,
                                                  '',
                                                  data['username'],
                                                  data['password'])
            except keystone_exceptions.Unauthorized:
                exceptions.handle(request,
                                  _('Invalid user name or password.'))
            except:
                # If we get here we don't want to show a stack trace to the
                # user. However, if we fail here, there may be bad session
                # data that's been cached already.
                request.user_logout()
                exceptions.handle(request,
                                  message=_("An error occurred authenticating."
                                            " Please try again later."),
                                  escalate=True)

            # Unscoped token
            request.session['unscoped_token'] = unscoped_token.id
            request.user.username = data['username']

            # Get the tenant list, and log in using first tenant
            # FIXME (anthony): add tenant chooser here?
            try:
                tenants = api.tenant_list_for_token(request, unscoped_token.id)
            except:
                exceptions.handle(request)
                tenants = []

            # Abort if there are no valid tenants for this user
            # NOTE(review): the message has no %-placeholder, so the
            # ``% {"user": ...}`` formatting is a no-op — confirm whether
            # the user name was meant to appear in the message.
            if not tenants:
                messages.error(request,
                               _('You are not authorized for any projects.') %
                               {"user": data['username']},
                               extra_tags="login")
                return

            # Create a token.
            # NOTE(gabriel): Keystone can return tenants that you're
            # authorized to administer but not to log into as a user, so in
            # the case of an Unauthorized error we should iterate through
            # the tenants until one succeeds or we've failed them all.
            while tenants:
                tenant = tenants.pop()
                try:
                    token = api.token_create_scoped(request,
                                                    tenant.id,
                                                    unscoped_token.id)
                    break
                except:
                    # This will continue for recognized Unauthorized
                    # exceptions from keystoneclient.
                    exceptions.handle(request, ignore=True)
                    token = None
            if token is None:
                raise exceptions.NotAuthorized(
                    _("You are not authorized for any available projects."))

            _set_session_data(request, token)
            user = users.get_user_from_request(request)
            redirect = redirect_to or base.Horizon.get_user_home(user)
            return shortcuts.redirect(redirect)
class LoginWithTenant(Login):
    """
    Exactly like :class:`.Login` but includes the tenant id as a field
    so that the process of choosing a default tenant is bypassed.
    """
    region = forms.ChoiceField(required=False)
    # Username is fixed (read-only) because the tenant was already chosen
    # for this user.
    # NOTE(review): ``max_length`` is passed as the string "20" rather than
    # the int 20; Django tolerates it, but this looks unintentional — confirm.
    username = forms.CharField(max_length="20",
                               widget=forms.TextInput(attrs={'readonly': 'readonly'}))
    # Pre-selected tenant id, carried through the form as a hidden field.
    tenant = forms.CharField(widget=forms.HiddenInput())
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3723_0 |
crossvul-python_data_good_2141_2 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible import utils
class ReturnData(object):
    ''' internal return class for runner execute methods, not part of public API signature '''

    __slots__ = ['result', 'comm_ok', 'host', 'diff']

    def __init__(self, conn=None, host=None, result=None,
                 comm_ok=True, diff=None):
        """Normalize a module result.

        :param conn: connection object; its (delegated) host wins over ``host``
        :param host: host name, used when no connection is given
        :param result: dict, or a JSON string that is parsed from_remote
        :param comm_ok: whether communication with the host succeeded
        :param diff: optional per-file diff data for --diff output
        """
        # which host is this ReturnData about?
        if conn is not None:
            self.host = conn.host
            delegate = getattr(conn, 'delegate', None)
            if delegate is not None:
                self.host = delegate
        else:
            self.host = host

        self.result = result
        self.comm_ok = comm_ok

        # if these values are set and used with --diff we can show
        # changes made to particular files
        # FIX: the default was ``diff=dict()`` — a shared mutable default,
        # so every instance built without an explicit diff aliased the
        # same dict and mutations leaked across instances.
        self.diff = {} if diff is None else diff

        # ``unicode`` only exists on Python 2; guard so the type check
        # doesn't NameError on Python 3 (where str covers both).
        try:
            text_types = (str, unicode)
        except NameError:
            text_types = (str,)
        if isinstance(self.result, text_types):
            self.result = utils.parse_json(self.result, from_remote=True)

        if self.host is None:
            raise Exception("host not set")
        if not isinstance(self.result, dict):
            raise Exception("dictionary result expected")

    def communicated_ok(self):
        """Return True if communication with the host succeeded."""
        return self.comm_ok

    def is_successful(self):
        """Return True if the task neither failed nor returned a bad rc.

        ``failed_when_result``, when present, overrides the rc check.
        (Untangled from the original one-line ``and``/``or`` list trick;
        the truth table is unchanged.)
        """
        if not self.comm_ok:
            return False
        if self.result.get('failed', False) != False:
            return False
        if 'failed_when_result' in self.result:
            return not self.result['failed_when_result']
        return self.result.get('rc', 0) == 0
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_2141_2 |
crossvul-python_data_good_3659_2 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2011 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import context
from nova import db
from nova import flags
from nova import log as logging
from nova.openstack.common import cfg
from nova import utils
from nova.virt import netutils
LOG = logging.getLogger(__name__)
allow_same_net_traffic_opt = cfg.BoolOpt('allow_same_net_traffic',
default=True,
help='Whether to allow network traffic from same network')
FLAGS = flags.FLAGS
FLAGS.register_opt(allow_same_net_traffic_opt)
class FirewallDriver(object):
    """ Firewall Driver base class.

    Defines methods that any driver providing security groups
    and provider firewall functionality should implement.
    """
    def prepare_instance_filter(self, instance, network_info):
        """Prepare filters for the instance.

        At this point, the instance isn't running yet."""
        raise NotImplementedError()

    def unfilter_instance(self, instance, network_info):
        """Stop filtering instance"""
        raise NotImplementedError()

    def apply_instance_filter(self, instance, network_info):
        """Apply instance filter.

        Once this method returns, the instance should be firewalled
        appropriately. This method should as far as possible be a
        no-op. It's vastly preferred to get everything set up in
        prepare_instance_filter.
        """
        raise NotImplementedError()

    def refresh_security_group_rules(self, security_group_id):
        """Refresh security group rules from data store

        Gets called when a rule has been added to or removed from
        the security group."""
        raise NotImplementedError()

    def refresh_security_group_members(self, security_group_id):
        """Refresh security group members from data store

        Gets called when an instance gets added to or removed from
        the security group."""
        raise NotImplementedError()

    def refresh_provider_fw_rules(self):
        """Refresh common rules for all hosts/instances from data store.

        Gets called when a rule has been added to or removed from
        the list of rules (via admin api).
        """
        raise NotImplementedError()

    def setup_basic_filtering(self, instance, network_info):
        """Create rules to block spoofing and allow dhcp.

        This gets called when spawning an instance, before
        :py:meth:`prepare_instance_filter`.
        """
        raise NotImplementedError()

    def instance_filter_exists(self, instance, network_info):
        """Check nova-instance-instance-xxx exists"""
        raise NotImplementedError()
class IptablesFirewallDriver(FirewallDriver):
    """Driver which enforces security groups through iptables rules.

    Per-instance chains jump from 'local'; unmatched traffic falls through
    to the 'sg-fallback' chain and is dropped. Rule building and rule
    application are separated: mutations accumulate in the
    IptablesManager and take effect on ``self.iptables.apply()``.
    """

    def __init__(self, **kwargs):
        # Imported here (not at module level) — presumably to avoid a
        # circular import with nova.network; confirm before moving it.
        from nova.network import linux_net
        self.iptables = linux_net.iptables_manager
        self.instances = {}
        self.network_infos = {}
        self.basicly_filtered = False

        # Default-deny: anything that reaches sg-fallback is dropped.
        self.iptables.ipv4['filter'].add_chain('sg-fallback')
        self.iptables.ipv4['filter'].add_rule('sg-fallback', '-j DROP')
        self.iptables.ipv6['filter'].add_chain('sg-fallback')
        self.iptables.ipv6['filter'].add_rule('sg-fallback', '-j DROP')

    def setup_basic_filtering(self, instance, network_info):
        # Anti-spoofing/dhcp setup is handled elsewhere for this driver.
        pass

    def apply_instance_filter(self, instance, network_info):
        """No-op. Everything is done in prepare_instance_filter."""
        pass

    def unfilter_instance(self, instance, network_info):
        """Remove the instance's chain and forget its cached state."""
        if self.instances.pop(instance['id'], None):
            # NOTE(vish): use the passed info instead of the stored info
            self.network_infos.pop(instance['id'])
            self.remove_filters_for_instance(instance)
            self.iptables.apply()
        else:
            LOG.info(_('Attempted to unfilter instance %s which is not '
                       'filtered'), instance['id'])

    def prepare_instance_filter(self, instance, network_info):
        """Cache state, build the instance chain, and apply the tables."""
        self.instances[instance['id']] = instance
        self.network_infos[instance['id']] = network_info
        self.add_filters_for_instance(instance)
        LOG.debug(_('Filters added to instance %s'), instance['uuid'])
        self.refresh_provider_fw_rules()
        LOG.debug(_('Provider Firewall Rules refreshed'))
        self.iptables.apply()

    def _create_filter(self, ips, chain_name):
        # One jump rule per destination address.
        return ['-d %s -j $%s' % (ip, chain_name) for ip in ips]

    def _filters_for_instance(self, chain_name, network_info):
        """Creates a rule corresponding to each ip that defines a
        jump to the corresponding instance - chain for all the traffic
        destined to that ip."""
        ips_v4 = [ip['ip'] for (_n, mapping) in network_info
                  for ip in mapping['ips']]
        ipv4_rules = self._create_filter(ips_v4, chain_name)
        ipv6_rules = []
        if FLAGS.use_ipv6:
            ips_v6 = [ip['ip'] for (_n, mapping) in network_info
                      for ip in mapping['ip6s']]
            ipv6_rules = self._create_filter(ips_v6, chain_name)
        return ipv4_rules, ipv6_rules

    def _add_filters(self, chain_name, ipv4_rules, ipv6_rules):
        # Stage the rules; nothing hits the kernel until apply().
        for rule in ipv4_rules:
            self.iptables.ipv4['filter'].add_rule(chain_name, rule)
        if FLAGS.use_ipv6:
            for rule in ipv6_rules:
                self.iptables.ipv6['filter'].add_rule(chain_name, rule)

    def add_filters_for_instance(self, instance):
        """Create the per-instance chain and populate it from the groups."""
        network_info = self.network_infos[instance['id']]
        chain_name = self._instance_chain_name(instance)
        if FLAGS.use_ipv6:
            self.iptables.ipv6['filter'].add_chain(chain_name)
        self.iptables.ipv4['filter'].add_chain(chain_name)
        # Jump rules from 'local' route traffic for this instance's IPs
        # into its dedicated chain.
        ipv4_rules, ipv6_rules = self._filters_for_instance(chain_name,
                                                            network_info)
        self._add_filters('local', ipv4_rules, ipv6_rules)
        ipv4_rules, ipv6_rules = self.instance_rules(instance, network_info)
        self._add_filters(chain_name, ipv4_rules, ipv6_rules)

    def remove_filters_for_instance(self, instance):
        chain_name = self._instance_chain_name(instance)
        self.iptables.ipv4['filter'].remove_chain(chain_name)
        if FLAGS.use_ipv6:
            self.iptables.ipv6['filter'].remove_chain(chain_name)

    @staticmethod
    def _security_group_chain_name(security_group_id):
        return 'nova-sg-%s' % (security_group_id,)

    def _instance_chain_name(self, instance):
        return 'inst-%s' % (instance['id'],)

    def _do_basic_rules(self, ipv4_rules, ipv6_rules, network_info):
        # Always drop invalid packets
        ipv4_rules += ['-m state --state ' 'INVALID -j DROP']
        ipv6_rules += ['-m state --state ' 'INVALID -j DROP']

        # Allow established connections
        ipv4_rules += ['-m state --state ESTABLISHED,RELATED -j ACCEPT']
        ipv6_rules += ['-m state --state ESTABLISHED,RELATED -j ACCEPT']

        # Pass through provider-wide drops
        ipv4_rules += ['-j $provider']
        ipv6_rules += ['-j $provider']

    def _do_dhcp_rules(self, ipv4_rules, network_info):
        # Accept DHCP replies (server port 67 -> client port 68).
        dhcp_servers = [info['dhcp_server'] for (_n, info) in network_info]

        for dhcp_server in dhcp_servers:
            ipv4_rules.append('-s %s -p udp --sport 67 --dport 68 '
                              '-j ACCEPT' % (dhcp_server,))

    def _do_project_network_rules(self, ipv4_rules, ipv6_rules, network_info):
        # Accept anything originating inside the project's own CIDRs.
        cidrs = [network['cidr'] for (network, _i) in network_info]
        for cidr in cidrs:
            ipv4_rules.append('-s %s -j ACCEPT' % (cidr,))
        if FLAGS.use_ipv6:
            cidrv6s = [network['cidr_v6'] for (network, _i) in
                       network_info]
            for cidrv6 in cidrv6s:
                ipv6_rules.append('-s %s -j ACCEPT' % (cidrv6,))

    def _do_ra_rules(self, ipv6_rules, network_info):
        # Accept router advertisements from each gateway.
        gateways_v6 = [mapping['gateway_v6'] for (_n, mapping) in
                       network_info]
        for gateway_v6 in gateways_v6:
            ipv6_rules.append(
                '-s %s/128 -p icmpv6 -j ACCEPT' % (gateway_v6,))

    def _build_icmp_rule(self, rule, version):
        # For ICMP rules from_port carries the icmp type and to_port the
        # icmp code; -1 means "any".
        icmp_type = rule.from_port
        icmp_code = rule.to_port

        if icmp_type == -1:
            icmp_type_arg = None
        else:
            icmp_type_arg = '%s' % icmp_type
            if not icmp_code == -1:
                icmp_type_arg += '/%s' % icmp_code

        if icmp_type_arg:
            if version == 4:
                return ['-m', 'icmp', '--icmp-type', icmp_type_arg]
            elif version == 6:
                return ['-m', 'icmp6', '--icmpv6-type', icmp_type_arg]
        # return empty list if icmp_type == -1
        return []

    def _build_tcp_udp_rule(self, rule, version):
        if rule.from_port == rule.to_port:
            return ['--dport', '%s' % (rule.from_port,)]
        else:
            return ['-m', 'multiport',
                    '--dports', '%s:%s' % (rule.from_port,
                                           rule.to_port)]

    def instance_rules(self, instance, network_info):
        """Build the (ipv4, ipv6) rule lists for one instance's chain."""
        ctxt = context.get_admin_context()

        ipv4_rules = []
        ipv6_rules = []

        # Initialize with basic rules
        self._do_basic_rules(ipv4_rules, ipv6_rules, network_info)
        # Set up rules to allow traffic to/from DHCP server
        self._do_dhcp_rules(ipv4_rules, network_info)

        #Allow project network traffic
        if FLAGS.allow_same_net_traffic:
            self._do_project_network_rules(ipv4_rules, ipv6_rules,
                                           network_info)
        # We wrap these in FLAGS.use_ipv6 because they might cause
        # a DB lookup. The other ones are just list operations, so
        # they're not worth the clutter.
        if FLAGS.use_ipv6:
            # Allow RA responses
            self._do_ra_rules(ipv6_rules, network_info)

        security_groups = db.security_group_get_by_instance(ctxt,
                                                            instance['id'])

        # then, security group chains and rules
        for security_group in security_groups:
            rules = db.security_group_rule_get_by_security_group(
                ctxt, security_group['id'])

            for rule in rules:
                LOG.debug(_('Adding security group rule: %r'), rule)

                if not rule.cidr:
                    version = 4
                else:
                    version = netutils.get_ip_version(rule.cidr)

                if version == 4:
                    fw_rules = ipv4_rules
                else:
                    fw_rules = ipv6_rules

                protocol = rule.protocol.lower()

                if version == 6 and protocol == 'icmp':
                    protocol = 'icmpv6'

                args = ['-j ACCEPT']
                if protocol:
                    args += ['-p', protocol]

                if protocol in ['udp', 'tcp']:
                    args += self._build_tcp_udp_rule(rule, version)
                elif protocol == 'icmp':
                    args += self._build_icmp_rule(rule, version)

                if rule.cidr:
                    # CIDR-based rule: a single source-matched ACCEPT.
                    LOG.info('Using cidr %r', rule.cidr)
                    args += ['-s', rule.cidr]
                    fw_rules += [' '.join(args)]
                else:
                    if rule['grantee_group']:
                        # Group-based rule: one ACCEPT per fixed IP of each
                        # member instance of the granting group.
                        # FIXME(jkoelker) This needs to be ported up into
                        #                 the compute manager which already
                        #                 has access to a nw_api handle,
                        #                 and should be the only one making
                        #                 making rpc calls.
                        import nova.network
                        nw_api = nova.network.API()
                        for instance in rule['grantee_group']['instances']:
                            LOG.info('instance: %r', instance)
                            nw_info = nw_api.get_instance_nw_info(ctxt,
                                                                  instance)

                            ips = [ip['address']
                                   for ip in nw_info.fixed_ips()
                                   if ip['version'] == version]

                            LOG.info('ips: %r', ips)

                            for ip in ips:
                                subrule = args + ['-s %s' % ip]
                                fw_rules += [' '.join(subrule)]

        LOG.info('Using fw_rules: %r', fw_rules)

        # Anything not matched above falls through to the default-deny
        # chain installed in __init__.
        ipv4_rules += ['-j $sg-fallback']
        ipv6_rules += ['-j $sg-fallback']

        return ipv4_rules, ipv6_rules

    def instance_filter_exists(self, instance, network_info):
        pass

    def refresh_security_group_members(self, security_group):
        self.do_refresh_security_group_rules(security_group)
        self.iptables.apply()

    def refresh_security_group_rules(self, security_group):
        self.do_refresh_security_group_rules(security_group)
        self.iptables.apply()

    @utils.synchronized('iptables', external=True)
    def do_refresh_security_group_rules(self, security_group):
        # Rebuild every tracked instance's chain from scratch.
        for instance in self.instances.values():
            self.remove_filters_for_instance(instance)
            self.add_filters_for_instance(instance)

    def refresh_provider_fw_rules(self):
        """See :class:`FirewallDriver` docs."""
        self._do_refresh_provider_fw_rules()
        self.iptables.apply()

    @utils.synchronized('iptables', external=True)
    def _do_refresh_provider_fw_rules(self):
        """Internal, synchronized version of refresh_provider_fw_rules."""
        self._purge_provider_fw_rules()
        self._build_provider_fw_rules()

    def _purge_provider_fw_rules(self):
        """Remove all rules from the provider chains."""
        self.iptables.ipv4['filter'].empty_chain('provider')
        if FLAGS.use_ipv6:
            self.iptables.ipv6['filter'].empty_chain('provider')

    def _build_provider_fw_rules(self):
        """Create all rules for the provider IP DROPs."""
        self.iptables.ipv4['filter'].add_chain('provider')
        if FLAGS.use_ipv6:
            self.iptables.ipv6['filter'].add_chain('provider')
        ipv4_rules, ipv6_rules = self._provider_rules()
        for rule in ipv4_rules:
            self.iptables.ipv4['filter'].add_rule('provider', rule)

        if FLAGS.use_ipv6:
            for rule in ipv6_rules:
                self.iptables.ipv6['filter'].add_rule('provider', rule)

    @staticmethod
    def _provider_rules():
        """Generate a list of rules from provider for IP4 & IP6."""
        ctxt = context.get_admin_context()
        ipv4_rules = []
        ipv6_rules = []
        rules = db.provider_fw_rule_get_all(ctxt)
        for rule in rules:
            LOG.debug(_('Adding provider rule: %s'), rule['cidr'])
            version = netutils.get_ip_version(rule['cidr'])
            if version == 4:
                fw_rules = ipv4_rules
            else:
                fw_rules = ipv6_rules

            protocol = rule['protocol']
            if version == 6 and protocol == 'icmp':
                protocol = 'icmpv6'

            args = ['-p', protocol, '-s', rule['cidr']]

            if protocol in ['udp', 'tcp']:
                if rule['from_port'] == rule['to_port']:
                    args += ['--dport', '%s' % (rule['from_port'],)]
                else:
                    args += ['-m', 'multiport',
                             '--dports', '%s:%s' % (rule['from_port'],
                                                    rule['to_port'])]
            elif protocol == 'icmp':
                # Same type/code encoding as _build_icmp_rule.
                icmp_type = rule['from_port']
                icmp_code = rule['to_port']

                if icmp_type == -1:
                    icmp_type_arg = None
                else:
                    icmp_type_arg = '%s' % icmp_type
                    if not icmp_code == -1:
                        icmp_type_arg += '/%s' % icmp_code

                if icmp_type_arg:
                    if version == 4:
                        args += ['-m', 'icmp', '--icmp-type',
                                 icmp_type_arg]
                    elif version == 6:
                        args += ['-m', 'icmp6', '--icmpv6-type',
                                 icmp_type_arg]
            # Provider rules DROP (blacklist), unlike security-group
            # rules, which ACCEPT (whitelist).
            args += ['-j DROP']
            fw_rules += [' '.join(args)]
        return ipv4_rules, ipv6_rules
class NoopFirewallDriver(object):
    """Firewall driver that implements every operation as a no-op."""

    def __init__(self, *args, **kwargs):
        # Accept (and ignore) whatever arguments the loader passes in.
        pass

    def _noop(self, *args, **kwargs):
        return None

    def __getattr__(self, key):
        # Any attribute not defined explicitly resolves to the do-nothing
        # callable, so every FirewallDriver method call is accepted.
        return self._noop

    def instance_filter_exists(self, instance, network_info):
        # Report that filtering is always in place.
        return True
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_3659_2 |
crossvul-python_data_good_5399_0 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import six
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
@utils.register_interface(KeyDerivationFunction)
class HKDF(object):
    """HMAC-based Extract-and-Expand Key Derivation Function (RFC 5869).

    ``derive`` performs the extract step (HMAC of the key material keyed
    with the salt) and feeds the resulting pseudo-random key to
    :class:`HKDFExpand`.
    """

    def __init__(self, algorithm, length, salt, info, backend):
        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )
        self._algorithm = algorithm

        if not (salt is None or isinstance(salt, bytes)):
            raise TypeError("salt must be bytes.")

        if salt is None:
            # FIX: RFC 5869 section 2.2 defines the default salt as a
            # string of HashLen zero octets. ``digest_size`` is already
            # measured in bytes, so the former ``digest_size // 8``
            # produced a salt 8x too short, breaking interoperability
            # with conforming implementations (and the RFC's own test
            # vectors). NOTE: this changes keys derived with salt=None.
            salt = b"\x00" * self._algorithm.digest_size

        self._salt = salt
        self._backend = backend
        self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend)

    def _extract(self, key_material):
        # HKDF-Extract: PRK = HMAC-Hash(salt, IKM)
        h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend)
        h.update(key_material)
        return h.finalize()

    def derive(self, key_material):
        """Derive ``length`` bytes of key from ``key_material``."""
        if not isinstance(key_material, bytes):
            raise TypeError("key_material must be bytes.")
        return self._hkdf_expand.derive(self._extract(key_material))

    def verify(self, key_material, expected_key):
        """Raise InvalidKey unless derive(key_material) == expected_key."""
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
@utils.register_interface(KeyDerivationFunction)
class HKDFExpand(object):
    """The expand step of HKDF (RFC 5869 section 2.3).

    Usable standalone when the caller already holds a cryptographically
    strong pseudo-random key. Single-use: ``derive`` may be called once.
    """

    def __init__(self, algorithm, length, info, backend):
        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )
        self._algorithm = algorithm

        self._backend = backend

        # FIX: RFC 5869 limits the output to 255 * HashLen octets, and
        # ``digest_size`` is already in bytes; the former
        # ``255 * (digest_size // 8)`` capped output at 1/8th of the
        # allowed maximum. (Loosening the cap is backward compatible.)
        max_length = 255 * algorithm.digest_size

        if length > max_length:
            raise ValueError(
                "Can not derive keys larger than {0} octets.".format(
                    max_length
                ))

        self._length = length

        if not (info is None or isinstance(info, bytes)):
            raise TypeError("info must be bytes.")

        if info is None:
            info = b""

        self._info = info
        self._used = False

    def _expand(self, key_material):
        # T(0) = empty; T(n) = HMAC(PRK, T(n-1) | info | n); the output is
        # the concatenation of the T(n), truncated to the requested length.
        output = [b""]
        counter = 1

        while self._algorithm.digest_size * (len(output) - 1) < self._length:
            h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
            h.update(output[-1])
            h.update(self._info)
            h.update(six.int2byte(counter))
            output.append(h.finalize())
            counter += 1

        return b"".join(output)[:self._length]

    def derive(self, key_material):
        """Derive ``length`` bytes from ``key_material`` (the PRK); one-shot."""
        if not isinstance(key_material, bytes):
            raise TypeError("key_material must be bytes.")

        if self._used:
            raise AlreadyFinalized

        self._used = True
        return self._expand(key_material)

    def verify(self, key_material, expected_key):
        """Raise InvalidKey unless derive(key_material) == expected_key."""
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
| ./CrossVul/dataset_final_sorted/CWE-20/py/good_5399_0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.