code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin

# URL routing for the project.
# NOTE: ``patterns('', ...)`` was deprecated in Django 1.8 and removed in
# 1.10; the supported form is a plain list of url() instances, which is
# what the (empty-prefix) patterns() call produced anyway.
urlpatterns = [
    # Examples:
    # url(r'^$', 'isucdc2.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^', include('cdc.urls', namespace="cdc")),
    # Serve user-uploaded media files; static() is a no-op when DEBUG is off
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| ISEAGE-ISU/cdc2-2015-www | isucdc2/urls.py | Python | mit | 469 |
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
'''The main GUI'''
import collections, os, sys, textwrap, time, gc, errno
from Queue import Queue, Empty
from threading import Thread
from collections import OrderedDict
from io import BytesIO
import apsw
from PyQt5.Qt import (
Qt, QTimer, QAction, QMenu, QIcon, pyqtSignal, QUrl, QFont, QDialog,
QApplication, QSystemTrayIcon)
from calibre import prints, force_unicode, detect_ncpus
from calibre.constants import (
__appname__, isosx, iswindows, filesystem_encoding, DEBUG)
from calibre.utils.config import prefs, dynamic
from calibre.utils.ipc.pool import Pool
from calibre.db.legacy import LibraryDatabase
from calibre.customize.ui import interface_actions, available_store_plugins
from calibre.gui2 import (error_dialog, GetMetadata, open_url,
gprefs, max_available_height, config, info_dialog, Dispatcher,
question_dialog, warning_dialog)
from calibre.gui2.cover_flow import CoverFlowMixin
from calibre.gui2.widgets import ProgressIndicator
from calibre.gui2.update import UpdateMixin
from calibre.gui2.main_window import MainWindow
from calibre.gui2.layout import MainWindowMixin
from calibre.gui2.device import DeviceMixin
from calibre.gui2.email import EmailMixin
from calibre.gui2.ebook_download import EbookDownloadMixin
from calibre.gui2.jobs import JobManager, JobsDialog, JobsButton
from calibre.gui2.init import LibraryViewMixin, LayoutMixin
from calibre.gui2.search_box import SearchBoxMixin, SavedSearchBoxMixin
from calibre.gui2.search_restriction_mixin import SearchRestrictionMixin
from calibre.gui2.tag_browser.ui import TagBrowserMixin
from calibre.gui2.keyboard import Manager
from calibre.gui2.auto_add import AutoAdder
from calibre.gui2.proceed import ProceedQuestion
from calibre.gui2.dialogs.message_box import JobError
from calibre.gui2.job_indicator import Pointer
from calibre.gui2.dbus_export.widgets import factory
from calibre.gui2.open_with import register_keyboard_shortcuts
from calibre.library import current_library_name
class Listener(Thread):  # {{{

    """Background thread that accepts IPC connections on *listener* and
    forwards every received message onto ``self.queue``."""

    def __init__(self, listener):
        Thread.__init__(self)
        self.daemon = True
        self.listener = listener
        self.queue = Queue()
        self._run = True
        self.start()

    def run(self):
        # No IPC listener was created (e.g. another instance owns it):
        # nothing to do.
        if self.listener is None:
            return
        while self._run:
            try:
                connection = self.listener.accept()
                self.queue.put(connection.recv())
            except:
                # accept()/recv() may fail transiently, or raise when
                # close() shuts the listener down; keep looping until
                # explicitly told to stop.
                continue

    def close(self):
        'Stop the accept loop and close the underlying listener'
        self._run = False
        try:
            if self.listener is not None:
                self.listener.close()
        except:
            import traceback
            traceback.print_exc()
# }}}
# Module-level reference to the single running Main instance; it is
# published by Main.__init__ and read back via get_gui().
_gui = None


def get_gui():
    'Return the global Main GUI instance (None before the GUI is created)'
    return _gui
def add_quick_start_guide(library_view, refresh_cover_browser=None):
    """Add the Quick Start Guide EPUB to the current library.

    Picks the guide matching the user interface language (falling back to
    English), stamps a freshly generated cover into it in memory, and adds
    the result to *library_view*'s database.  Sets the
    ``quick_start_guide_added`` flag in gprefs so this happens only once.

    :param library_view: the library book-list view whose model receives
        the new book
    :param refresh_cover_browser: optional callable invoked after the book
        is added, so the cover browser shows the new entry
    """
    from calibre.ebooks.metadata.meta import get_metadata
    from calibre.ebooks import calibre_cover
    from calibre.utils.zipfile import safe_replace
    from calibre.utils.localization import get_lang, canonicalize_lang
    from calibre.ptempfile import PersistentTemporaryFile
    l = canonicalize_lang(get_lang()) or 'eng'
    gprefs['quick_start_guide_added'] = True
    imgbuf = BytesIO(calibre_cover(_('Quick Start Guide'), '', author_size=8))
    try:
        with open(P('quick_start/%s.epub' % l), 'rb') as src:
            buf = BytesIO(src.read())
    except EnvironmentError as err:
        if err.errno != errno.ENOENT:
            raise
        # No translated guide for this language: fall back to English
        with open(P('quick_start/eng.epub'), 'rb') as src:
            buf = BytesIO(src.read())
    # Replace the bundled cover image with the generated one, in memory
    safe_replace(buf, 'images/cover.jpg', imgbuf)
    buf.seek(0)
    mi = get_metadata(buf, 'epub')
    with PersistentTemporaryFile('.epub') as tmp:
        tmp.write(buf.getvalue())
    library_view.model().add_books([tmp.name], ['epub'], [mi])
    os.remove(tmp.name)
    library_view.model().books_added(1)
    if refresh_cover_browser is not None:
        refresh_cover_browser()
    # With very few books, size the columns to their content
    if library_view.model().rowCount(None) < 3:
        library_view.resizeColumnsToContents()
class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
TagBrowserMixin, CoverFlowMixin, LibraryViewMixin, SearchBoxMixin,
SavedSearchBoxMixin, SearchRestrictionMixin, LayoutMixin, UpdateMixin,
EbookDownloadMixin
):
'The main GUI'
proceed_requested = pyqtSignal(object, object)
book_converted = pyqtSignal(object, object)
def __init__(self, opts, parent=None, gui_debug=None):
    """Create the main window and load all interface-action plugins.

    :param opts: parsed command line options
    :param parent: optional Qt parent widget
    :param gui_debug: path to the debug log when calibre was started in
        debug mode, else None
    """
    global _gui
    MainWindow.__init__(self, opts, parent=parent, disable_automatic_gc=True)
    self.jobs_pointer = Pointer(self)
    # Queued connection so the proceed question is shown from the event
    # loop rather than from whatever code requested it
    self.proceed_requested.connect(self.do_proceed,
            type=Qt.QueuedConnection)
    self.proceed_question = ProceedQuestion(self)
    self.job_error_dialog = JobError(self)
    self.keyboard = Manager(self)
    _gui = self  # publish this instance for get_gui()
    self.opts = opts
    self.device_connected = None
    self.gui_debug = gui_debug
    self.iactions = OrderedDict()
    # Actions
    for action in interface_actions():
        if opts.ignore_plugins and action.plugin_path is not None:
            continue
        try:
            ac = self.init_iaction(action)
        except:
            # Ignore errors in loading user supplied plugins
            import traceback
            traceback.print_exc()
            if action.plugin_path is None:
                # Failures in builtin plugins are fatal
                raise
            continue
        ac.plugin_path = action.plugin_path
        ac.interface_action_base_plugin = action
        self.add_iaction(ac)
    self.load_store_plugins()
def init_iaction(self, action):
    'Instantiate the actual plugin for *action* and link it to its base plugin'
    plugin = action.load_actual_plugin(self)
    plugin.plugin_path = action.plugin_path
    plugin.interface_action_base_plugin = action
    action.actual_iaction_plugin_loaded = True
    return plugin
def add_iaction(self, ac):
    'Register *ac* in self.iactions; on a name clash the higher priority plugin wins'
    acmap = self.iactions
    should_add = ac.name not in acmap or ac.priority >= acmap[ac.name].priority
    if should_add:
        acmap[ac.name] = ac
def load_store_plugins(self):
    """Load all available store plugins into self.istores, skipping
    external plugins when --ignore-plugins was specified."""
    from calibre.gui2.store.loader import Stores
    self.istores = Stores()
    for store in available_store_plugins():
        if self.opts.ignore_plugins and store.plugin_path is not None:
            continue
        try:
            st = self.init_istore(store)
            self.add_istore(st)
        except:
            # Ignore errors in loading user supplied plugins
            import traceback
            traceback.print_exc()
            if store.plugin_path is None:
                # A failure in a builtin store plugin is a real bug
                raise
            continue
    self.istores.builtins_loaded()
def init_istore(self, store):
    'Instantiate the actual store plugin for *store* and link it to its base plugin'
    plugin = store.load_actual_plugin(self)
    plugin.plugin_path = store.plugin_path
    plugin.base_plugin = store
    store.actual_istore_plugin_loaded = True
    return plugin
def add_istore(self, st):
    'Register *st* in self.istores; on a name clash the higher priority plugin wins'
    stmap = self.istores
    should_add = st.name not in stmap or st.priority >= stmap[st.name].priority
    if should_add:
        stmap[st.name] = st
def initialize(self, library_path, db, listener, actions, show_gui=True):
opts = self.opts
self.preferences_action, self.quit_action = actions
self.library_path = library_path
self.content_server = None
self._spare_pool = None
self.must_restart_before_config = False
self.listener = Listener(listener)
self.check_messages_timer = QTimer()
self.check_messages_timer.timeout.connect(self.another_instance_wants_to_talk)
self.check_messages_timer.start(1000)
for ac in self.iactions.values():
try:
ac.do_genesis()
except Exception:
# Ignore errors in third party plugins
import traceback
traceback.print_exc()
if getattr(ac, 'plugin_path', None) is None:
raise
self.donate_action = QAction(QIcon(I('donate.png')),
_('&Donate to support calibre'), self)
for st in self.istores.values():
st.do_genesis()
MainWindowMixin.init_main_window_mixin(self, db)
# Jobs Button {{{
self.job_manager = JobManager()
self.jobs_dialog = JobsDialog(self, self.job_manager)
self.jobs_button = JobsButton(horizontal=True, parent=self)
self.jobs_button.initialize(self.jobs_dialog, self.job_manager)
# }}}
LayoutMixin.init_layout_mixin(self)
DeviceMixin.init_device_mixin(self)
self.progress_indicator = ProgressIndicator(self)
self.progress_indicator.pos = (0, 20)
self.verbose = opts.verbose
self.get_metadata = GetMetadata()
self.upload_memory = {}
self.metadata_dialogs = []
self.default_thumbnail = None
self.tb_wrapper = textwrap.TextWrapper(width=40)
self.viewers = collections.deque()
self.system_tray_icon = None
if config['systray_icon']:
self.system_tray_icon = factory(app_id='com.calibre-ebook.gui').create_system_tray_icon(parent=self, title='calibre')
if self.system_tray_icon is not None:
self.system_tray_icon.setIcon(QIcon(I('lt.png')))
if not (iswindows or isosx):
self.system_tray_icon.setIcon(QIcon.fromTheme('calibre-gui', QIcon(I('lt.png'))))
self.system_tray_icon.setToolTip(self.jobs_button.tray_tooltip())
self.system_tray_icon.setVisible(True)
self.jobs_button.tray_tooltip_updated.connect(self.system_tray_icon.setToolTip)
elif config['systray_icon']:
prints('Failed to create system tray icon, your desktop environment probably does not support the StatusNotifier spec')
self.system_tray_menu = QMenu(self)
self.toggle_to_tray_action = self.system_tray_menu.addAction(QIcon(I('page.png')), '')
self.toggle_to_tray_action.triggered.connect(self.system_tray_icon_activated)
self.system_tray_menu.addAction(self.donate_action)
self.donate_button.clicked.connect(self.donate_action.trigger)
self.donate_button.setToolTip(self.donate_action.text().replace('&', ''))
self.donate_button.setIcon(self.donate_action.icon())
self.donate_button.setStatusTip(self.donate_button.toolTip())
self.eject_action = self.system_tray_menu.addAction(
QIcon(I('eject.png')), _('&Eject connected device'))
self.eject_action.setEnabled(False)
self.addAction(self.quit_action)
self.system_tray_menu.addAction(self.quit_action)
self.keyboard.register_shortcut('quit calibre', _('Quit calibre'),
default_keys=('Ctrl+Q',), action=self.quit_action)
if self.system_tray_icon is not None:
self.system_tray_icon.setContextMenu(self.system_tray_menu)
self.system_tray_icon.activated.connect(self.system_tray_icon_activated)
self.quit_action.triggered[bool].connect(self.quit)
self.donate_action.triggered[bool].connect(self.donate)
self.esc_action = QAction(self)
self.addAction(self.esc_action)
self.keyboard.register_shortcut('clear current search',
_('Clear the current search'), default_keys=('Esc',),
action=self.esc_action)
self.esc_action.triggered.connect(self.esc)
self.shift_esc_action = QAction(self)
self.addAction(self.shift_esc_action)
self.keyboard.register_shortcut('focus book list',
_('Focus the book list'), default_keys=('Shift+Esc',),
action=self.shift_esc_action)
self.shift_esc_action.triggered.connect(self.shift_esc)
self.ctrl_esc_action = QAction(self)
self.addAction(self.ctrl_esc_action)
self.keyboard.register_shortcut('clear virtual library',
_('Clear the virtual library'), default_keys=('Ctrl+Esc',),
action=self.ctrl_esc_action)
self.ctrl_esc_action.triggered.connect(self.ctrl_esc)
self.alt_esc_action = QAction(self)
self.addAction(self.alt_esc_action)
self.keyboard.register_shortcut('clear additional restriction',
_('Clear the additional restriction'), default_keys=('Alt+Esc',),
action=self.alt_esc_action)
self.alt_esc_action.triggered.connect(self.clear_additional_restriction)
# ###################### Start spare job server ########################
QTimer.singleShot(1000, self.create_spare_pool)
# ###################### Location Manager ########################
self.location_manager.location_selected.connect(self.location_selected)
self.location_manager.unmount_device.connect(self.device_manager.umount_device)
self.location_manager.configure_device.connect(self.configure_connected_device)
self.location_manager.update_device_metadata.connect(self.update_metadata_on_device)
self.eject_action.triggered.connect(self.device_manager.umount_device)
# ################### Update notification ###################
UpdateMixin.init_update_mixin(self, opts)
# ###################### Search boxes ########################
SearchRestrictionMixin.init_search_restirction_mixin(self)
SavedSearchBoxMixin.init_saved_seach_box_mixin(self)
# ###################### Library view ########################
LibraryViewMixin.init_library_view_mixin(self, db)
SearchBoxMixin.init_search_box_mixin(self) # Requires current_db
if show_gui:
self.show()
if self.system_tray_icon is not None and self.system_tray_icon.isVisible() and opts.start_in_tray:
self.hide_windows()
self.library_view.model().count_changed_signal.connect(
self.iactions['Choose Library'].count_changed)
if not gprefs.get('quick_start_guide_added', False):
try:
add_quick_start_guide(self.library_view)
except:
import traceback
traceback.print_exc()
for view in ('library', 'memory', 'card_a', 'card_b'):
v = getattr(self, '%s_view' % view)
v.selectionModel().selectionChanged.connect(self.update_status_bar)
v.model().count_changed_signal.connect(self.update_status_bar)
self.library_view.model().count_changed()
self.bars_manager.database_changed(self.library_view.model().db)
self.library_view.model().database_changed.connect(self.bars_manager.database_changed,
type=Qt.QueuedConnection)
# ########################## Tags Browser ##############################
TagBrowserMixin.init_tag_browser_mixin(self, db)
# ######################## Search Restriction ##########################
if db.prefs['virtual_lib_on_startup']:
self.apply_virtual_library(db.prefs['virtual_lib_on_startup'])
self.rebuild_vl_tabs()
# ########################## Cover Flow ################################
CoverFlowMixin.init_cover_flow_mixin(self)
self._calculated_available_height = min(max_available_height()-15,
self.height())
self.resize(self.width(), self._calculated_available_height)
self.build_context_menus()
for ac in self.iactions.values():
try:
ac.gui_layout_complete()
except:
import traceback
traceback.print_exc()
if ac.plugin_path is None:
raise
if config['autolaunch_server']:
self.start_content_server()
self.keyboard_interrupt.connect(self.quit, type=Qt.QueuedConnection)
self.read_settings()
self.finalize_layout()
if self.bars_manager.showing_donate:
self.donate_button.start_animation()
self.set_window_title()
for ac in self.iactions.values():
try:
ac.initialization_complete()
except:
import traceback
traceback.print_exc()
if ac.plugin_path is None:
raise
self.set_current_library_information(current_library_name(), db.library_id,
db.field_metadata)
register_keyboard_shortcuts()
self.keyboard.finalize()
self.auto_adder = AutoAdder(gprefs['auto_add_path'], self)
self.save_layout_state()
# Collect cycles now
gc.collect()
if show_gui and self.gui_debug is not None:
QTimer.singleShot(10, self.show_gui_debug_msg)
self.iactions['Connect Share'].check_smartdevice_menus()
QTimer.singleShot(1, self.start_smartdevice)
QTimer.singleShot(100, self.update_toggle_to_tray_action)
def show_gui_debug_msg(self):
info_dialog(self, _('Debug mode'), '<p>' +
_('You have started calibre in debug mode. After you '
'quit calibre, the debug log will be available in '
'the file: %s<p>The '
'log will be displayed automatically.')%self.gui_debug, show=True)
def esc(self, *args):
self.clear_button.click()
def shift_esc(self):
self.current_view().setFocus(Qt.OtherFocusReason)
def ctrl_esc(self):
self.apply_virtual_library()
self.current_view().setFocus(Qt.OtherFocusReason)
def start_smartdevice(self):
message = None
if self.device_manager.get_option('smartdevice', 'autostart'):
try:
message = self.device_manager.start_plugin('smartdevice')
except:
message = 'start smartdevice unknown exception'
prints(message)
import traceback
traceback.print_exc()
if message:
if not self.device_manager.is_running('Wireless Devices'):
error_dialog(self, _('Problem starting the wireless device'),
_('The wireless device driver had problems starting. '
'It said "%s"')%message, show=True)
self.iactions['Connect Share'].set_smartdevice_action_state()
def start_content_server(self, check_started=True):
from calibre.library.server.main import start_threaded_server
from calibre.library.server import server_config
self.content_server = start_threaded_server(
self.library_view.model().db, server_config().parse())
self.content_server.state_callback = Dispatcher(
self.iactions['Connect Share'].content_server_state_changed)
if check_started:
self.content_server.start_failure_callback = \
Dispatcher(self.content_server_start_failed)
def content_server_start_failed(self, msg):
error_dialog(self, _('Failed to start Content Server'),
_('Could not start the content server. Error:\n\n%s')%msg,
show=True)
def resizeEvent(self, ev):
MainWindow.resizeEvent(self, ev)
self.search.setMaximumWidth(self.width()-150)
def create_spare_pool(self, *args):
if self._spare_pool is None:
num = min(detect_ncpus(), int(config['worker_limit']/2.0))
self._spare_pool = Pool(max_workers=num, name='GUIPool')
def spare_pool(self):
ans, self._spare_pool = self._spare_pool, None
QTimer.singleShot(1000, self.create_spare_pool)
return ans
def do_proceed(self, func, payload):
    'Run the proceed-question callback *func* with *payload*, if it is callable'
    if not callable(func):
        return
    func(payload)
def no_op(self, *args):
pass
def system_tray_icon_activated(self, r=False):
if r in (QSystemTrayIcon.Trigger, QSystemTrayIcon.MiddleClick, False):
if self.isVisible():
if self.isMinimized():
self.showNormal()
else:
self.hide_windows()
else:
self.show_windows()
if self.isMinimized():
self.showNormal()
@property
def is_minimized_to_tray(self):
return getattr(self, '__systray_minimized', False)
def ask_a_yes_no_question(self, title, msg, det_msg='',
show_copy_button=False, ans_when_user_unavailable=True,
skip_dialog_name=None, skipped_value=True):
if self.is_minimized_to_tray:
return ans_when_user_unavailable
return question_dialog(self, title, msg, det_msg=det_msg,
show_copy_button=show_copy_button,
skip_dialog_name=skip_dialog_name,
skip_dialog_skipped_value=skipped_value)
def update_toggle_to_tray_action(self, *args):
if hasattr(self, 'toggle_to_tray_action'):
self.toggle_to_tray_action.setText(
_('Hide main window') if self.isVisible() else _('Show main window'))
def hide_windows(self):
for window in QApplication.topLevelWidgets():
if isinstance(window, (MainWindow, QDialog)) and \
window.isVisible():
window.hide()
setattr(window, '__systray_minimized', True)
self.update_toggle_to_tray_action()
def show_windows(self, *args):
for window in QApplication.topLevelWidgets():
if getattr(window, '__systray_minimized', False):
window.show()
setattr(window, '__systray_minimized', False)
self.update_toggle_to_tray_action()
def test_server(self, *args):
if self.content_server is not None and \
self.content_server.exception is not None:
error_dialog(self, _('Failed to start content server'),
unicode(self.content_server.exception)).exec_()
@property
def current_db(self):
return self.library_view.model().db
def another_instance_wants_to_talk(self):
    """Poll the IPC queue for messages from other calibre instances and
    act on them: add files from another launch, refresh the database,
    shut down, or refresh an externally edited book."""
    try:
        msg = self.listener.queue.get_nowait()
    except Empty:
        return
    if msg.startswith('launched:'):
        # Another instance was launched with command line arguments; add
        # any readable files from its argv and bring this window forward
        import json
        try:
            argv = json.loads(msg[len('launched:'):])
        except ValueError:
            prints('Failed to decode message from other instance: %r' % msg)
            if DEBUG:
                error_dialog(self, 'Invalid message',
                    'Received an invalid message from other calibre instance.'
                    ' Do you have multiple versions of calibre installed?',
                    det_msg='Invalid msg: %r' % msg, show=True)
            argv = ()
        if isinstance(argv, (list, tuple)) and len(argv) > 1:
            files = [os.path.abspath(p) for p in argv[1:] if not os.path.isdir(p) and os.access(p, os.R_OK)]
            if files:
                self.iactions['Add Books'].add_filesystem_book(files)
        self.setWindowState(self.windowState() & ~Qt.WindowMinimized|Qt.WindowActive)
        self.show_windows()
        self.raise_()
        self.activateWindow()
    elif msg.startswith('refreshdb:'):
        # The library database was changed externally (e.g. by calibredb)
        m = self.library_view.model()
        m.db.new_api.reload_from_db()
        m.db.data.refresh(clear_caches=False, do_search=False)
        m.resort()
        m.research()
        self.tags_view.recount()
    elif msg.startswith('shutdown:'):
        self.quit(confirm_quit=False)
    elif msg.startswith('bookedited:'):
        # A book file was edited externally; refresh its cached metadata
        parts = msg.split(':')[1:]
        try:
            book_id, fmt, library_id = parts[:3]
            book_id = int(book_id)
            m = self.library_view.model()
            db = m.db.new_api
            if m.db.library_id == library_id and db.has_id(book_id):
                db.format_metadata(book_id, fmt, allow_cache=False, update_db=True)
                db.update_last_modified((book_id,))
                m.refresh_ids((book_id,))
        except Exception:
            import traceback
            traceback.print_exc()
    else:
        # Use the module's prints() helper instead of the py2-only
        # `print msg` statement, consistent with the rest of this file
        prints(msg)
def current_view(self):
    '''Convenience method that returns the currently visible view '''
    views = (self.library_view, self.memory_view,
             self.card_a_view, self.card_b_view)
    index = self.stack.currentIndex()
    if 0 <= index < len(views):
        return views[index]
    # Unknown stack index: no view (mirrors the original fall-through)
    return None
def booklists(self):
    'Return the (main memory, card A, card B) device book databases as a tuple'
    device_views = (self.memory_view, self.card_a_view, self.card_b_view)
    return tuple(v.model().db for v in device_views)
def library_moved(self, newloc, copy_structure=False, call_close=True,
allow_rebuild=False):
if newloc is None:
return
default_prefs = None
try:
olddb = self.library_view.model().db
if copy_structure:
default_prefs = olddb.prefs
from calibre.utils.formatter_functions import unload_user_template_functions
unload_user_template_functions(olddb.library_id)
except:
olddb = None
try:
db = LibraryDatabase(newloc, default_prefs=default_prefs)
except apsw.Error:
if not allow_rebuild:
raise
import traceback
repair = question_dialog(self, _('Corrupted database'),
_('The library database at %s appears to be corrupted. Do '
'you want calibre to try and rebuild it automatically? '
'The rebuild may not be completely successful.')
% force_unicode(newloc, filesystem_encoding),
det_msg=traceback.format_exc()
)
if repair:
from calibre.gui2.dialogs.restore_library import repair_library_at
if repair_library_at(newloc, parent=self):
db = LibraryDatabase(newloc, default_prefs=default_prefs)
else:
return
else:
return
if self.content_server is not None:
self.content_server.set_database(db)
self.library_path = newloc
prefs['library_path'] = self.library_path
self.book_on_device(None, reset=True)
db.set_book_on_device_func(self.book_on_device)
self.library_view.set_database(db)
self.tags_view.set_database(db, self.alter_tb)
self.library_view.model().set_book_on_device_func(self.book_on_device)
self.status_bar.clear_message()
self.search.clear()
self.saved_search.clear()
self.book_details.reset_info()
# self.library_view.model().count_changed()
db = self.library_view.model().db
self.iactions['Choose Library'].count_changed(db.count())
self.set_window_title()
self.apply_named_search_restriction('') # reset restriction to null
self.saved_searches_changed(recount=False) # reload the search restrictions combo box
if db.prefs['virtual_lib_on_startup']:
self.apply_virtual_library(db.prefs['virtual_lib_on_startup'])
self.rebuild_vl_tabs()
for action in self.iactions.values():
action.library_changed(db)
if olddb is not None:
try:
if call_close:
olddb.close()
except:
import traceback
traceback.print_exc()
olddb.break_cycles()
if self.device_connected:
self.set_books_in_library(self.booklists(), reset=True)
self.refresh_ondevice()
self.memory_view.reset()
self.card_a_view.reset()
self.card_b_view.reset()
self.set_current_library_information(current_library_name(), db.library_id,
db.field_metadata)
self.library_view.set_current_row(0)
# Run a garbage collection now so that it does not freeze the
# interface later
gc.collect()
def set_window_title(self):
    """Update the main window title to show the current library name and
    any active virtual library / search restriction."""
    db = self.current_db
    restrictions = [x for x in (db.data.get_base_restriction_name(),
                    db.data.get_search_restriction_name()) if x]
    restrictions = ' :: '.join(restrictions)
    font = QFont()
    if restrictions:
        restrictions = ' :: ' + restrictions
        # Make the virtual library indicator stand out while a
        # restriction is active
        font.setBold(True)
        font.setItalic(True)
    self.virtual_library.setFont(font)
    title = u'{0} - || {1}{2} ||'.format(
        __appname__, self.iactions['Choose Library'].library_name(), restrictions)
    self.setWindowTitle(title)
def location_selected(self, location):
'''
Called when a location icon is clicked (e.g. Library)
'''
page = 0 if location == 'library' else 1 if location == 'main' else 2 if location == 'carda' else 3
self.stack.setCurrentIndex(page)
self.book_details.reset_info()
for x in ('tb', 'cb'):
splitter = getattr(self, x+'_splitter')
splitter.button.setEnabled(location == 'library')
for action in self.iactions.values():
action.location_selected(location)
if location == 'library':
self.virtual_library_menu.setEnabled(True)
self.highlight_only_button.setEnabled(True)
self.vl_tabs.setEnabled(True)
else:
self.virtual_library_menu.setEnabled(False)
self.highlight_only_button.setEnabled(False)
self.vl_tabs.setEnabled(False)
# Reset the view in case something changed while it was invisible
self.current_view().reset()
self.set_number_of_books_shown()
self.update_status_bar()
def job_exception(self, job, dialog_title=_('Conversion Error'), retry_func=None):
if not hasattr(self, '_modeless_dialogs'):
self._modeless_dialogs = []
minz = self.is_minimized_to_tray
if self.isVisible():
for x in list(self._modeless_dialogs):
if not x.isVisible():
self._modeless_dialogs.remove(x)
try:
if 'calibre.ebooks.DRMError' in job.details:
if not minz:
from calibre.gui2.dialogs.drm_error import DRMErrorMessage
d = DRMErrorMessage(self, _('Cannot convert') + ' ' +
job.description.split(':')[-1].partition('(')[-1][:-1])
d.setModal(False)
d.show()
self._modeless_dialogs.append(d)
return
if 'calibre.ebooks.oeb.transforms.split.SplitError' in job.details:
title = job.description.split(':')[-1].partition('(')[-1][:-1]
msg = _('<p><b>Failed to convert: %s')%title
msg += '<p>'+_('''
Many older ebook reader devices are incapable of displaying
EPUB files that have internal components over a certain size.
Therefore, when converting to EPUB, calibre automatically tries
to split up the EPUB into smaller sized pieces. For some
files that are large undifferentiated blocks of text, this
splitting fails.
<p>You can <b>work around the problem</b> by either increasing the
maximum split size under EPUB Output in the conversion dialog,
or by turning on Heuristic Processing, also in the conversion
dialog. Note that if you make the maximum split size too large,
your ebook reader may have trouble with the EPUB.
''')
if not minz:
d = error_dialog(self, _('Conversion Failed'), msg,
det_msg=job.details)
d.setModal(False)
d.show()
self._modeless_dialogs.append(d)
return
if 'calibre.web.feeds.input.RecipeDisabled' in job.details:
if not minz:
msg = job.details
msg = msg[msg.find('calibre.web.feeds.input.RecipeDisabled:'):]
msg = msg.partition(':')[-1]
d = error_dialog(self, _('Recipe Disabled'),
'<p>%s</p>'%msg)
d.setModal(False)
d.show()
self._modeless_dialogs.append(d)
return
if 'calibre.ebooks.conversion.ConversionUserFeedBack:' in job.details:
if not minz:
import json
payload = job.details.rpartition(
'calibre.ebooks.conversion.ConversionUserFeedBack:')[-1]
payload = json.loads('{' + payload.partition('{')[-1])
d = {'info':info_dialog, 'warn':warning_dialog,
'error':error_dialog}.get(payload['level'],
error_dialog)
d = d(self, payload['title'],
'<p>%s</p>'%payload['msg'],
det_msg=payload['det_msg'])
d.setModal(False)
d.show()
self._modeless_dialogs.append(d)
return
except:
pass
if job.killed:
return
try:
prints(job.details, file=sys.stderr)
except:
pass
if not minz:
self.job_error_dialog.show_error(dialog_title,
_('<b>Failed</b>')+': '+unicode(job.description),
det_msg=job.details, retry_func=retry_func)
def read_settings(self):
    'Restore the saved main window geometry and layout state'
    saved_geometry = config['main_window_geometry']
    if saved_geometry is not None:
        self.restoreGeometry(saved_geometry)
    self.read_layout_settings()
def write_settings(self):
    'Persist main window geometry, sort history and layout state'
    with gprefs:  # Only write to gprefs once
        config.set('main_window_geometry', self.saveGeometry())
        dynamic.set('sort_history', self.library_view.model().sort_history)
        self.save_layout_state()
def quit(self, checked=True, restart=False, debug_on_restart=False,
confirm_quit=True):
if confirm_quit and not self.confirm_quit():
return
try:
self.shutdown()
except:
pass
self.restart_after_quit = restart
self.debug_on_restart = debug_on_restart
QApplication.instance().quit()
def donate(self, *args):
open_url(QUrl('http://calibre-ebook.com/donate'))
def confirm_quit(self):
    """Ask the user to confirm quitting while work is still in progress.

    Returns True when it is OK to quit, False otherwise.
    """
    if self.job_manager.has_jobs():
        msg = _('There are active jobs. Are you sure you want to quit?')
        if self.job_manager.has_device_jobs():
            # Interrupting a device job can corrupt data on the device,
            # so use a stronger warning
            msg = '<p>'+__appname__ + \
                _(''' is communicating with the device!<br>
                Quitting may cause corruption on the device.<br>
                Are you sure you want to quit?''')+'</p>'
        if not question_dialog(self, _('Active jobs'), msg):
            return False
    from calibre.db.delete_service import has_jobs
    if has_jobs():
        msg = _('Some deleted books are still being moved to the Recycle '
                'Bin, if you quit now, they will be left behind. Are you '
                'sure you want to quit?')
        if not question_dialog(self, _('Active jobs'), msg):
            return False
    return True
def shutdown(self, write_settings=True):
self.grid_view.shutdown()
try:
db = self.library_view.model().db
cf = db.clean
except:
pass
else:
cf()
# Save the current field_metadata for applications like calibre2opds
# Goes here, because if cf is valid, db is valid.
db.new_api.set_pref('field_metadata', db.field_metadata.all_metadata())
db.commit_dirty_cache()
db.prefs.write_serialized(prefs['library_path'])
for action in self.iactions.values():
if not action.shutting_down():
return
if write_settings:
self.write_settings()
self.check_messages_timer.stop()
if hasattr(self, 'update_checker'):
self.update_checker.shutdown()
self.listener.close()
self.job_manager.server.close()
self.job_manager.threaded_server.close()
self.device_manager.keep_going = False
self.auto_adder.stop()
mb = self.library_view.model().metadata_backup
if mb is not None:
mb.stop()
self.hide_windows()
try:
try:
if self.content_server is not None:
s = self.content_server
self.content_server = None
s.exit()
except:
pass
except KeyboardInterrupt:
pass
if self._spare_pool is not None:
self._spare_pool.shutdown()
from calibre.db.delete_service import shutdown
shutdown()
time.sleep(2)
self.istores.join()
self.hide_windows()
# Do not report any errors that happen after the shutdown
sys.excepthook = sys.__excepthook__
return True
def run_wizard(self, *args):
if self.confirm_quit():
self.run_wizard_b4_shutdown = True
self.restart_after_quit = True
try:
self.shutdown(write_settings=False)
except:
pass
QApplication.instance().quit()
def closeEvent(self, e):
self.write_settings()
if self.system_tray_icon is not None and self.system_tray_icon.isVisible():
if not dynamic['systray_msg'] and not isosx:
info_dialog(self, 'calibre', 'calibre '+
_('will keep running in the system tray. To close it, '
'choose <b>Quit</b> in the context menu of the '
'system tray.'), show_copy_button=False).exec_()
dynamic['systray_msg'] = True
self.hide_windows()
e.ignore()
else:
if self.confirm_quit():
try:
self.shutdown(write_settings=False)
except:
import traceback
traceback.print_exc()
e.accept()
else:
e.ignore()
# }}}
| hazrpg/calibre | src/calibre/gui2/ui.py | Python | gpl-3.0 | 39,371 |
# Copyright 2014-2016 Ivan Kravets <me@ikravets.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import atexit
import platform
import Queue
import sys
import threading
import uuid
from collections import deque
from os import getenv
from time import sleep, time
from traceback import format_exc
import click
import requests
from platformio import __version__, app, exception, util
from platformio.ide.projectgenerator import ProjectGenerator
class TelemetryBase(object):

    """Base class for telemetry backends: stores hit parameters in a
    dict with mapping syntax and provides a persistent anonymous
    client id."""

    # Stable anonymous identifier derived from the hardware MAC address
    MACHINE_ID = str(uuid.uuid5(uuid.NAMESPACE_OID, str(uuid.getnode())))

    def __init__(self):
        self._params = {}

    def __getitem__(self, name):
        # Missing parameters read as None instead of raising KeyError
        return self._params.get(name, None)

    def __setitem__(self, name, value):
        self._params[name] = value

    def __delitem__(self, name):
        # Deleting an absent parameter is a harmless no-op
        self._params.pop(name, None)

    def get_cid(self):
        'Return the persisted client id, creating and saving it on first use'
        cid = app.get_state_item("cid")
        if not cid:
            cid = self.MACHINE_ID
            app.set_state_item("cid", cid)
        return cid

    def send(self, hittype):
        raise NotImplementedError()
class MeasurementProtocol(TelemetryBase):
    """Builds Google Analytics Measurement Protocol hits."""

    TRACKING_ID = "UA-1768265-9"

    # Friendly names accepted by item access, mapped onto the short GA
    # protocol parameter names.
    PARAMS_MAP = {
        "screen_name": "cd",
        "event_category": "ec",
        "event_action": "ea",
        "event_label": "el",
        "event_value": "ev"
    }

    def __init__(self):
        TelemetryBase.__init__(self)
        self['v'] = 1
        self['tid'] = self.TRACKING_ID
        self['cid'] = self.get_cid()
        self['sr'] = "%dx%d" % click.get_terminal_size()
        self._prefill_screen_name()
        self._prefill_appinfo()
        self._prefill_custom_data()

    def __getitem__(self, name):
        return TelemetryBase.__getitem__(self, self.PARAMS_MAP.get(name, name))

    def __setitem__(self, name, value):
        TelemetryBase.__setitem__(self, self.PARAMS_MAP.get(name, name), value)

    def _prefill_appinfo(self):
        # App version plus the versions of dependent packages/callers.
        self['av'] = __version__
        dpdata = ["Click/%s" % click.__version__]
        if app.get_session_var("caller_id"):
            dpdata.append("Caller/%s" % app.get_session_var("caller_id"))
        self['an'] = " ".join(dpdata)

    def _prefill_custom_data(self):
        # cd1=system type, cd2=Python/platform, cd4=interactive flag.
        self['cd1'] = util.get_systype()
        self['cd2'] = "Python/%s %s" % (platform.python_version(),
                                        platform.platform())
        self['cd4'] = (1 if app.get_setting("enable_prompts") or
                       app.get_session_var("caller_id") else 0)

    def _prefill_screen_name(self):
        self['cd3'] = " ".join([str(s).lower() for s in sys.argv[1:]])
        ctx = app.get_session_var("command_ctx")
        if not ctx:
            return
        args = [str(s).lower() for s in ctx.args if not str(s).startswith("-")]
        if not args:
            return
        # Sub-command groups report two path components, others one.
        depth = 2 if args[0] in ("lib", "platforms", "serialports",
                                 "settings") else 1
        self['screen_name'] = " ".join([p.title() for p in args[:depth]])

    def send(self, hittype):
        if not app.get_setting("enable_telemetry"):
            return
        self['t'] = hittype
        # Convert a stored enqueue timestamp into GA queue time (ms).
        if "qt" in self._params and isinstance(self['qt'], float):
            self['qt'] = int((time() - self['qt']) * 1000)
        MPDataPusher().push(self._params)
@util.singleton
class MPDataPusher(object):
    """Background pusher that delivers Measurement Protocol hits to
    Google Analytics from a small pool of daemon worker threads."""

    MAX_WORKERS = 5

    def __init__(self):
        self._queue = Queue.LifoQueue()
        self._failedque = deque()
        self._http_session = requests.Session()
        self._http_offline = False
        self._workers = []

    def push(self, item):
        """Queue *item* for delivery (stash it when the network is down)."""
        if self._http_offline:
            # Remember when the hit was generated so queue time can be
            # reported correctly once the report is eventually resent.
            if "qt" not in item:
                item['qt'] = time()
            self._failedque.append(item)
            return
        self._queue.put(item)
        self._tune_workers()

    def in_wait(self):
        """Number of queued hits not yet fully processed."""
        return self._queue.unfinished_tasks

    def get_items(self):
        """Drain and return failed plus still-queued items (for backup)."""
        items = list(self._failedque)
        try:
            while True:
                items.append(self._queue.get_nowait())
        except Queue.Empty:
            pass
        return items

    def _tune_workers(self):
        # Drop dead threads first. BUGFIX: the previous code deleted
        # from the list while enumerating it, which skips the element
        # following each removed entry; rebuilding the list avoids that.
        self._workers = [w for w in self._workers if w.is_alive()]
        need_nums = min(self._queue.qsize(), self.MAX_WORKERS)
        active_nums = len(self._workers)
        if need_nums <= active_nums:
            return
        for _ in range(need_nums - active_nums):
            t = threading.Thread(target=self._worker)
            t.daemon = True
            t.start()
            self._workers.append(t)

    def _worker(self):
        while True:
            try:
                item = self._queue.get()
                _item = item.copy()
                if "qt" not in _item:
                    _item['qt'] = time()
                # Keep a copy in the failed deque until delivery
                # succeeds, so an interrupted run can back it up.
                self._failedque.append(_item)
                if self._send_data(item):
                    self._failedque.remove(_item)
                self._queue.task_done()
            except:  # pylint: disable=W0702
                pass

    def _send_data(self, data):
        if self._http_offline:
            return False
        try:
            r = self._http_session.post(
                "https://ssl.google-analytics.com/collect",
                data=data,
                headers=util.get_request_defheaders(),
                timeout=1
            )
            r.raise_for_status()
            return True
        except:  # pylint: disable=W0702
            # Any failure flips the pusher into offline mode.
            self._http_offline = True
            return False
def on_command():
    """Report a CLI invocation as a screenview hit, flushing any
    previously backed-up reports first."""
    resend_backuped_reports()
    mp = MeasurementProtocol()
    mp.send("screenview")
    if util.is_ci():
        measure_ci()
    caller_id = app.get_session_var("caller_id")
    if caller_id:
        measure_caller(caller_id)
def measure_ci():
    """Detect a known CI service from the environment and report it."""
    # Each of these services exports an env var named after itself set
    # to "true" while a build runs; the value is the repo slug.
    ci_labels = {
        "APPVEYOR": getenv("APPVEYOR_REPO_NAME"),
        "CIRCLECI": "%s/%s" % (getenv("CIRCLE_PROJECT_USERNAME"),
                               getenv("CIRCLE_PROJECT_REPONAME")),
        "TRAVIS": getenv("TRAVIS_REPO_SLUG"),
        "SHIPPABLE": getenv("REPO_NAME"),
        "DRONE": getenv("DRONE_REPO_SLUG")
    }
    action, label = "NoName", None
    for key, repo in ci_labels.iteritems():
        if getenv(key, "").lower() == "true":
            action, label = key, repo
    on_event(category="CI", action=action, label=label)
def measure_caller(calller_id):
    """Report which tool (IDE or other wrapper) invoked PlatformIO."""
    caller = str(calller_id)[:20].lower()
    known_ides = ["atom", "vim"] + ProjectGenerator.get_supported_ides()
    action = "IDE" if caller in known_ides else "Misc"
    on_event(category="Caller", action=action, label=caller)
def on_run_environment(options, targets):
    """Report a `run` invocation with its options and targets."""
    label = "&".join(["%s=%s" % pair for pair in sorted(options.items())])
    action = " ".join([t.title() for t in (targets or ["run"])])
    on_event("Env", action, label)
def on_event(category, action, label=None, value=None, screen_name=None):
    """Send a Measurement Protocol "event" hit.

    Field lengths are capped to the limits imposed by Google Analytics
    (category 150, action/label 500, screen name 2048 bytes).
    """
    mp = MeasurementProtocol()
    mp['event_category'] = category[:150]
    mp['event_action'] = action[:500]
    if label:
        mp['event_label'] = label[:500]
    if value:
        # GA requires an integer event value.
        mp['event_value'] = int(value)
    if screen_name:
        mp['screen_name'] = screen_name[:2048]
    mp.send("event")
def on_exception(e):
    """Report an exception hit; user-aborted runs are not reported."""
    if isinstance(e, exception.AbortedByUser):
        return
    # Anything that is not a PlatformioException -- or whose class name
    # contains "Error" -- is treated as a crash and gets a traceback.
    is_crash = (not isinstance(e, exception.PlatformioException) or
                "Error" in e.__class__.__name__)
    mp = MeasurementProtocol()
    mp['exd'] = "%s: %s" % (type(e).__name__,
                            format_exc() if is_crash else e)
    mp['exf'] = 1 if is_crash else 0
    mp.send("exception")
@atexit.register
def _finalize():
    """At interpreter exit, give the workers up to ~1 second to drain
    the queue, then back up whatever is still pending."""
    try:
        # 5 polls x 0.2s = the original 1000ms timeout.
        for _ in range(5):
            if not MPDataPusher().in_wait():
                break
            sleep(0.2)
        backup_reports(MPDataPusher().get_items())
    except KeyboardInterrupt:
        pass
def backup_reports(items):
    """Persist undelivered telemetry hits into the application state."""
    if not items:
        return

    KEEP_MAX_REPORTS = 100
    tm = app.get_state_item("telemetry", {})
    backup = tm.setdefault("backup", [])
    # These parameters are recomputed on resend and need not be stored.
    static_keys = ("v", "tid", "cid", "cd1", "cd2", "sr", "an")
    for params in items:
        for key in list(params):
            if key in static_keys:
                del params[key]
        # Normalize queue time to an absolute UNIX timestamp.
        if "qt" not in params:
            params['qt'] = time()
        elif not isinstance(params['qt'], float):
            params['qt'] = time() - (params['qt'] / 1000)
        backup.append(params)
    tm['backup'] = backup[-KEEP_MAX_REPORTS:]
    app.set_state_item("telemetry", tm)
def resend_backuped_reports():
    """Resend previously backed-up telemetry hits.

    Returns False when there is nothing to resend, True after the
    backup has been flushed. (The original fell through and returned
    None on success, making the boolean result inconsistent.)
    """
    tm = app.get_state_item("telemetry", {})
    if "backup" not in tm or not tm['backup']:
        return False
    for report in tm['backup']:
        mp = MeasurementProtocol()
        for key, value in report.items():
            mp[key] = value
        mp.send(report['t'])
    # Clear the backup only after every report has been queued again.
    tm['backup'] = []
    app.set_state_item("telemetry", tm)
    return True
| ZachMassia/platformio | platformio/telemetry.py | Python | apache-2.0 | 10,093 |
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""The Tornado web framework.
核心模块, 参考示例使用代码:
- 重要模块:
- tornado.web
- tornado.ioloop # 根据示例,可知入口在此.参看: ioloop.py
- tornado.httpserver
The Tornado web framework looks a bit like web.py (http://webpy.org/) or
Google's webapp (http://code.google.com/appengine/docs/python/tools/webapp/),
but with additional tools and optimizations to take advantage of the
Tornado non-blocking web server and tools.
Here is the canonical "Hello, world" example app:
import tornado.httpserver
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(8888)
tornado.ioloop.IOLoop.instance().start()
See the Tornado walkthrough on GitHub for more details and a good
getting started guide.
"""
import base64
import binascii
import calendar
import Cookie
import cStringIO
import datetime
import email.utils
import escape
import functools
import gzip
import hashlib
import hmac
import httplib
import locale
import logging
import mimetypes
import os.path
import re
import stat
import sys
import template
import time
import types
import urllib
import urlparse
import uuid
"""
# 模块说明: 核心模块
RequestHandler() 需要处理哪些工作:
- 1. HTTP方法支持(GET,POST, HEAD, DELETE, PUT), 预定义各种接口
- 2. 预定义接口: 配对定义[类似 unittest 的 setUp(), tearDown() 方法]
- prepare() # 运行前, 准备工作
- on_connection_close() # 运行后, 清理工作
- 根据需要, 选择使用
- 3. cookies处理:
- set
- get
- clear
- 4. HTTP头处理:
- set_status() # 状态码
- set_header() # 头信息
- 5. 重定向:
- redirect()
"""
class RequestHandler(object):
"""Subclass this class and define get() or post() to make a handler.
If you want to support more methods than the standard GET/HEAD/POST, you
should override the class variable SUPPORTED_METHODS in your
RequestHandler class.
译:
1. 继承此类,并自定义get(), post()方法,创建 handler
2. 若需要支持更多方法(GET/HEAD/POST), 需要 在 子类中 覆写 类变量 SUPPORTED_METHODS
"""
SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PUT")
def __init__(self, application, request, transforms=None):
    # Per-request state: the owning Application, the HTTPRequest, and
    # the output transforms (e.g. chunking/gzip) applied on write.
    self.application = application
    self.request = request
    self._headers_written = False
    self._finished = False
    self._auto_finish = True
    self._transforms = transforms or []
    # UI helper methods and modules exposed to templates via self.ui.
    self.ui = _O((n, self._ui_method(m)) for n, m in
                 application.ui_methods.iteritems())
    self.ui["modules"] = _O((n, self._ui_module(n, m)) for n, m in
                            application.ui_modules.iteritems())
    self.clear()
    # Check since connection is not available in WSGI
    if hasattr(self.request, "connection"):
        # Get notified when the client drops the connection so that
        # on_connection_close() can release long-lived resources.
        self.request.connection.stream.set_close_callback(
            self.on_connection_close)
@property
def settings(self):
    """Shortcut for the owning application's settings dictionary."""
    return self.application.settings
# 如下这部分, 默认的接口定义, 如果子类没有覆写这些方法,就直接抛出异常.
# 也就是说: 这些接口, 必须要 覆写,才可以用
def head(self, *args, **kwargs):
    """Default HEAD handler: 405 Method Not Allowed until overridden."""
    raise HTTPError(405)
def get(self, *args, **kwargs):
    """Default GET handler: 405 Method Not Allowed until overridden."""
    raise HTTPError(405)
def post(self, *args, **kwargs):
    """Default POST handler: 405 Method Not Allowed until overridden."""
    raise HTTPError(405)
def delete(self, *args, **kwargs):
    """Default DELETE handler: 405 Method Not Allowed until overridden."""
    raise HTTPError(405)
def put(self, *args, **kwargs):
    """Default PUT handler: 405 Method Not Allowed until overridden."""
    raise HTTPError(405)
# 预定义接口: 准备工作函数, 给需要 个性化配置用
# 注意调用时机: self._execute()
def prepare(self):
    """Called before the actual handler method.

    Useful to override in a handler if you want a common bottleneck for
    all of your requests. Invoked from _execute() just before dispatch.
    """
    pass
# 预定义接口2: 执行完后, 附带清理工作.(根据需要自行修改)
# 注意调用时机: __init__()
def on_connection_close(self):
    """Called in async handlers if the client closed the connection.

    You may override this to clean up resources associated with
    long-lived connections.

    Note that the select()-based implementation of IOLoop does not detect
    closed connections and so this method will not be called until
    you try (and fail) to produce some output. The epoll- and kqueue-
    based implementations should detect closed connections even while
    the request is idle.
    """
    pass
def clear(self):
    """Resets all headers and content for this response."""
    self._headers = {
        "Server": "TornadoServer/1.0",
        "Content-Type": "text/html; charset=UTF-8",
    }
    if not self.request.supports_http_1_1():
        # HTTP/1.0 defaults to non-persistent connections; honor an
        # explicit Keep-Alive request from the client.
        if self.request.headers.get("Connection") == "Keep-Alive":
            self.set_header("Connection", "Keep-Alive")
    self._write_buffer = []
    self._status_code = 200
# 设置 HTTP状态码
def set_status(self, status_code):
    """Sets the status code for our response."""
    # Only standard HTTP status codes are accepted; anything else is a
    # programming error, hence the assert.
    assert status_code in httplib.responses
    self._status_code = status_code
# 设置 HTTP头信息
# 根据 value 类型, 作 格式转换处理
def set_header(self, name, value):
    """Sets the given response header name and value.

    If a datetime is given, we automatically format it according to the
    HTTP specification. If the value is not a string, we convert it to
    a string. All header values are then encoded as UTF-8.
    """
    if isinstance(value, datetime.datetime):
        t = calendar.timegm(value.utctimetuple())
        value = email.utils.formatdate(t, localtime=False, usegmt=True)
    elif isinstance(value, int) or isinstance(value, long):
        value = str(value)
    else:
        value = _utf8(value)
    # If \n is allowed into the header, it is possible to inject
    # additional headers or split the request. Also cap length to
    # prevent obviously erroneous values.
    safe_value = re.sub(r"[\x00-\x1f]", " ", value)[:4000]
    if safe_value != value:
        raise ValueError("Unsafe header value %r", value)
    self._headers[name] = value
# Sentinel: lets callers distinguish "no default supplied" from None.
_ARG_DEFAULT = []

def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
    """Returns the value of the argument with the given name.

    If default is not provided, the argument is considered to be
    required, and we throw an HTTP 404 exception if it is missing.

    If the argument appears in the url more than once, we return the
    last value.

    The returned value is always unicode.
    """
    values = self.get_arguments(name, strip=strip)
    if values:
        return values[-1]
    if default is self._ARG_DEFAULT:
        raise HTTPError(404, "Missing argument %s" % name)
    return default
def get_arguments(self, name, strip=True):
    """Returns a list of the arguments with the given name.

    If the argument is not present, returns an empty list.

    The returned values are always unicode.
    """
    cleaned = []
    for raw in self.request.arguments.get(name, []):
        # Replace weird control characters, then decode to unicode.
        text = _unicode(re.sub(r"[\x00-\x08\x0e-\x1f]", " ", raw))
        cleaned.append(text.strip() if strip else text)
    return cleaned
@property
def cookies(self):
    """A dictionary of Cookie.Morsel objects."""
    # Parse the request's Cookie header lazily and cache the result.
    if not hasattr(self, "_cookies"):
        self._cookies = Cookie.BaseCookie()
        if "Cookie" in self.request.headers:
            try:
                self._cookies.load(self.request.headers["Cookie"])
            except:
                # Malformed Cookie header: clear all request cookies.
                self.clear_all_cookies()
    return self._cookies
def get_cookie(self, name, default=None):
    """Gets the value of the cookie with the given name, else default."""
    # self.cookies is a property, so the header is parsed on demand.
    if name in self.cookies:
        return self.cookies[name].value
    return default
def set_cookie(self, name, value, domain=None, expires=None, path="/",
               expires_days=None, **kwargs):
    """Sets the given cookie name/value with the given options.

    Additional keyword arguments are set on the Cookie.Morsel
    directly.
    See http://docs.python.org/library/cookie.html#morsel-objects
    for available attributes.
    """
    name = _utf8(name)
    value = _utf8(value)
    if re.search(r"[\x00-\x20]", name + value):
        # Don't let us accidentally inject bad stuff
        raise ValueError("Invalid cookie %r: %r" % (name, value))
    if not hasattr(self, "_new_cookies"):
        self._new_cookies = []
    new_cookie = Cookie.BaseCookie()
    self._new_cookies.append(new_cookie)
    new_cookie[name] = value
    if domain:
        new_cookie[name]["domain"] = domain
    if expires_days is not None and not expires:
        # Convenience: derive an absolute expiry from a day count.
        expires = datetime.datetime.utcnow() + datetime.timedelta(
            days=expires_days)
    if expires:
        timestamp = calendar.timegm(expires.utctimetuple())
        new_cookie[name]["expires"] = email.utils.formatdate(
            timestamp, localtime=False, usegmt=True)
    if path:
        new_cookie[name]["path"] = path
    for k, v in kwargs.iteritems():
        new_cookie[name][k] = v
def clear_cookie(self, name, path="/", domain=None):
    """Deletes the cookie with the given name."""
    # Standard deletion idiom: overwrite the cookie with an empty
    # value and an expiration date in the past.
    past = datetime.datetime.utcnow() - datetime.timedelta(days=365)
    self.set_cookie(name, value="", path=path, expires=past,
                    domain=domain)
def clear_all_cookies(self):
    """Deletes all the cookies the user sent with this request."""
    # clear_cookie() only queues replacement Set-Cookie headers (it
    # does not mutate self.cookies), so direct iteration is safe.
    for name in self.cookies:
        self.clear_cookie(name)
def set_secure_cookie(self, name, value, expires_days=30, **kwargs):
    """Signs and timestamps a cookie so it cannot be forged.

    You must specify the 'cookie_secret' setting in your Application
    to use this method. It should be a long, random sequence of bytes
    to be used as the HMAC secret for the signature.

    To read a cookie set with this method, use get_secure_cookie().
    """
    # Cookie payload is "b64value|timestamp|signature"; the timestamp
    # lets get_secure_cookie() reject stale cookies.
    timestamp = str(int(time.time()))
    value = base64.b64encode(value)
    signature = self._cookie_signature(name, value, timestamp)
    value = "|".join([value, timestamp, signature])
    self.set_cookie(name, value, expires_days=expires_days, **kwargs)
def get_secure_cookie(self, name, include_name=True, value=None):
    """Returns the given signed cookie if it validates, or None.

    In older versions of Tornado (0.1 and 0.2), we did not include the
    name of the cookie in the cookie signature. To read these old-style
    cookies, pass include_name=False to this method. Otherwise, all
    attempts to read old-style cookies will fail (and you may log all
    your users out whose cookies were written with a previous Tornado
    version).
    """
    if value is None:
        value = self.get_cookie(name)
    if not value:
        return None
    parts = value.split("|")
    if len(parts) != 3:
        return None
    if include_name:
        signature = self._cookie_signature(name, parts[0], parts[1])
    else:
        signature = self._cookie_signature(parts[0], parts[1])
    # Constant-time comparison avoids timing side channels.
    if not _time_independent_equals(parts[2], signature):
        logging.warning("Invalid cookie signature %r", value)
        return None
    timestamp = int(parts[1])
    # Reject cookies older than ~31 days regardless of signature.
    if timestamp < time.time() - 31 * 86400:
        logging.warning("Expired cookie %r", value)
        return None
    try:
        return base64.b64decode(parts[0])
    except:
        # Corrupt base64 payload: treat as missing.
        return None
def _cookie_signature(self, *parts):
    """Return the hex HMAC-SHA1 signature of *parts*, keyed with the
    application's 'cookie_secret' setting."""
    self.require_setting("cookie_secret", "secure cookies")
    # Renamed from `hash`, which shadowed the builtin of the same name.
    mac = hmac.new(self.application.settings["cookie_secret"],
                   digestmod=hashlib.sha1)
    for part in parts:
        mac.update(part)
    return mac.hexdigest()
# 关键代码: 重定向
#
def redirect(self, url, permanent=False):
    """Sends a redirect to the given (optionally relative) URL."""
    if self._headers_written:
        raise Exception("Cannot redirect after headers have been written")
    # 301 = permanent redirect, 302 = temporary.
    self.set_status(301 if permanent else 302)
    # Remove whitespace
    url = re.sub(r"[\x00-\x20]+", "", _utf8(url))
    self.set_header("Location", urlparse.urljoin(self.request.uri, url))
    self.finish()  # a redirect response has no body
# 关键代码: 准备 渲染页面的 数据, 常用接口函数
# 特别说明:
# - 这里 write() 方法, 并没有直接 渲染页面, 而是在 准备 渲染数据
# - 实际的 渲染HTML页面操作, 在 finish() 中
def write(self, chunk):
    """Writes the given chunk to the output buffer.

    To write the output to the network, use the flush() method below.

    If the given chunk is a dictionary, we write it as JSON and set
    the Content-Type of the response to be text/javascript.

    Note: this only buffers data; the actual send to the client
    happens in flush()/finish().
    """
    assert not self._finished
    if isinstance(chunk, dict):
        chunk = escape.json_encode(chunk)
        self.set_header("Content-Type", "text/javascript; charset=UTF-8")
    chunk = _utf8(chunk)
    self._write_buffer.append(chunk)
# 关键代码: 渲染页面
#
def render(self, template_name, **kwargs):
    """Renders the template with the given arguments as the response."""
    html = self.render_string(template_name, **kwargs)

    # Insert the additional JS and CSS added by the modules on the page
    js_embed = []
    js_files = []
    css_embed = []
    css_files = []
    html_heads = []
    html_bodies = []
    for module in getattr(self, "_active_modules", {}).itervalues():
        # JavaScript contributed by each active UI module.
        embed_part = module.embedded_javascript()
        if embed_part:
            js_embed.append(_utf8(embed_part))
        file_part = module.javascript_files()
        if file_part:
            if isinstance(file_part, basestring):
                js_files.append(file_part)
            else:
                js_files.extend(file_part)
        # CSS contributed by each active UI module.
        embed_part = module.embedded_css()
        if embed_part:
            css_embed.append(_utf8(embed_part))
        file_part = module.css_files()
        if file_part:
            if isinstance(file_part, basestring):
                css_files.append(file_part)
            else:
                css_files.extend(file_part)
        # Extra <head>/<body> fragments.
        head_part = module.html_head()
        if head_part:
            html_heads.append(_utf8(head_part))
        body_part = module.html_body()
        if body_part:
            html_bodies.append(_utf8(body_part))

    # Splice the collected fragments into the rendered HTML string,
    # just before </head> or </body> as appropriate.
    if js_files:
        # Maintain order of JavaScript files given by modules
        paths = []
        unique_paths = set()
        for path in js_files:
            if not path.startswith("/") and not path.startswith("http:"):
                path = self.static_url(path)
            if path not in unique_paths:
                paths.append(path)
                unique_paths.add(path)
        js = ''.join('<script src="' + escape.xhtml_escape(p) +
                     '" type="text/javascript"></script>'
                     for p in paths)
        sloc = html.rindex('</body>')
        html = html[:sloc] + js + '\n' + html[sloc:]
    if js_embed:
        js = '<script type="text/javascript">\n//<![CDATA[\n' + \
            '\n'.join(js_embed) + '\n//]]>\n</script>'
        sloc = html.rindex('</body>')
        html = html[:sloc] + js + '\n' + html[sloc:]
    if css_files:
        paths = set()
        for path in css_files:
            if not path.startswith("/") and not path.startswith("http:"):
                paths.add(self.static_url(path))
            else:
                paths.add(path)
        css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
                      'type="text/css" rel="stylesheet"/>'
                      for p in paths)
        hloc = html.index('</head>')
        html = html[:hloc] + css + '\n' + html[hloc:]
    if css_embed:
        css = '<style type="text/css">\n' + '\n'.join(css_embed) + \
            '\n</style>'
        hloc = html.index('</head>')
        html = html[:hloc] + css + '\n' + html[hloc:]
    if html_heads:
        hloc = html.index('</head>')
        html = html[:hloc] + ''.join(html_heads) + '\n' + html[hloc:]
    if html_bodies:
        hloc = html.index('</body>')
        html = html[:hloc] + ''.join(html_bodies) + '\n' + html[hloc:]
    # Write the assembled page and end the request.
    self.finish(html)
def render_string(self, template_name, **kwargs):
    """Generate the given template with the given arguments.

    We return the generated string. To generate and write a template
    as a response, use render() above.
    """
    # If no template_path is specified, use the path of the calling file
    template_path = self.get_template_path()
    if not template_path:
        # Walk up the stack to the first frame outside this module.
        frame = sys._getframe(0)
        web_file = frame.f_code.co_filename
        while frame.f_code.co_filename == web_file:
            frame = frame.f_back
        template_path = os.path.dirname(frame.f_code.co_filename)
    if not getattr(RequestHandler, "_templates", None):
        RequestHandler._templates = {}
    if template_path not in RequestHandler._templates:
        loader = self.application.settings.get("template_loader") or\
            template.Loader(template_path)
        # Cache one template loader per template directory.
        RequestHandler._templates[template_path] = loader
    t = RequestHandler._templates[template_path].load(template_name)
    # Standard namespace available to every template.
    args = dict(
        handler=self,
        request=self.request,
        current_user=self.current_user,
        locale=self.locale,
        _=self.locale.translate,
        static_url=self.static_url,
        xsrf_form_html=self.xsrf_form_html,
        reverse_url=self.application.reverse_url
    )
    args.update(self.ui)
    args.update(kwargs)
    return t.generate(**args)
def flush(self, include_footers=False):
    """Flushes the current output buffer to the network."""
    if self.application._wsgi:
        raise Exception("WSGI applications do not support flush()")

    chunk = "".join(self._write_buffer)
    self._write_buffer = []
    if not self._headers_written:
        self._headers_written = True
        # First flush: let the transforms rewrite the headers together
        # with the first chunk (e.g. to set up chunked encoding).
        for transform in self._transforms:
            self._headers, chunk = transform.transform_first_chunk(
                self._headers, chunk, include_footers)
        headers = self._generate_headers()
    else:
        for transform in self._transforms:
            chunk = transform.transform_chunk(chunk, include_footers)
        headers = ""

    # Ignore the chunk and only write the headers for HEAD requests
    if self.request.method == "HEAD":
        if headers:
            self.request.write(headers)
        return

    if headers or chunk:
        self.request.write(headers + chunk)
# 超级关键代码: 写HTML页面
#
#
def finish(self, chunk=None):
    """Finishes this response, ending the HTTP request."""
    assert not self._finished
    if chunk is not None:
        self.write(chunk)  # buffer any final payload

    # Automatically support ETags and add the Content-Length header if
    # we have not flushed any content yet.
    if not self._headers_written:
        if (self._status_code == 200 and self.request.method == "GET" and
            "Etag" not in self._headers):
            # ETag is the SHA1 of the response body.
            hasher = hashlib.sha1()
            for part in self._write_buffer:
                hasher.update(part)
            etag = '"%s"' % hasher.hexdigest()
            inm = self.request.headers.get("If-None-Match")
            if inm and inm.find(etag) != -1:
                # Client already has this body: 304 Not Modified.
                self._write_buffer = []
                self.set_status(304)
            else:
                self.set_header("Etag", etag)
        if "Content-Length" not in self._headers:
            content_length = sum(len(part) for part in self._write_buffer)
            self.set_header("Content-Length", content_length)

    if hasattr(self.request, "connection"):
        # Now that the request is finished, clear the callback we
        # set on the IOStream (which would otherwise prevent the
        # garbage collection of the RequestHandler when there
        # are keepalive connections)
        self.request.connection.stream.set_close_callback(None)

    if not self.application._wsgi:
        self.flush(include_footers=True)
        self.request.finish()
        self._log()
    self._finished = True
# 给浏览器,返回 内部错误
def send_error(self, status_code=500, **kwargs):
    """Sends the given HTTP error code to the browser.

    We also send the error HTML for the given error code as returned by
    get_error_html. Override that method if you want custom error pages
    for your application.
    """
    if self._headers_written:
        # Too late to replace the response; just end the request.
        logging.error("Cannot send error response after headers written")
        if not self._finished:
            self.finish()
        return
    self.clear()
    self.set_status(status_code)
    message = self.get_error_html(status_code, **kwargs)
    self.finish(message)
def get_error_html(self, status_code, **kwargs):
    """Override to implement custom error pages.

    If this error was caused by an uncaught exception, the
    exception object can be found in kwargs e.g. kwargs['exception']
    """
    values = {
        "code": status_code,
        "message": httplib.responses[status_code],
    }
    return ("<html><title>%(code)d: %(message)s</title>"
            "<body>%(code)d: %(message)s</body></html>" % values)
# 本地配置: 通常用于设置 国际化-语言 (浏览器语言)
#
@property
def locale(self):
    """The local for the current session.

    Determined by either get_user_locale, which you can override to
    set the locale based on, e.g., a user preference stored in a
    database, or get_browser_locale, which uses the Accept-Language
    header.
    """
    if not hasattr(self, "_locale"):
        # Prefer an explicit per-user locale; fall back to the
        # browser's Accept-Language header.
        self._locale = self.get_user_locale()
        if not self._locale:
            self._locale = self.get_browser_locale()
            assert self._locale
    return self._locale
# 预定义接口 - 用户配置
# - 使用前, 需覆写该函数
def get_user_locale(self):
    """Override to determine the locale from the authenticated user.

    If None is returned, we use the Accept-Language header.
    """
    return None
# 默认浏览器设置语言环境
def get_browser_locale(self, default="en_US"):
    """Determines the user's locale from Accept-Language header.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
    """
    if "Accept-Language" in self.request.headers:
        languages = self.request.headers["Accept-Language"].split(",")
        locales = []
        for language in languages:
            # Each entry looks like "en-gb;q=0.8"; missing q means 1.0.
            parts = language.strip().split(";")
            if len(parts) > 1 and parts[1].startswith("q="):
                try:
                    score = float(parts[1][2:])
                except (ValueError, TypeError):
                    score = 0.0
            else:
                score = 1.0
            locales.append((parts[0], score))
        if locales:
            # Highest q-value first. A plain key function replaces the
            # tuple-unpacking lambda `lambda (l, s): s`, which is a
            # Python-2-only syntax (removed in Python 3).
            locales.sort(key=lambda pair: pair[1], reverse=True)
            codes = [l[0] for l in locales]
            return locale.get(*codes)
    return locale.get(default)
# 获取当前用户
@property
def current_user(self):
    """The authenticated user for this request.

    Determined by either get_current_user, which you can override to
    set the user based on, e.g., a cookie. If that method is not
    overridden, this method always returns None.

    We lazy-load the current user the first time this method is called
    and cache the result after that.
    """
    if not hasattr(self, "_current_user"):
        self._current_user = self.get_current_user()
    return self._current_user
# 预定义接口 - 获取当前用户
# - 使用前, 需覆写
# - 特别说明: 通常都需要用到该接口, 基本上一定是需要 覆写的
def get_current_user(self):
    """Override to determine the current user from, e.g., a cookie.

    Nearly every authenticated application overrides this hook.
    """
    return None
# ----------------------------------------------------
# 如下2个函数, 用于获取 默认配置参数
# - 登录 URL
# - 模板路径
# - 支持
# ----------------------------------------------------
def get_login_url(self):
    """Override to customize the login URL based on the request.

    By default, we use the 'login_url' application setting.
    """
    # Raises if the 'login_url' setting is missing.
    self.require_setting("login_url", "@tornado.web.authenticated")
    return self.application.settings["login_url"]
def get_template_path(self):
    """Override to customize template path for each handler.

    By default, we use the 'template_path' application setting.
    Return None to load templates relative to the calling file.
    """
    return self.application.settings.get("template_path")
# 预防 跨站攻击
#
# - 默认先判断是否记录了 token
# - 若已记录, 直接返回
# - 若未记录, 尝试从 cookie 中 获取
# - 若 cookie 中 存在, 从 cookie 中获取,并返回
# - 若 cookie 中 不存在, 主动生成 token, 并同步写入 cookie. (目的是,无需重复生成)
#
@property
def xsrf_token(self):
    """The XSRF-prevention token for the current user/session.

    To prevent cross-site request forgery, we set an '_xsrf' cookie
    and include the same '_xsrf' value as an argument with all POST
    requests. If the two do not match, we reject the form submission
    as a potential forgery.

    See http://en.wikipedia.org/wiki/Cross-site_request_forgery
    """
    if not hasattr(self, "_xsrf_token"):
        token = self.get_cookie("_xsrf")
        if not token:
            # No cookie yet: mint a random token and persist it so it
            # is not regenerated on every request.
            token = binascii.b2a_hex(uuid.uuid4().bytes)
            expires_days = 30 if self.current_user else None
            self.set_cookie("_xsrf", token, expires_days=expires_days)
        self._xsrf_token = token
    return self._xsrf_token
def check_xsrf_cookie(self):
    """Verifies that the '_xsrf' cookie matches the '_xsrf' argument.

    To prevent cross-site request forgery, we set an '_xsrf' cookie
    and include the same '_xsrf' value as an argument with all POST
    requests. If the two do not match, we reject the form submission
    as a potential forgery.

    See http://en.wikipedia.org/wiki/Cross-site_request_forgery
    """
    # XHR requests are exempt from the check here.
    if self.request.headers.get("X-Requested-With") == "XMLHttpRequest":
        return
    token = self.get_argument("_xsrf", None)
    if not token:
        raise HTTPError(403, "'_xsrf' argument missing from POST")
    if self.xsrf_token != token:
        raise HTTPError(403, "XSRF cookie does not match POST argument")
# 提交表单 - 预防 xsrf 攻击方法
def xsrf_form_html(self):
    """An HTML <input/> element to be included with all POST forms.

    It defines the _xsrf input value, which we check on all POST
    requests to prevent cross-site request forgery.

    If you have set the 'xsrf_cookies' application setting, you must include this
    HTML within all of your HTML forms.

    See check_xsrf_cookie() above for more information.
    """
    # The hidden field must be named "_xsrf" to match the cookie check.
    return '<input type="hidden" name="_xsrf" value="' + \
        escape.xhtml_escape(self.xsrf_token) + '"/>'
# 静态资源路径
def static_url(self, path):
    """Returns a static URL for the given relative static file path.

    This method requires you set the 'static_path' setting in your
    application (which specifies the root directory of your static
    files).

    We append ?v=<signature> to the returned URL, which makes our
    static file handler set an infinite expiration header on the
    returned content. The signature is based on the content of the
    file.

    If this handler has a "include_host" attribute, we include the
    full host for every static URL, including the "http://". Set
    this attribute for handlers whose output needs non-relative static
    path names.
    """
    self.require_setting("static_path", "static_url")
    if not hasattr(RequestHandler, "_static_hashes"):
        RequestHandler._static_hashes = {}
    hashes = RequestHandler._static_hashes
    if path not in hashes:
        # Hash the file contents once per process for cache-busting.
        try:
            f = open(os.path.join(
                self.application.settings["static_path"], path))
            try:
                hashes[path] = hashlib.md5(f.read()).hexdigest()
            finally:
                # Close even when read() raises -- the original only
                # closed on success, leaking the file descriptor.
                f.close()
        except:
            logging.error("Could not open static file %r", path)
            hashes[path] = None
    base = self.request.protocol + "://" + self.request.host \
        if getattr(self, "include_host", False) else ""
    static_url_prefix = self.settings.get('static_url_prefix', '/static/')
    if hashes.get(path):
        return base + static_url_prefix + path + "?v=" + hashes[path][:5]
    else:
        return base + static_url_prefix + path
# 异步回调
def async_callback(self, callback, *args, **kwargs):
    """Wrap callbacks with this if they are used on asynchronous requests.

    Catches exceptions and properly finishes the request.
    """
    if callback is None:
        return None
    if args or kwargs:
        callback = functools.partial(callback, *args, **kwargs)
    def wrapper(*args, **kwargs):
        try:
            return callback(*args, **kwargs)
        except Exception, e:
            if self._headers_written:
                # Too late to send an error response; just log it.
                logging.error("Exception after headers written",
                              exc_info=True)
            else:
                self._handle_request_exception(e)
    return wrapper
def require_setting(self, name, feature="this feature"):
    """Raises an exception if the given app setting is not defined."""
    if self.application.settings.get(name):
        return
    raise Exception("You must define the '%s' setting in your "
                    "application to use %s" % (name, feature))
def reverse_url(self, name, *args):
return self.application.reverse_url(name, *args)
# Key code: the request-execution entry point follows.
#
def _execute(self, transforms, *args, **kwargs):
"""Executes this request with the given output transforms."""
self._transforms = transforms
try:
if self.request.method not in self.SUPPORTED_METHODS:
raise HTTPError(405)
# If XSRF cookies are turned on, reject form submissions without
# the proper cookie
if self.request.method == "POST" and \
self.application.settings.get("xsrf_cookies"):
self.check_xsrf_cookie() # 检查
self.prepare() # 注意调用时机
if not self._finished:
getattr(self, self.request.method.lower())(*args, **kwargs)
if self._auto_finish and not self._finished:
self.finish() # 关键调用
except Exception, e:
self._handle_request_exception(e)
def _generate_headers(self):
lines = [self.request.version + " " + str(self._status_code) + " " +
httplib.responses[self._status_code]]
lines.extend(["%s: %s" % (n, v) for n, v in self._headers.iteritems()])
for cookie_dict in getattr(self, "_new_cookies", []):
for cookie in cookie_dict.values():
lines.append("Set-Cookie: " + cookie.OutputString(None))
return "\r\n".join(lines) + "\r\n\r\n"
# Log the completed request.
def _log(self):
if self._status_code < 400:
log_method = logging.info
elif self._status_code < 500:
log_method = logging.warning
else:
log_method = logging.error
request_time = 1000.0 * self.request.request_time()
# 日志打印
log_method("%d %s %.2fms", self._status_code,
self._request_summary(), request_time)
def _request_summary(self):
return self.request.method + " " + self.request.uri + " (" + \
self.request.remote_ip + ")"
def _handle_request_exception(self, e):
if isinstance(e, HTTPError):
if e.log_message:
format = "%d %s: " + e.log_message
args = [e.status_code, self._request_summary()] + list(e.args)
logging.warning(format, *args)
if e.status_code not in httplib.responses:
logging.error("Bad HTTP status code: %d", e.status_code)
self.send_error(500, exception=e)
else:
self.send_error(e.status_code, exception=e)
else:
logging.error("Uncaught exception %s\n%r", self._request_summary(),
self.request, exc_info=e)
self.send_error(500, exception=e)
def _ui_module(self, name, module):
def render(*args, **kwargs):
if not hasattr(self, "_active_modules"):
self._active_modules = {}
if name not in self._active_modules:
self._active_modules[name] = module(self)
rendered = self._active_modules[name].render(*args, **kwargs)
return rendered
return render
def _ui_method(self, method):
return lambda *args, **kwargs: method(self, *args, **kwargs)
# Decorator: mark a handler method as asynchronous.
def asynchronous(method):
"""Wrap request handler methods with this if they are asynchronous.
If this decorator is given, the response is not finished when the
method returns. It is up to the request handler to call self.finish()
to finish the HTTP request. Without this decorator, the request is
automatically finished when the get() or post() method returns.
class MyRequestHandler(web.RequestHandler):
@web.asynchronous
def get(self):
http = httpclient.AsyncHTTPClient()
http.fetch("http://friendfeed.com/", self._on_download)
def _on_download(self, response):
self.write("Downloaded!")
self.finish()
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if self.application._wsgi:
raise Exception("@asynchronous is not supported for WSGI apps")
self._auto_finish = False
return method(self, *args, **kwargs)
return wrapper
# Decorator: strip a trailing slash (/) from the request path.
def removeslash(method):
"""Use this decorator to remove trailing slashes from the request path.
For example, a request to '/foo/' would redirect to '/foo' with this
decorator. Your request handler mapping should use a regular expression
like r'/foo/*' in conjunction with using the decorator.
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if self.request.path.endswith("/"): # 结尾含 /
if self.request.method == "GET":
uri = self.request.path.rstrip("/") # 过滤掉 /
if self.request.query:
uri += "?" + self.request.query
self.redirect(uri) # 重定向
return
raise HTTPError(404)
return method(self, *args, **kwargs)
return wrapper
# Decorator: append a missing trailing slash (/) to the request path.
def addslash(method):
"""Use this decorator to add a missing trailing slash to the request path.
For example, a request to '/foo' would redirect to '/foo/' with this
decorator. Your request handler mapping should use a regular expression
like r'/foo/?' in conjunction with using the decorator.
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if not self.request.path.endswith("/"):
if self.request.method == "GET":
uri = self.request.path + "/"
if self.request.query:
uri += "?" + self.request.query
self.redirect(uri) # 重定向
return
raise HTTPError(404)
return method(self, *args, **kwargs)
return wrapper
# ----------------------------------------------------------------
# Entry point:
#   Application -- the callable object that HTTPServer hands
#   incoming requests to.
# ----------------------------------------------------------------
class Application(object):
"""A collection of request handlers that make up a web application.
Instances of this class are callable and can be passed directly to
HTTPServer to serve the application:
application = web.Application([
(r"/", MainPageHandler),
])
http_server = httpserver.HTTPServer(application)
http_server.listen(8080)
ioloop.IOLoop.instance().start()
The constructor for this class takes in a list of URLSpec objects
or (regexp, request_class) tuples. When we receive requests, we
iterate over the list in order and instantiate an instance of the
first request class whose regexp matches the request path.
Each tuple can contain an optional third element, which should be a
dictionary if it is present. That dictionary is passed as keyword
arguments to the contructor of the handler. This pattern is used
for the StaticFileHandler below:
application = web.Application([
(r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
We support virtual hosts with the add_handlers method, which takes in
a host regular expression as the first argument:
application.add_handlers(r"www\.myhost\.com", [
(r"/article/([0-9]+)", ArticleHandler),
])
You can serve static files by sending the static_path setting as a
keyword argument. We will serve those files from the /static/ URI
(this is configurable with the static_url_prefix setting),
and we will serve /favicon.ico and /robots.txt from the same directory.
"""
def __init__(self, handlers=None, default_host="", transforms=None,
wsgi=False, **settings):
"""
:param handlers:
:param default_host:
:param transforms:
:param wsgi:
:param settings:
- gzip : 压缩
- static_path : 静态资源路径
- debug : 调试开关
:return:
"""
if transforms is None:
self.transforms = []
if settings.get("gzip"): # 配置选项
self.transforms.append(GZipContentEncoding)
self.transforms.append(ChunkedTransferEncoding)
else:
self.transforms = transforms
self.handlers = []
self.named_handlers = {}
self.default_host = default_host
self.settings = settings # 自定义配置项
self.ui_modules = {}
self.ui_methods = {}
self._wsgi = wsgi
self._load_ui_modules(settings.get("ui_modules", {}))
self._load_ui_methods(settings.get("ui_methods", {}))
if self.settings.get("static_path"): # 配置项中含: 静态资源路径
path = self.settings["static_path"]
handlers = list(handlers or [])
static_url_prefix = settings.get("static_url_prefix",
"/static/")
handlers = [
(re.escape(static_url_prefix) + r"(.*)", StaticFileHandler,
dict(path=path)),
(r"/(favicon\.ico)", StaticFileHandler, dict(path=path)),
(r"/(robots\.txt)", StaticFileHandler, dict(path=path)),
] + handlers
if handlers:
self.add_handlers(".*$", handlers) # 关键调用
# Automatically reload modified modules
if self.settings.get("debug") and not wsgi: # 调试模式时, 自动监测,并重启项目
import autoreload # tornado 自定义模块
autoreload.start()
def add_handlers(self, host_pattern, host_handlers):
"""Appends the given handlers to our handler list."""
if not host_pattern.endswith("$"):
host_pattern += "$"
handlers = []
# The handlers with the wildcard host_pattern are a special
# case - they're added in the constructor but should have lower
# precedence than the more-precise handlers added later.
# If a wildcard handler group exists, it should always be last
# in the list, so insert new groups just before it.
if self.handlers and self.handlers[-1][0].pattern == '.*$':
self.handlers.insert(-1, (re.compile(host_pattern), handlers)) # 正则匹配
else:
self.handlers.append((re.compile(host_pattern), handlers)) # 正则匹配
for spec in host_handlers:
if type(spec) is type(()): # 元组
assert len(spec) in (2, 3)
pattern = spec[0]
handler = spec[1]
if len(spec) == 3:
kwargs = spec[2]
else:
kwargs = {}
spec = URLSpec(pattern, handler, kwargs) # 关键调用
handlers.append(spec)
if spec.name:
if spec.name in self.named_handlers:
logging.warning(
"Multiple handlers named %s; replacing previous value",
spec.name)
self.named_handlers[spec.name] = spec
def add_transform(self, transform_class):
"""Adds the given OutputTransform to our transform list."""
self.transforms.append(transform_class)
def _get_host_handlers(self, request):
host = request.host.lower().split(':')[0]
for pattern, handlers in self.handlers:
if pattern.match(host):
return handlers
# Look for default host if not behind load balancer (for debugging)
if "X-Real-Ip" not in request.headers:
for pattern, handlers in self.handlers:
if pattern.match(self.default_host):
return handlers
return None
def _load_ui_methods(self, methods):
if type(methods) is types.ModuleType:
self._load_ui_methods(dict((n, getattr(methods, n))
for n in dir(methods)))
elif isinstance(methods, list):
for m in methods:
self._load_ui_methods(m)
else:
for name, fn in methods.iteritems():
if not name.startswith("_") and hasattr(fn, "__call__") \
and name[0].lower() == name[0]:
self.ui_methods[name] = fn
def _load_ui_modules(self, modules):
if type(modules) is types.ModuleType:
self._load_ui_modules(dict((n, getattr(modules, n))
for n in dir(modules)))
elif isinstance(modules, list):
for m in modules:
self._load_ui_modules(m)
else:
assert isinstance(modules, dict)
for name, cls in modules.iteritems():
try:
if issubclass(cls, UIModule):
self.ui_modules[name] = cls
except TypeError:
pass
# 关键定义: 类对象 --> 可调用对象
#
# 注意: 被调用时机
# - wsgi.py
# - WSGIApplication()
# - self.__call__() 方法
#
def __call__(self, request):
"""Called by HTTPServer to execute the request."""
transforms = [t(request) for t in self.transforms]
handler = None
args = []
kwargs = {}
handlers = self._get_host_handlers(request)
if not handlers:
handler = RedirectHandler(
request, "http://" + self.default_host + "/")
else:
for spec in handlers:
match = spec.regex.match(request.path)
if match:
# None-safe wrapper around urllib.unquote to handle
# unmatched optional groups correctly
def unquote(s):
if s is None: return s
return urllib.unquote(s)
handler = spec.handler_class(self, request, **spec.kwargs)
# Pass matched groups to the handler. Since
# match.groups() includes both named and unnamed groups,
# we want to use either groups or groupdict but not both.
kwargs = dict((k, unquote(v))
for (k, v) in match.groupdict().iteritems())
if kwargs:
args = []
else:
args = [unquote(s) for s in match.groups()]
break
if not handler:
handler = ErrorHandler(self, request, 404)
# In debug mode, re-compile templates and reload static files on every
# request so you don't need to restart to see changes
if self.settings.get("debug"):
if getattr(RequestHandler, "_templates", None):
map(lambda loader: loader.reset(),
RequestHandler._templates.values())
RequestHandler._static_hashes = {}
# 关键代码调用时机:
handler._execute(transforms, *args, **kwargs)
return handler
def reverse_url(self, name, *args):
"""Returns a URL path for handler named `name`
The handler must be added to the application as a named URLSpec
"""
if name in self.named_handlers:
return self.named_handlers[name].reverse(*args)
raise KeyError("%s not found in named urls" % name)
# ----------------------------------------------------
# Base exception class
# ----------------------------------------------------
class HTTPError(Exception):
"""An exception that will turn into an HTTP error response."""
def __init__(self, status_code, log_message=None, *args):
self.status_code = status_code
self.log_message = log_message
self.args = args
def __str__(self):
message = "HTTP %d: %s" % (
self.status_code, httplib.responses[self.status_code])
if self.log_message:
return message + " (" + (self.log_message % self.args) + ")"
else:
return message
# ----------------------------------------------------
# Subclass: error handling
# ----------------------------------------------------
class ErrorHandler(RequestHandler):
"""Generates an error response with status_code for all requests."""
def __init__(self, application, request, status_code):
RequestHandler.__init__(self, application, request)
self.set_status(status_code)
def prepare(self):
raise HTTPError(self._status_code)
# ----------------------------------------------------
# Subclass: redirect handling
# ----------------------------------------------------
class RedirectHandler(RequestHandler):
"""Redirects the client to the given URL for all GET requests.
You should provide the keyword argument "url" to the handler, e.g.:
application = web.Application([
(r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
])
"""
def __init__(self, application, request, url, permanent=True):
RequestHandler.__init__(self, application, request)
self._url = url
self._permanent = permanent
# GET 请求,变成 重定向调用
def get(self):
self.redirect(self._url, permanent=self._permanent)
# ----------------------------------------------------
# Subclass: static resource handling
# Notes:
#   - overrides the get() and head() methods
# ----------------------------------------------------
class StaticFileHandler(RequestHandler):
"""A simple handler that can serve static content from a directory.
To map a path to this handler for a static data directory /var/www,
you would add a line to your application like:
application = web.Application([
(r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
The local root directory of the content should be passed as the "path"
argument to the handler.
To support aggressive browser caching, if the argument "v" is given
with the path, we set an infinite HTTP expiration header. So, if you
want browsers to cache a file indefinitely, send them to, e.g.,
/static/images/myimage.png?v=xxx.
"""
def __init__(self, application, request, path):
RequestHandler.__init__(self, application, request)
self.root = os.path.abspath(path) + os.path.sep
def head(self, path):
self.get(path, include_body=False)
def get(self, path, include_body=True):
abspath = os.path.abspath(os.path.join(self.root, path))
if not abspath.startswith(self.root):
raise HTTPError(403, "%s is not in root static directory", path)
if not os.path.exists(abspath):
raise HTTPError(404)
if not os.path.isfile(abspath):
raise HTTPError(403, "%s is not a file", path)
stat_result = os.stat(abspath)
modified = datetime.datetime.fromtimestamp(stat_result[stat.ST_MTIME])
self.set_header("Last-Modified", modified)
if "v" in self.request.arguments:
self.set_header("Expires", datetime.datetime.utcnow() + \
datetime.timedelta(days=365*10))
self.set_header("Cache-Control", "max-age=" + str(86400*365*10))
else:
self.set_header("Cache-Control", "public")
mime_type, encoding = mimetypes.guess_type(abspath)
if mime_type:
self.set_header("Content-Type", mime_type)
self.set_extra_headers(path)
# Check the If-Modified-Since, and don't send the result if the
# content has not been modified
ims_value = self.request.headers.get("If-Modified-Since")
if ims_value is not None:
date_tuple = email.utils.parsedate(ims_value)
if_since = datetime.datetime.fromtimestamp(time.mktime(date_tuple))
if if_since >= modified:
self.set_status(304)
return
if not include_body:
return
self.set_header("Content-Length", stat_result[stat.ST_SIZE])
file = open(abspath, "rb") # 读文件
try:
self.write(file.read()) # 写出
finally:
file.close()
def set_extra_headers(self, path):
"""For subclass to add extra headers to the response"""
pass
# ----------------------------------------------------
# Subclass: wraps another HTTP server callback
# Notes:
#   - overrides the prepare() hook
# ----------------------------------------------------
class FallbackHandler(RequestHandler):
"""A RequestHandler that wraps another HTTP server callback.
The fallback is a callable object that accepts an HTTPRequest,
such as an Application or tornado.wsgi.WSGIContainer. This is most
useful to use both tornado RequestHandlers and WSGI in the same server.
Typical usage:
wsgi_app = tornado.wsgi.WSGIContainer(
django.core.handlers.wsgi.WSGIHandler())
application = tornado.web.Application([
(r"/foo", FooHandler),
(r".*", FallbackHandler, dict(fallback=wsgi_app),
])
"""
def __init__(self, app, request, fallback):
RequestHandler.__init__(self, app, request)
self.fallback = fallback
# 覆写接口
def prepare(self):
self.fallback(self.request)
self._finished = True
# ----------------------------------------------------
# Base class: output transforms
# Notes:
#   - two subclasses:
#       - GZipContentEncoding()
#       - ChunkedTransferEncoding()
# ----------------------------------------------------
class OutputTransform(object):
"""A transform modifies the result of an HTTP request (e.g., GZip encoding)
A new transform instance is created for every request. See the
ChunkedTransferEncoding example below if you want to implement a
new Transform.
"""
def __init__(self, request):
pass
def transform_first_chunk(self, headers, chunk, finishing):
return headers, chunk
def transform_chunk(self, chunk, finishing):
return chunk
class GZipContentEncoding(OutputTransform):
"""Applies the gzip content encoding to the response.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
"""
CONTENT_TYPES = set([
"text/plain", "text/html", "text/css", "text/xml",
"application/x-javascript", "application/xml", "application/atom+xml",
"text/javascript", "application/json", "application/xhtml+xml"])
MIN_LENGTH = 5
def __init__(self, request):
self._gzipping = request.supports_http_1_1() and \
"gzip" in request.headers.get("Accept-Encoding", "")
def transform_first_chunk(self, headers, chunk, finishing):
if self._gzipping:
ctype = headers.get("Content-Type", "").split(";")[0]
self._gzipping = (ctype in self.CONTENT_TYPES) and \
(not finishing or len(chunk) >= self.MIN_LENGTH) and \
(finishing or "Content-Length" not in headers) and \
("Content-Encoding" not in headers)
if self._gzipping:
headers["Content-Encoding"] = "gzip"
self._gzip_value = cStringIO.StringIO()
self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
self._gzip_pos = 0
chunk = self.transform_chunk(chunk, finishing) # 关键调用
if "Content-Length" in headers:
headers["Content-Length"] = str(len(chunk))
return headers, chunk
def transform_chunk(self, chunk, finishing):
if self._gzipping:
self._gzip_file.write(chunk)
if finishing:
self._gzip_file.close()
else:
self._gzip_file.flush()
chunk = self._gzip_value.getvalue()
if self._gzip_pos > 0:
chunk = chunk[self._gzip_pos:]
self._gzip_pos += len(chunk)
return chunk
class ChunkedTransferEncoding(OutputTransform):
"""Applies the chunked transfer encoding to the response.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1
"""
def __init__(self, request):
self._chunking = request.supports_http_1_1()
def transform_first_chunk(self, headers, chunk, finishing):
if self._chunking:
# No need to chunk the output if a Content-Length is specified
if "Content-Length" in headers or "Transfer-Encoding" in headers:
self._chunking = False
else:
headers["Transfer-Encoding"] = "chunked"
chunk = self.transform_chunk(chunk, finishing)
return headers, chunk
def transform_chunk(self, block, finishing):
if self._chunking:
# Don't write out empty chunks because that means END-OF-STREAM
# with chunked encoding
if block:
block = ("%x" % len(block)) + "\r\n" + block + "\r\n"
if finishing:
block += "0\r\n\r\n"
return block
# ----------------------------------------------------
# Decorator: authentication check
# Logic:
#   - if the current user is logged in, call the method normally
#   - if the user is not logged in:
#       - for GET requests:
#           - fetch the login-page URL
#           - record the page being requested in a "next" query field
#           - redirect to the login page
#           - after a successful login, the "next" field is used to jump
#             back to the page the user originally requested
#       - for any other method, raise a 403 error
# Notes:
#   - a textbook implementation of access control, worth studying
#   - the code is concise and not complicated
# ----------------------------------------------------
def authenticated(method):
"""Decorate methods with this to require that the user be logged in."""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if not self.current_user: # 用户未登录
if self.request.method == "GET": # GET 请求 处理
url = self.get_login_url() # 获取登录页面的 URL
if "?" not in url:
# 关键处理:
# - 在 URL 中,添加 <next>字段 [格式: ?next=/xxxx.html]
# - 目的: 当用户成功登录后,返回到登录前,访问的页面
url += "?" + urllib.urlencode(dict(next=self.request.uri))
self.redirect(url) # 重定向
return
raise HTTPError(403) # 其他请求, 抛出 403 错误
return method(self, *args, **kwargs) # 用户已登录时, 正常调用
return wrapper
# ----------------------------------------------------
# Interface class: UI modules (CSS/JS helpers)
# Notes:
#   - defines hook methods that must be overridden in a subclass
# ----------------------------------------------------
class UIModule(object):
"""A UI re-usable, modular unit on a page.
UI modules often execute additional queries, and they can include
additional CSS and JavaScript that will be included in the output
page, which is automatically inserted on page render.
"""
def __init__(self, handler):
self.handler = handler
self.request = handler.request
self.ui = handler.ui
self.current_user = handler.current_user
self.locale = handler.locale
# 预定义接口: 必须要 覆写,才能用
def render(self, *args, **kwargs):
raise NotImplementedError()
def embedded_javascript(self):
"""Returns a JavaScript string that will be embedded in the page."""
return None
def javascript_files(self):
"""Returns a list of JavaScript files required by this module."""
return None
def embedded_css(self):
"""Returns a CSS string that will be embedded in the page."""
return None
def css_files(self):
"""Returns a list of CSS files required by this module."""
return None
def html_head(self):
"""Returns a CSS string that will be put in the <head/> element"""
return None
def html_body(self):
"""Returns an HTML string that will be put in the <body/> element"""
return None
def render_string(self, path, **kwargs):
return self.handler.render_string(path, **kwargs)
# ----------------------------------------------------
# Interface class: URL matching
# Notes:
#   - maps URLs to handlers
# ----------------------------------------------------
class URLSpec(object):
"""Specifies mappings between URLs and handlers."""
def __init__(self, pattern, handler_class, kwargs={}, name=None):
"""Creates a URLSpec.
Parameters:
pattern: Regular expression to be matched. Any groups in the regex
will be passed in to the handler's get/post/etc methods as
arguments.
handler_class: RequestHandler subclass to be invoked.
kwargs (optional): A dictionary of additional arguments to be passed
to the handler's constructor.
name (optional): A name for this handler. Used by
Application.reverse_url.
"""
if not pattern.endswith('$'):
pattern += '$'
self.regex = re.compile(pattern) # 正则匹配
self.handler_class = handler_class
self.kwargs = kwargs
self.name = name
self._path, self._group_count = self._find_groups()
def _find_groups(self):
"""Returns a tuple (reverse string, group count) for a url.
For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
would return ('/%s/%s/', 2).
"""
pattern = self.regex.pattern
if pattern.startswith('^'):
pattern = pattern[1:]
if pattern.endswith('$'):
pattern = pattern[:-1]
if self.regex.groups != pattern.count('('):
# The pattern is too complicated for our simplistic matching,
# so we can't support reversing it.
return (None, None)
pieces = []
for fragment in pattern.split('('):
if ')' in fragment:
paren_loc = fragment.index(')')
if paren_loc >= 0:
pieces.append('%s' + fragment[paren_loc + 1:])
else:
pieces.append(fragment)
return (''.join(pieces), self.regex.groups)
def reverse(self, *args):
assert self._path is not None, \
"Cannot reverse url regex " + self.regex.pattern
assert len(args) == self._group_count, "required number of arguments "\
"not found"
if not len(args):
return self._path
return self._path % tuple([str(a) for a in args])
url = URLSpec
# ----------------------------------------------------
# UTF-8 helper: encoding check
# Logic:
#   - if s is a unicode string, encode it as UTF-8 and return it
#   - if s is not a string type at all, fail immediately
#   - if s is an ASCII/byte string, return it unchanged
# ----------------------------------------------------
def _utf8(s):
if isinstance(s, unicode):
return s.encode("utf-8")
assert isinstance(s, str)
return s
# ----------------------------------------------------
# Unicode helper: decoding check
# Logic:
#   - the mirror image of _utf8()
# ----------------------------------------------------
def _unicode(s):
if isinstance(s, str):
try:
return s.decode("utf-8")
except UnicodeDecodeError:
raise HTTPError(400, "Non-utf8 argument")
assert isinstance(s, unicode)
return s
def _time_independent_equals(a, b):
if len(a) != len(b):
return False
result = 0
for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
class _O(dict):
"""Makes a dictionary behave like an object."""
def __getattr__(self, name):
try:
return self[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
self[name] = value
| hhstore/tornado-annotated | src/tornado-1.0.0/tornado/web.py | Python | mit | 66,814 |
class CheckerResults:
def __init__(self, entries, invalidParentEntries):
self._entries = entries
self._invalidParentEntries = invalidParentEntries
@property
def entries(self):
return self._entries
@property
def invalidParentEntries(self):
return self._invalidParentEntries
def check(conn):
curs = conn.cursor()
curs.execute("select id from moz_bookmarks")
# Record the built-in root folder with ID 0
ids = [ 0 ]
for row in curs:
ids.append(row['id'])
curs.execute("select id, parent, title from moz_bookmarks")
invalidParentEntries = []
for row in curs:
if row['parent'] not in ids:
invalidParentEntries.append(row)
conn.close()
#return { 'entries' : len(ids) - 1, 'invalidParentEntries' : len(invalidParentEntries) }
return CheckerResults(len(ids) - 1, len(invalidParentEntries))
| justincc/firefox-bookmarks-integrity-check | src/fbic/checker.py | Python | apache-2.0 | 985 |
"""Provide an authentication layer for Home Assistant."""
import asyncio
from collections import OrderedDict
from datetime import timedelta
import logging
from typing import Any, Dict, List, Optional, Tuple, cast
import jwt
from homeassistant import data_entry_flow
from homeassistant.auth.const import ACCESS_TOKEN_EXPIRATION
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import dt as dt_util
from . import auth_store, models
from .const import GROUP_ID_ADMIN
from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config
from .providers import AuthProvider, LoginFlow, auth_provider_from_config
EVENT_USER_ADDED = "user_added"
EVENT_USER_REMOVED = "user_removed"
_LOGGER = logging.getLogger(__name__)
_MfaModuleDict = Dict[str, MultiFactorAuthModule]
_ProviderKey = Tuple[str, Optional[str]]
_ProviderDict = Dict[_ProviderKey, AuthProvider]
async def auth_manager_from_config(
hass: HomeAssistant,
provider_configs: List[Dict[str, Any]],
module_configs: List[Dict[str, Any]],
) -> "AuthManager":
"""Initialize an auth manager from config.
CORE_CONFIG_SCHEMA will make sure do duplicated auth providers or
mfa modules exist in configs.
"""
store = auth_store.AuthStore(hass)
if provider_configs:
providers = await asyncio.gather(
*(
auth_provider_from_config(hass, store, config)
for config in provider_configs
)
)
else:
providers = []
# So returned auth providers are in same order as config
provider_hash: _ProviderDict = OrderedDict()
for provider in providers:
key = (provider.type, provider.id)
provider_hash[key] = provider
if module_configs:
modules = await asyncio.gather(
*(auth_mfa_module_from_config(hass, config) for config in module_configs)
)
else:
modules = []
# So returned auth modules are in same order as config
module_hash: _MfaModuleDict = OrderedDict()
for module in modules:
module_hash[module.id] = module
manager = AuthManager(hass, store, provider_hash, module_hash)
return manager
class AuthManagerFlowManager(data_entry_flow.FlowManager):
"""Manage authentication flows."""
def __init__(self, hass: HomeAssistant, auth_manager: "AuthManager"):
"""Init auth manager flows."""
super().__init__(hass)
self.auth_manager = auth_manager
async def async_create_flow(
self,
handler_key: Any,
*,
context: Optional[Dict[str, Any]] = None,
data: Optional[Dict[str, Any]] = None,
) -> data_entry_flow.FlowHandler:
"""Create a login flow."""
auth_provider = self.auth_manager.get_auth_provider(*handler_key)
if not auth_provider:
raise KeyError(f"Unknown auth provider {handler_key}")
return await auth_provider.async_login_flow(context)
async def async_finish_flow(
self, flow: data_entry_flow.FlowHandler, result: Dict[str, Any]
) -> Dict[str, Any]:
"""Return a user as result of login flow."""
flow = cast(LoginFlow, flow)
if result["type"] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
return result
# we got final result
if isinstance(result["data"], models.User):
result["result"] = result["data"]
return result
auth_provider = self.auth_manager.get_auth_provider(*result["handler"])
if not auth_provider:
raise KeyError(f"Unknown auth provider {result['handler']}")
credentials = await auth_provider.async_get_or_create_credentials(
result["data"]
)
if flow.context.get("credential_only"):
result["result"] = credentials
return result
# multi-factor module cannot enabled for new credential
# which has not linked to a user yet
if auth_provider.support_mfa and not credentials.is_new:
user = await self.auth_manager.async_get_user_by_credentials(credentials)
if user is not None:
modules = await self.auth_manager.async_get_enabled_mfa(user)
if modules:
flow.user = user
flow.available_mfa_modules = modules
return await flow.async_step_select_mfa_module()
result["result"] = await self.auth_manager.async_get_or_create_user(credentials)
return result
class AuthManager:
"""Manage the authentication for Home Assistant."""
def __init__(
self,
hass: HomeAssistant,
store: auth_store.AuthStore,
providers: _ProviderDict,
mfa_modules: _MfaModuleDict,
) -> None:
"""Initialize the auth manager."""
self.hass = hass
self._store = store
self._providers = providers
self._mfa_modules = mfa_modules
self.login_flow = AuthManagerFlowManager(hass, self)
@property
def auth_providers(self) -> List[AuthProvider]:
"""Return a list of available auth providers."""
return list(self._providers.values())
@property
def auth_mfa_modules(self) -> List[MultiFactorAuthModule]:
"""Return a list of available auth modules."""
return list(self._mfa_modules.values())
def get_auth_provider(
self, provider_type: str, provider_id: str
) -> Optional[AuthProvider]:
"""Return an auth provider, None if not found."""
return self._providers.get((provider_type, provider_id))
def get_auth_providers(self, provider_type: str) -> List[AuthProvider]:
"""Return a List of auth provider of one type, Empty if not found."""
return [
provider
for (p_type, _), provider in self._providers.items()
if p_type == provider_type
]
def get_auth_mfa_module(self, module_id: str) -> Optional[MultiFactorAuthModule]:
"""Return a multi-factor auth module, None if not found."""
return self._mfa_modules.get(module_id)
async def async_get_users(self) -> List[models.User]:
"""Retrieve all users."""
return await self._store.async_get_users()
async def async_get_user(self, user_id: str) -> Optional[models.User]:
"""Retrieve a user."""
return await self._store.async_get_user(user_id)
async def async_get_owner(self) -> Optional[models.User]:
"""Retrieve the owner."""
users = await self.async_get_users()
return next((user for user in users if user.is_owner), None)
async def async_get_group(self, group_id: str) -> Optional[models.Group]:
"""Retrieve all groups."""
return await self._store.async_get_group(group_id)
async def async_get_user_by_credentials(
self, credentials: models.Credentials
) -> Optional[models.User]:
"""Get a user by credential, return None if not found."""
for user in await self.async_get_users():
for creds in user.credentials:
if creds.id == credentials.id:
return user
return None
async def async_create_system_user(
    self, name: str, group_ids: Optional[List[str]] = None
) -> models.User:
    """Create a system user.

    System users are always active and are never promoted to owner.
    """
    user = await self._store.async_create_user(
        name=name, system_generated=True, is_active=True, group_ids=group_ids or []
    )
    self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id})
    return user
async def async_create_user(
    self, name: str, group_ids: Optional[List[str]] = None
) -> models.User:
    """Create a user.

    The first non-system user ever created is promoted to owner.
    """
    kwargs: Dict[str, Any] = {
        "name": name,
        "is_active": True,
        "group_ids": group_ids or [],
    }
    if await self._user_should_be_owner():
        kwargs["is_owner"] = True
    user = await self._store.async_create_user(**kwargs)
    self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id})
    return user
async def async_get_or_create_user(
    self, credentials: models.Credentials
) -> models.User:
    """Get or create a user."""
    if not credentials.is_new:
        # Credentials already persisted: they must belong to an existing user.
        user = await self.async_get_user_by_credentials(credentials)
        if user is None:
            raise ValueError("Unable to find the user.")
        return user
    auth_provider = self._async_get_auth_provider(credentials)
    if auth_provider is None:
        raise RuntimeError("Credential with unknown provider encountered")
    # Ask the provider for display metadata; new users land in the admin group.
    info = await auth_provider.async_user_meta_for_credentials(credentials)
    user = await self._store.async_create_user(
        credentials=credentials,
        name=info.name,
        is_active=info.is_active,
        group_ids=[GROUP_ID_ADMIN],
    )
    self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id})
    return user
async def async_link_user(
    self, user: models.User, credentials: models.Credentials
) -> None:
    """Link credentials to an existing user."""
    await self._store.async_link_user(user, credentials)
async def async_remove_user(self, user: models.User) -> None:
    """Remove a user and every credential linked to them.

    Provider-specific cleanup hooks run for each credential before the
    user itself is deleted from the store.
    """
    # asyncio.wait() no longer accepts bare coroutines (deprecated in
    # Python 3.8, rejected in 3.11+) and silently discards exceptions.
    # gather() runs the same cleanup concurrently and propagates the first
    # failure, so we do not delete the user when cleanup failed.
    if user.credentials:
        await asyncio.gather(
            *(
                self.async_remove_credentials(credentials)
                for credentials in user.credentials
            )
        )
    await self._store.async_remove_user(user)
    self.hass.bus.async_fire(EVENT_USER_REMOVED, {"user_id": user.id})
async def async_update_user(
    self,
    user: models.User,
    name: Optional[str] = None,
    group_ids: Optional[List[str]] = None,
) -> None:
    """Update a user's name and/or group membership; None means unchanged."""
    changes: Dict[str, Any] = {
        field: value
        for field, value in (("name", name), ("group_ids", group_ids))
        if value is not None
    }
    await self._store.async_update_user(user, **changes)
async def async_activate_user(self, user: models.User) -> None:
    """Activate a user."""
    await self._store.async_activate_user(user)
async def async_deactivate_user(self, user: models.User) -> None:
    """Deactivate a user.

    Raises ValueError when asked to deactivate the owner.
    """
    if user.is_owner:
        raise ValueError("Unable to deactivate the owner")
    await self._store.async_deactivate_user(user)
async def async_remove_credentials(self, credentials: models.Credentials) -> None:
    """Remove credentials.

    Gives the owning auth provider a chance to clean up first; the hook
    is optional on providers, hence the hasattr check.
    """
    provider = self._async_get_auth_provider(credentials)
    if provider is not None and hasattr(provider, "async_will_remove_credentials"):
        # https://github.com/python/mypy/issues/1424
        await provider.async_will_remove_credentials(credentials)  # type: ignore
    await self._store.async_remove_credentials(credentials)
async def async_enable_user_mfa(
    self, user: models.User, mfa_module_id: str, data: Any
) -> None:
    """Enable a multi-factor auth module for a user.

    Raises ValueError for system-generated users or unknown module ids.
    """
    if user.system_generated:
        raise ValueError(
            "System generated users cannot enable multi-factor auth module."
        )
    module = self.get_auth_mfa_module(mfa_module_id)
    if module is None:
        # Fixed grammar in the error message ("Unable find" -> "Unable to find").
        raise ValueError(f"Unable to find multi-factor auth module: {mfa_module_id}")
    await module.async_setup_user(user.id, data)
async def async_disable_user_mfa(
    self, user: models.User, mfa_module_id: str
) -> None:
    """Disable a multi-factor auth module for a user.

    Raises ValueError for system-generated users or unknown module ids.
    """
    if user.system_generated:
        raise ValueError(
            "System generated users cannot disable multi-factor auth module."
        )
    module = self.get_auth_mfa_module(mfa_module_id)
    if module is None:
        # Fixed grammar in the error message ("Unable find" -> "Unable to find").
        raise ValueError(f"Unable to find multi-factor auth module: {mfa_module_id}")
    await module.async_depose_user(user.id)
async def async_get_enabled_mfa(self, user: models.User) -> Dict[str, str]:
    """Map module id to module name for every MFA module the user has set up."""
    enabled: Dict[str, str] = OrderedDict()
    for module_id, module in self._mfa_modules.items():
        is_set_up = await module.async_is_user_setup(user.id)
        if is_set_up:
            enabled[module_id] = module.name
    return enabled
async def async_create_refresh_token(
    self,
    user: models.User,
    client_id: Optional[str] = None,
    client_name: Optional[str] = None,
    client_icon: Optional[str] = None,
    token_type: Optional[str] = None,
    access_token_expiration: timedelta = ACCESS_TOKEN_EXPIRATION,
) -> models.RefreshToken:
    """Create a new refresh token for a user.

    Validates that the requested token type is consistent with the kind
    of user (system vs. normal) and with the supplied client information
    before delegating creation to the store.
    """
    if not user.is_active:
        raise ValueError("User is not active")
    if user.system_generated and client_id is not None:
        raise ValueError(
            "System generated users cannot have refresh tokens connected "
            "to a client."
        )
    # Default token type: system users get system tokens, others normal ones.
    if token_type is None:
        if user.system_generated:
            token_type = models.TOKEN_TYPE_SYSTEM
        else:
            token_type = models.TOKEN_TYPE_NORMAL
    if user.system_generated != (token_type == models.TOKEN_TYPE_SYSTEM):
        raise ValueError(
            "System generated users can only have system type refresh tokens"
        )
    if token_type == models.TOKEN_TYPE_NORMAL and client_id is None:
        raise ValueError("Client is required to generate a refresh token.")
    if (
        token_type == models.TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN
        and client_name is None
    ):
        raise ValueError("Client_name is required for long-lived access token")
    if token_type == models.TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN:
        for token in user.refresh_tokens.values():
            if (
                token.client_name == client_name
                and token.token_type == models.TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN
            ):
                # Each client_name can only have one
                # long_lived_access_token type of refresh token
                raise ValueError(f"{client_name} already exists")
    return await self._store.async_create_refresh_token(
        user,
        client_id,
        client_name,
        client_icon,
        token_type,
        access_token_expiration,
    )
async def async_get_refresh_token(
    self, token_id: str
) -> Optional[models.RefreshToken]:
    """Get refresh token by id."""
    # Thin async passthrough to the token store.
    return await self._store.async_get_refresh_token(token_id)
async def async_get_refresh_token_by_token(
    self, token: str
) -> Optional[models.RefreshToken]:
    """Get refresh token by token."""
    return await self._store.async_get_refresh_token_by_token(token)
async def async_remove_refresh_token(
    self, refresh_token: models.RefreshToken
) -> None:
    """Delete a refresh token."""
    await self._store.async_remove_refresh_token(refresh_token)
@callback
def async_create_access_token(
    self, refresh_token: models.RefreshToken, remote_ip: Optional[str] = None
) -> str:
    """Create a new access token.

    The token is an HS256-signed JWT whose issuer claim is the refresh
    token id; usage (incl. remote_ip) is logged on the refresh token.
    """
    self._store.async_log_refresh_token_usage(refresh_token, remote_ip)
    now = dt_util.utcnow()
    return jwt.encode(
        {
            "iss": refresh_token.id,
            "iat": now,
            "exp": now + refresh_token.access_token_expiration,
        },
        refresh_token.jwt_key,
        algorithm="HS256",
    ).decode()  # assumes PyJWT 1.x (encode returns bytes) — TODO confirm pinned version
async def async_validate_access_token(
    self, token: str
) -> Optional[models.RefreshToken]:
    """Return refresh token if an access token is valid."""
    try:
        # First decode WITHOUT verification just to read the issuer claim.
        unverif_claims = jwt.decode(token, verify=False)
    except jwt.InvalidTokenError:
        return None
    refresh_token = await self.async_get_refresh_token(
        cast(str, unverif_claims.get("iss"))
    )
    if refresh_token is None:
        # Unknown issuer: still run the full decode below with dummy
        # key/issuer so known and unknown issuers take a similar path.
        jwt_key = ""
        issuer = ""
    else:
        jwt_key = refresh_token.jwt_key
        issuer = refresh_token.id
    try:
        # leeway=10 tolerates small clock skew when checking exp/iat.
        jwt.decode(token, jwt_key, leeway=10, issuer=issuer, algorithms=["HS256"])
    except jwt.InvalidTokenError:
        return None
    if refresh_token is None or not refresh_token.user.is_active:
        return None
    return refresh_token
@callback
def _async_get_auth_provider(
    self, credentials: models.Credentials
) -> Optional[AuthProvider]:
    """Resolve the auth provider that issued the given credentials."""
    return self._providers.get(
        (credentials.auth_provider_type, credentials.auth_provider_id)
    )
async def _user_should_be_owner(self) -> bool:
    """Determine if user should be owner.
    A user should be an owner if it is the first non-system user that is
    being created.
    """
    users = await self._store.async_get_users()
    return all(user.system_generated for user in users)
| tchellomello/home-assistant | homeassistant/auth/__init__.py | Python | apache-2.0 | 17,537 |
# django-rsvp package metadata.
__author__ = 'Daniel Lindsley'
__version__ = '1.1'
| toastdriven/django-rsvp | rsvp/__init__.py | Python | mit | 51 |
#!/usr/bin/env python
#
# Copyright 2016 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Generate Android.bp for Skia from GN configuration.
from __future__ import print_function
import os
import pprint
import string
import subprocess
import tempfile
import skqp_gn_args
import gn_to_bp_utils
# First we start off with a template for Android.bp,
# with holes for source lists and include directories.
bp = string.Template('''// This file is autogenerated by gn_to_bp.py.
// To make changes to this file, follow the instructions on skia.org for
// downloading Skia and submitting changes. Modify gn_to_bp.py (or the build
// files it uses) and submit. The autoroller will then create the updated
// Android.bp. Or ask a Skia engineer for help.
package {
default_applicable_licenses: ["external_skia_license"],
}
// Added automatically by a large-scale-change that took the approach of
// 'apply every license found to every target'. While this makes sure we respect
// every license restriction, it may not be entirely correct.
//
// e.g. GPL in an MIT project might only apply to the contrib/ directory.
//
// Please consider splitting the single license below into multiple licenses,
// taking care not to lose any license_kind information, and overriding the
// default license using the 'licenses: [...]' property on targets as needed.
//
// For unused files, consider creating a 'fileGroup' with "//visibility:private"
// to attach the license to, and including a comment whether the files may be
// used in the current project.
//
// large-scale-change included anything that looked like it might be a license
// text as a license_text. e.g. LICENSE, NOTICE, COPYING etc.
//
// Please consider removing redundant or irrelevant files from 'license_text:'.
//
// large-scale-change filtered out the below license kinds as false-positives:
// SPDX-license-identifier-CC-BY-NC
// SPDX-license-identifier-GPL-2.0
// SPDX-license-identifier-LGPL-2.1
// SPDX-license-identifier-OFL:by_exception_only
// See: http://go/android-license-faq
license {
name: "external_skia_license",
visibility: [":__subpackages__"],
license_kinds: [
"SPDX-license-identifier-Apache-2.0",
"SPDX-license-identifier-BSD",
"SPDX-license-identifier-CC0-1.0",
"SPDX-license-identifier-FTL",
"SPDX-license-identifier-MIT",
"legacy_unencumbered",
],
license_text: [
"LICENSE",
"NOTICE",
],
}
cc_defaults {
name: "skia_arch_defaults",
arch: {
arm: {
srcs: [
$arm_srcs
],
neon: {
srcs: [
$arm_neon_srcs
],
},
},
arm64: {
srcs: [
$arm64_srcs
],
},
x86: {
srcs: [
$x86_srcs
],
},
x86_64: {
srcs: [
$x86_srcs
],
},
},
target: {
android: {
srcs: [
"third_party/vulkanmemoryallocator/GrVulkanMemoryAllocator.cpp",
],
local_include_dirs: [
"third_party/vulkanmemoryallocator/",
],
},
},
}
cc_defaults {
name: "skia_defaults",
defaults: ["skia_arch_defaults"],
cflags: [
$cflags
],
cppflags:[
$cflags_cc
],
export_include_dirs: [
$export_includes
],
local_include_dirs: [
$local_includes
]
}
cc_library_static {
// Smaller version of Skia, without e.g. codecs, intended for use by RenderEngine.
name: "libskia_renderengine",
defaults: ["skia_defaults",
"skia_renderengine_deps"],
srcs: [
$renderengine_srcs
],
local_include_dirs: [
"renderengine",
],
export_include_dirs: [
"renderengine",
],
}
cc_library_static {
name: "libskia",
host_supported: true,
cppflags:[
// Exceptions are necessary for SkRawCodec.
// FIXME: Should we split SkRawCodec into a separate target so the rest
// of Skia need not be compiled with exceptions?
"-fexceptions",
],
srcs: [
$srcs
],
target: {
android: {
srcs: [
$android_srcs
],
local_include_dirs: [
"android",
],
export_include_dirs: [
"android",
],
},
linux_glibc: {
srcs: [
$linux_srcs
],
local_include_dirs: [
"linux",
],
export_include_dirs: [
"linux",
],
},
darwin: {
srcs: [
$mac_srcs
],
local_include_dirs: [
"mac",
],
export_include_dirs: [
"mac",
],
},
windows: {
enabled: true,
cflags: [
"-Wno-unknown-pragmas",
],
srcs: [
$win_srcs
],
local_include_dirs: [
"win",
],
export_include_dirs: [
"win",
],
},
},
defaults: ["skia_deps",
"skia_defaults",
],
}
cc_defaults {
// Subset of the larger "skia_deps", which includes only the dependencies
// needed for libskia_renderengine. Note that it includes libpng and libz
// for the purposes of MSKP captures, but we could instead leave it up to
// RenderEngine to provide its own SkSerializerProcs if another client
// wants an even smaller version of libskia.
name: "skia_renderengine_deps",
shared_libs: [
"libcutils",
"liblog",
"libpng",
"libz",
],
static_libs: [
"libarect",
],
group_static_libs: true,
target: {
android: {
shared_libs: [
"libEGL",
"libGLESv2",
"libvulkan",
"libnativewindow",
],
export_shared_lib_headers: [
"libvulkan",
],
},
},
}
cc_defaults {
name: "skia_deps",
defaults: ["skia_renderengine_deps"],
shared_libs: [
"libdng_sdk",
"libjpeg",
"libpiex",
"libexpat",
"libft2",
],
static_libs: [
"libwebp-decode",
"libwebp-encode",
"libsfntly",
"libwuffs_mirror_release_c",
],
target: {
android: {
shared_libs: [
"libheif",
],
},
darwin: {
host_ldlibs: [
"-framework AppKit",
],
},
windows: {
host_ldlibs: [
"-lgdi32",
"-loleaut32",
"-lole32",
"-lopengl32",
"-luuid",
"-lwindowscodecs",
],
},
},
}
cc_defaults {
name: "skia_tool_deps",
defaults: [
"skia_deps",
],
shared_libs: [
"libicu",
"libharfbuzz_ng",
],
static_libs: [
"libskia",
],
cflags: [
"-DSK_SHAPER_HARFBUZZ_AVAILABLE",
"-DSK_UNICODE_AVAILABLE",
"-Wno-implicit-fallthrough",
"-Wno-unused-parameter",
"-Wno-unused-variable",
],
target: {
windows: {
enabled: true,
},
},
data: [
"resources/**/*",
],
}
cc_defaults {
name: "skia_gm_srcs",
local_include_dirs: [
$gm_includes
],
srcs: [
$gm_srcs
],
}
cc_defaults {
name: "skia_test_minus_gm_srcs",
local_include_dirs: [
$test_minus_gm_includes
],
srcs: [
$test_minus_gm_srcs
],
}
cc_test {
name: "skia_dm",
defaults: [
"skia_gm_srcs",
"skia_test_minus_gm_srcs",
"skia_tool_deps",
],
local_include_dirs: [
$dm_includes
],
srcs: [
$dm_srcs
],
shared_libs: [
"libbinder",
"libutils",
],
}
cc_test {
name: "skia_nanobench",
defaults: [
"skia_gm_srcs",
"skia_tool_deps"
],
local_include_dirs: [
$nanobench_includes
],
srcs: [
$nanobench_srcs
],
lto: {
never: true,
},
}
cc_library_shared {
name: "libskqp_jni",
sdk_version: "$skqp_sdk_version",
stl: "libc++_static",
compile_multilib: "both",
defaults: [
"skia_arch_defaults",
],
cflags: [
$skqp_cflags
"-Wno-unused-parameter",
"-Wno-unused-variable",
],
cppflags:[
$skqp_cflags_cc
],
local_include_dirs: [
"skqp",
$skqp_includes
],
export_include_dirs: [
"skqp",
],
srcs: [
$skqp_srcs
],
header_libs: ["jni_headers"],
shared_libs: [
"libandroid",
"libEGL",
"libGLESv2",
"liblog",
"libvulkan",
"libz",
],
static_libs: [
"libexpat",
"libjpeg_static_ndk",
"libpng_ndk",
"libwebp-decode",
"libwebp-encode",
"libwuffs_mirror_release_c",
]
}
android_test {
name: "CtsSkQPTestCases",
defaults: ["cts_defaults"],
test_suites: [
"general-tests",
"cts",
],
libs: ["android.test.runner.stubs"],
jni_libs: ["libskqp_jni"],
compile_multilib: "both",
static_libs: [
"android-support-design",
"ctstestrunner-axt",
],
manifest: "platform_tools/android/apps/skqp/src/main/AndroidManifest.xml",
test_config: "platform_tools/android/apps/skqp/src/main/AndroidTest.xml",
asset_dirs: ["platform_tools/android/apps/skqp/src/main/assets", "resources"],
resource_dirs: ["platform_tools/android/apps/skqp/src/main/res"],
srcs: ["platform_tools/android/apps/skqp/src/main/java/**/*.java"],
sdk_version: "test_current",
}
''')
# We'll run GN to get the main source lists and include directories for Skia.
def generate_args(target_os, enable_gpu, renderengine = False):
    """Build the GN argument dict for one flavor of the Android.bp output.

    target_os is a GN string literal (e.g. '"android"'); enable_gpu toggles
    Vulkan/GPU support; renderengine selects the trimmed RenderEngine build.
    """
    args = {
        'is_official_build': 'true',
        # gn_to_bp_utils' GetArchSources will take care of architecture-specific
        # files.
        'target_cpu': '"none"',
        # Use the custom FontMgr, as the framework will handle fonts.
        'skia_enable_fontmgr_custom_directory': 'false',
        'skia_enable_fontmgr_custom_embedded': 'false',
        'skia_enable_fontmgr_android': 'false',
        'skia_enable_fontmgr_win': 'false',
        'skia_enable_fontmgr_win_gdi': 'false',
        'skia_use_fonthost_mac': 'false',
        # enable features used in skia_nanobench
        'skia_tools_require_resources': 'true',
        'skia_use_fontconfig': 'false',
        'skia_include_multiframe_procs': 'false',
        'target_os': target_os,
    }
    if target_os == '"android"':
        args['skia_enable_tools'] = 'true'
        args['skia_include_multiframe_procs'] = 'true'
    args['skia_use_vulkan'] = 'true' if enable_gpu else 'false'
    if not enable_gpu:
        args['skia_enable_gpu'] = 'false'
    if target_os == '"win"':
        # The Android Windows build system does not provide FontSub.h
        args['skia_use_xps'] = 'false'
        # BUILDCONFIG.gn expects these to be set when building for Windows, but
        # we're just creating Android.bp, so we don't need them. Populate with
        # some placeholder values.
        for key in ('win_vc', 'win_sdk_version', 'win_toolchain_version'):
            args[key] = '"placeholder_version"'
    is_android_full = target_os == '"android"' and not renderengine
    args['skia_use_libheif'] = 'true' if is_android_full else 'false'
    if renderengine:
        for key in ('skia_use_libpng_decode',
                    'skia_use_libjpeg_turbo_decode',
                    'skia_use_libjpeg_turbo_encode',
                    'skia_use_libwebp_decode',
                    'skia_use_libwebp_encode',
                    'skia_use_libgifcodec',
                    'skia_enable_pdf',
                    'skia_use_freetype',
                    'skia_use_fixed_gamma_text',
                    'skia_use_expat',
                    'skia_enable_fontmgr_custom_empty'):
            args[key] = 'false'
    else:
        args['skia_enable_android_utils'] = 'true'
        args['skia_use_freetype'] = 'true'
        args['skia_use_fixed_gamma_text'] = 'true'
        args['skia_enable_fontmgr_custom_empty'] = 'true'
        args['skia_use_wuffs'] = 'true'
    return args
# GN argument sets for every flavor emitted into Android.bp.
gn_args = generate_args('"android"', True)
gn_args_linux = generate_args('"linux"', False)
gn_args_mac = generate_args('"mac"', False)
gn_args_win = generate_args('"win"', False)
gn_args_renderengine = generate_args('"android"', True, True)
# Primary GN run (full Android build): JSON description of every target.
js = gn_to_bp_utils.GenerateJSONFromGN(gn_args)
def strip_slashes(lst):
    """Return the GN paths in lst as a set with leading '/' chars removed."""
    stripped = set()
    for path in lst:
        stripped.add(str(path.lstrip('/')))
    return stripped
# Pull source/flag/include lists for each Android target out of the GN JSON.
android_srcs = strip_slashes(js['targets']['//:skia']['sources'])
cflags = strip_slashes(js['targets']['//:skia']['cflags'])
cflags_cc = strip_slashes(js['targets']['//:skia']['cflags_cc'])
local_includes = strip_slashes(js['targets']['//:skia']['include_dirs'])
export_includes = strip_slashes(js['targets']['//:public']['include_dirs'])
gm_srcs = strip_slashes(js['targets']['//:gm']['sources'])
gm_includes = strip_slashes(js['targets']['//:gm']['include_dirs'])
test_srcs = strip_slashes(js['targets']['//:tests']['sources'])
test_includes = strip_slashes(js['targets']['//:tests']['include_dirs'])
dm_srcs = strip_slashes(js['targets']['//:dm']['sources'])
dm_includes = strip_slashes(js['targets']['//:dm']['include_dirs'])
nanobench_target = js['targets']['//:nanobench']
nanobench_srcs = strip_slashes(nanobench_target['sources'])
nanobench_includes = strip_slashes(nanobench_target['include_dirs'])
# Fold in each tool's transitive-dependency sources, minus what the listed
# base targets already provide.
gn_to_bp_utils.GrabDependentValues(js, '//:gm', 'sources', gm_srcs, '//:skia')
gn_to_bp_utils.GrabDependentValues(js, '//:tests', 'sources', test_srcs, '//:skia')
gn_to_bp_utils.GrabDependentValues(js, '//:dm', 'sources',
                                   dm_srcs, ['//:skia', '//:gm', '//:tests'])
gn_to_bp_utils.GrabDependentValues(js, '//:nanobench', 'sources',
                                   nanobench_srcs, ['//:skia', '//:gm'])
# skcms is a little special, kind of a second-party library.
local_includes.add("include/third_party/skcms")
gm_includes  .add("include/third_party/skcms")
# Android's build rejects header files in source lists, so filter them out.
def strip_headers(sources):
    """Return sources as a set with every '.h' file removed."""
    return set(source for source in sources if not source.endswith('.h'))
gn_to_bp_utils.GrabDependentValues(js, '//:skia', 'sources', android_srcs, None)
android_srcs = strip_headers(android_srcs)
# Repeat the GN run + source extraction for each desktop host OS.
js_linux = gn_to_bp_utils.GenerateJSONFromGN(gn_args_linux)
linux_srcs = strip_slashes(js_linux['targets']['//:skia']['sources'])
gn_to_bp_utils.GrabDependentValues(js_linux, '//:skia', 'sources', linux_srcs,
                                   None)
linux_srcs = strip_headers(linux_srcs)
js_mac = gn_to_bp_utils.GenerateJSONFromGN(gn_args_mac)
mac_srcs = strip_slashes(js_mac['targets']['//:skia']['sources'])
gn_to_bp_utils.GrabDependentValues(js_mac, '//:skia', 'sources', mac_srcs,
                                   None)
mac_srcs = strip_headers(mac_srcs)
js_win = gn_to_bp_utils.GenerateJSONFromGN(gn_args_win)
win_srcs = strip_slashes(js_win['targets']['//:skia']['sources'])
gn_to_bp_utils.GrabDependentValues(js_win, '//:skia', 'sources', win_srcs,
                                   None)
win_srcs = strip_headers(win_srcs)
# Sources common to all four OSes become the shared 'srcs'; the per-OS sets
# keep only their OS-specific remainders.
srcs = android_srcs.intersection(linux_srcs).intersection(mac_srcs)
srcs = srcs.intersection(win_srcs)
android_srcs = android_srcs.difference(srcs)
linux_srcs = linux_srcs.difference(srcs)
mac_srcs = mac_srcs.difference(srcs)
win_srcs = win_srcs.difference(srcs)
gm_srcs = strip_headers(gm_srcs)
test_srcs = strip_headers(test_srcs)
# dm/nanobench reuse gm/test sources; subtract to avoid compiling twice.
dm_srcs = strip_headers(dm_srcs).difference(gm_srcs).difference(test_srcs)
nanobench_srcs = strip_headers(nanobench_srcs).difference(gm_srcs)
test_minus_gm_includes = test_includes.difference(gm_includes)
test_minus_gm_srcs = test_srcs.difference(gm_srcs)
cflags = gn_to_bp_utils.CleanupCFlags(cflags)
cflags_cc = gn_to_bp_utils.CleanupCCFlags(cflags_cc)
# Execute GN for specialized RenderEngine target
js_renderengine = gn_to_bp_utils.GenerateJSONFromGN(gn_args_renderengine)
renderengine_srcs = strip_slashes(
    js_renderengine['targets']['//:skia']['sources'])
gn_to_bp_utils.GrabDependentValues(js_renderengine, '//:skia', 'sources',
                                   renderengine_srcs, None)
renderengine_srcs = strip_headers(renderengine_srcs)
# Execute GN for specialized SkQP target
skqp_sdk_version = 26
js_skqp = gn_to_bp_utils.GenerateJSONFromGN(skqp_gn_args.GetGNArgs(api_level=skqp_sdk_version,
                                                                   debug=False,
                                                                   is_android_bp=True))
skqp_srcs = strip_slashes(js_skqp['targets']['//:libskqp_app']['sources'])
skqp_includes = strip_slashes(js_skqp['targets']['//:libskqp_app']['include_dirs'])
skqp_cflags = strip_slashes(js_skqp['targets']['//:libskqp_app']['cflags'])
skqp_cflags_cc = strip_slashes(js_skqp['targets']['//:libskqp_app']['cflags_cc'])
skqp_defines = strip_slashes(js_skqp['targets']['//:libskqp_app']['defines'])
skqp_includes.update(strip_slashes(js_skqp['targets']['//:public']['include_dirs']))
gn_to_bp_utils.GrabDependentValues(js_skqp, '//:libskqp_app', 'sources',
                                   skqp_srcs, None)
gn_to_bp_utils.GrabDependentValues(js_skqp, '//:libskqp_app', 'include_dirs',
                                   skqp_includes, ['//:gif'])
gn_to_bp_utils.GrabDependentValues(js_skqp, '//:libskqp_app', 'cflags',
                                   skqp_cflags, None)
gn_to_bp_utils.GrabDependentValues(js_skqp, '//:libskqp_app', 'cflags_cc',
                                   skqp_cflags_cc, None)
gn_to_bp_utils.GrabDependentValues(js_skqp, '//:libskqp_app', 'defines',
                                   skqp_defines, None)
# SkQP-only defines that GN does not emit.
skqp_defines.add("SK_ENABLE_DUMP_GPU")
skqp_defines.add("SK_BUILD_FOR_SKQP")
skqp_defines.add("SK_ALLOW_STATIC_GLOBAL_INITIALIZERS=1")
skqp_srcs = strip_headers(skqp_srcs)
skqp_cflags = gn_to_bp_utils.CleanupCFlags(skqp_cflags)
skqp_cflags_cc = gn_to_bp_utils.CleanupCCFlags(skqp_cflags_cc)
here = os.path.dirname(__file__)
defs = gn_to_bp_utils.GetArchSources(os.path.join(here, 'opts.gni'))
def get_defines(gn_json):
    """Return the //:skia 'defines' list from a GN JSON description as a set of str.

    The parameter was renamed from 'json' to stop shadowing the stdlib module
    name; all call sites pass it positionally, so callers are unaffected.
    """
    return {str(d) for d in gn_json['targets']['//:skia']['defines']}
# Per-OS define sets feed the generated SkUserConfig.h headers below.
android_defines = get_defines(js)
linux_defines = get_defines(js_linux)
mac_defines = get_defines(js_mac)
win_defines = get_defines(js_win)
renderengine_defines = get_defines(js_renderengine)
renderengine_defines.add('SK_IN_RENDERENGINE')
def mkdir_if_not_exists(path):
    """Create path (and any missing parents), tolerating an existing directory.

    The previous check-then-create was racy when invoked concurrently
    (TOCTOU); creating unconditionally and ignoring "already a directory"
    is safe under parallel runs.
    """
    try:
        os.makedirs(path)
    except OSError:
        # Re-raise real failures; an existing directory is fine.
        if not os.path.isdir(path):
            raise
# Each output flavor gets its own generated SkUserConfig.h directory.
mkdir_if_not_exists('android/include/config/')
mkdir_if_not_exists('linux/include/config/')
mkdir_if_not_exists('mac/include/config/')
mkdir_if_not_exists('win/include/config/')
mkdir_if_not_exists('renderengine/include/config/')
mkdir_if_not_exists('skqp/include/config/')
# Complete set of SK_BUILD_FOR_* tokens; used to forbid cross-platform defines.
platforms = { 'IOS', 'MAC', 'WIN', 'ANDROID', 'UNIX' }
def disallow_platforms(config, desired):
    """Append a guard to config: #error if any SK_BUILD_FOR_* other than desired is set."""
    with open(config, 'a') as f:
        p = sorted(platforms.difference({ desired }))
        s = '#if '
        for i in range(len(p)):
            s = s + 'defined(SK_BUILD_FOR_%s)' % p[i]
            if i < len(p) - 1:
                s += ' || '
                # Break the #if condition onto a new line every second term.
                if i % 2 == 1:
                    s += '\\\n    '
        print(s, file=f)
        print('    #error "Only SK_BUILD_FOR_%s should be defined!"' % desired, file=f)
        print('#endif', file=f)
def append_to_file(config, s):
    """Append the string s (plus a trailing newline) to the file at config."""
    with open(config, 'a') as out:
        print(s, file=out)
def write_android_config(config_path, defines, isNDKConfig = False):
    """Emit an Android SkUserConfig.h from defines and add platform guards."""
    gn_to_bp_utils.WriteUserConfig(config_path, defines)
    append_to_file(config_path, '''
#ifndef SK_BUILD_FOR_ANDROID
    #error "SK_BUILD_FOR_ANDROID must be defined!"
#endif''')
    disallow_platforms(config_path, 'ANDROID')
    if isNDKConfig:
        # NDK builds must not advertise themselves as the Android framework.
        append_to_file(config_path, '''
#undef SK_BUILD_FOR_ANDROID_FRAMEWORK''')
write_android_config('android/include/config/SkUserConfig.h', android_defines)
write_android_config('renderengine/include/config/SkUserConfig.h', renderengine_defines)
# SkQP uses the NDK flavor (drops SK_BUILD_FOR_ANDROID_FRAMEWORK).
write_android_config('skqp/include/config/SkUserConfig.h', skqp_defines, True)
def write_config(config_path, defines, platform):
    """Emit a desktop SkUserConfig.h for platform and add platform guards."""
    gn_to_bp_utils.WriteUserConfig(config_path, defines)
    append_to_file(config_path, '''
// Correct SK_BUILD_FOR flags that may have been set by
// SkTypes.h/Android.bp
#ifndef SK_BUILD_FOR_%s
    #define SK_BUILD_FOR_%s
#endif
#ifdef SK_BUILD_FOR_ANDROID
    #undef SK_BUILD_FOR_ANDROID
#endif''' % (platform, platform))
    disallow_platforms(config_path, platform)
# Desktop host configs; note Linux uses the SK_BUILD_FOR_UNIX token.
write_config('linux/include/config/SkUserConfig.h', linux_defines, 'UNIX')
write_config('mac/include/config/SkUserConfig.h', mac_defines, 'MAC')
write_config('win/include/config/SkUserConfig.h', win_defines, 'WIN')
# Render string lists the way bpfmt (the Android.bp formatter) would.
def bpfmt(indent, lst, sort=True):
    """Format lst as quoted, comma-terminated lines indented by indent spaces."""
    values = sorted(lst) if sort else lst
    separator = '\n' + ' ' * indent
    return separator.join('"%s",' % value for value in values)
# OK! We have everything to fill in Android.bp...
# Substitute every placeholder in the template; keys match the $names above.
with open('Android.bp', 'w') as Android_bp:
    print(bp.substitute({
        'export_includes': bpfmt(8, export_includes),
        'local_includes': bpfmt(8, local_includes),
        'srcs': bpfmt(8, srcs),
        'cflags': bpfmt(8, cflags, False),
        'cflags_cc': bpfmt(8, cflags_cc),
        'arm_srcs': bpfmt(16, strip_headers(defs['armv7'])),
        'arm_neon_srcs': bpfmt(20, strip_headers(defs['neon'])),
        'arm64_srcs': bpfmt(16, strip_headers(defs['arm64'] +
                                              defs['crc32'])),
        'x86_srcs': bpfmt(16, strip_headers(defs['sse2'] +
                                            defs['ssse3'] +
                                            defs['sse41'] +
                                            defs['sse42'] +
                                            defs['avx' ] +
                                            defs['hsw' ] +
                                            defs['skx' ])),
        'gm_includes' : bpfmt(8, gm_includes),
        'gm_srcs' : bpfmt(8, gm_srcs),
        'test_minus_gm_includes' : bpfmt(8, test_minus_gm_includes),
        'test_minus_gm_srcs' : bpfmt(8, test_minus_gm_srcs),
        'dm_includes' : bpfmt(8, dm_includes),
        'dm_srcs' : bpfmt(8, dm_srcs),
        'nanobench_includes' : bpfmt(8, nanobench_includes),
        'nanobench_srcs' : bpfmt(8, nanobench_srcs),
        'skqp_sdk_version': skqp_sdk_version,
        'skqp_includes': bpfmt(8, skqp_includes),
        'skqp_srcs': bpfmt(8, skqp_srcs),
        'skqp_cflags': bpfmt(8, skqp_cflags, False),
        'skqp_cflags_cc': bpfmt(8, skqp_cflags_cc),
        'android_srcs': bpfmt(10, android_srcs),
        'linux_srcs': bpfmt(10, linux_srcs),
        'mac_srcs': bpfmt(10, mac_srcs),
        'win_srcs': bpfmt(10, win_srcs),
        'renderengine_srcs': bpfmt(8, renderengine_srcs),
    }), file=Android_bp)
| google/skia | gn/gn_to_bp.py | Python | bsd-3-clause | 23,280 |
# -*- encoding: utf-8 -*-
__author__ = 'wangss'
# 数据处理脚本
import xlrd
def sqlExcute():
file1 = open("E://111.sql")
file2 = open("E://222.sql")
re = []
for i in file2:
# print i.decode('gbk')
if i.find("values") > -1:
re.append(i.split("values")[1].split(",")[0][3:-1])
for j in file1:
if j.find("values") > -1:
temp = j.split("values")[1].split(",")[0][3:-1]
if temp not in re:
print 'insert into wf_process_type (TYPE_ID, TYPE_NAME, DESCRIPTION, DISPLAY_ORDER) ' + j.decode('gbk')
def excelExc(path):
    """Read column index 2 of the first sheet at path; return its unique
    values in first-seen order. Prints the unique count as a debug aid."""
    re = []  # NOTE(review): local name shadows the stdlib 're' module
    ex = xlrd.open_workbook(path)
    table = ex.sheets()[0]
    rownum = table.nrows
    for i in range(rownum):
        row = table.row_values(i)
        # print row[2]
        if row[2] not in re:
            re.append(row[2])
    print len(re)
    return re
if __name__ == '__main__':
    # Collect values present in 222.xls but missing from 111.xls ...
    re1 = excelExc("E://222.xls")
    re2 = excelExc("E://111.xls")
    temp = []
    for i in re1:
        if i not in re2:
            temp.append(i)
    print len(temp)
    # ... then emit wf_bzns_conf INSERTs from 222.sql whose second inserted
    # value matches one of those missing entries.
    f = open("E://222.sql")  # NOTE(review): handle is never closed
    m = 0
    for j in f:
        if j.find("values") > -1:
            # Second value of the insert, stripped of quoting characters.
            tt = j.split('(')[1].split(',')[1][2:-1]
            # print tt.decode('gbk')
            if tt.decode('gbk') in temp:
                m += 1
                print 'insert into wf_bzns_conf (PROC_DEF_UNIQUE_ID, NAME, REFUSE_ACT_FLAG, BZNS_TABLE, BZNS_STATE_FIELD, BZNS_STATE_VALUE, BZNS_WHERE, MSG_TYPE, BY, LOG_BZNS_CODE, BZNS_PROCESS_ID, BZNS_INDV_CUST_CODE_FIELD, BZNS_MONEY_FIELD, BZNS_CURCD_FIELD, IN_USE) ' + j.decode(
                    'gbk')
            # print
| alenX/PythonStuff | SqlDeal.py | Python | gpl-3.0 | 1,668 |
import matplotlib
matplotlib.use('Agg')
import matplotlib.pylab as plt
import matplotlib.colors as mat_col
from matplotlib.colors import LinearSegmentedColormap
import scipy
import scipy.cluster.hierarchy as sch
from scipy.cluster.hierarchy import set_link_color_palette
import numpy as np
import pandas as pd
import seaborn as sns
import glob
#from matplotlib import rcParams
#rcParams.update({'figure.autolayout': True})
#import makepds
# Segment-data anchors for a custom matplotlib colormap (used as
# 'BlueYellow1' in clustermap); tuples are (position, value_left, value_right).
cdict1 = {'red': ((0.0, 0.0, 0.0),
                  (0.5, 0.0, 1.0),
                  (1.0, 0.0, 1.0)),
          'green': ((0.0, 0.0, 0.0),
                    (0.5, 0.8, 1.0),
                    (1.0, 0.0, 0.0)),
          'blue': ((0.0, 0.0, 0.0),
                   (0.5, 1.0, 0.0),
                   (1.0, 0.0, 0.0))
          }
def clustermap(fname,matrix,thrs_row=1,thrs_col=1,row_cls=False,col_cls=True,method='average',fig_sz=(8,8),colnames=None, rownames=None,cls_info=False):
    """Draw a hierarchically clustered heatmap of matrix and save it as PNG.

    fname: input file name; its stem prefixes the output PNG name.
    matrix: 2-D data (pandas DataFrame or array-like); rows = observations.
    thrs_row / thrs_col: dendrogram color/cluster cut thresholds.
    row_cls / col_cls: whether to cluster rows / columns (at least one True).
    method: scipy linkage method ('ward', 'complete', 'average', ...).
    cls_info: when True, return a dict of leaf orderings and flat clusters.
    """
    colors=sns.color_palette("Set2", 25)
    colors=[mat_col.rgb2hex(color) for color in colors]
    set_link_color_palette(colors)
    # Fall back to integer labels when matrix is not a pandas object.
    if colnames is None:
        try:
            colnames=np.array(matrix.columns,str)
        except AttributeError:
            colnames=np.array(range(0,matrix.shape[1]),str)
    if rownames is None:
        try:
            rownames=np.array(matrix.index,str)
        except AttributeError:
            rownames=np.array(range(0,matrix.shape[0]),str)
    # Pairwise distances: rows as-is, columns via the transpose.
    if row_cls:
        D_row=scipy.spatial.distance.pdist(matrix)
    if col_cls:
        D_col=scipy.spatial.distance.pdist(matrix.T)
    fig=plt.figure(figsize=fig_sz)
    # Figure-fraction layout constants for the axes placed below.
    lef=0.01
    bot=0.05
    h_sep=0.2
    v_sep=0.7
    row_leg=0.01 #space for the legend of the rows plotted on the right side of the matrix
    if row_cls:
        if col_cls: #if want both row and column dendrogram
            mat_h=v_sep-0.005-bot
            mat_w=0.9-row_leg-h_sep
            den_h=1-v_sep-0.005
            den_w=h_sep-0.005-lef
            #plot dendrogram for column clusters
            ax_col=fig.add_axes([h_sep,v_sep,mat_w,den_h])
            #g_col=scipy.cluster.hierarchy.linkage(D_col,method=method, metric='cosine')
            g_col=scipy.cluster.hierarchy.linkage(D_col,method=method)
            den_col=scipy.cluster.hierarchy.dendrogram(g_col,color_threshold=thrs_col)
            idx_col = den_col['leaves']
            ax_col.set_xticklabels([''])
        else: #if only want row dendrogram
            mat_h=1-bot*2
            mat_w=0.9-0.01-h_sep
            den_w=h_sep-0.005-lef
            idx_col=list(range(0,matrix.shape[1]))
        # plot dendrogram for row clusters
        ax_row=fig.add_axes([lef,bot,den_w,mat_h])
        g_row=scipy.cluster.hierarchy.linkage(D_row,method=method)
        den_row=scipy.cluster.hierarchy.dendrogram(g_row,color_threshold=thrs_row,orientation='left')
        idx_row = den_row['leaves']
        ax_row.set_yticklabels([''])
        ax_mat = fig.add_axes([h_sep,bot,mat_w,mat_h])
    else:
        if col_cls: #if only want column clusters
            lef=lef+0.04
            mat_h=v_sep-0.005-bot
            mat_w=0.9-row_leg-lef
            den_h=1-v_sep-0.005
        else:
            # NOTE(review): "Ture" typo in the message text (kept verbatim).
            plt.close()
            raise ValueError("At least one of row_cls and col_cls has to be Ture.")
        #plot dendrogram for column clusters
        ax_col=fig.add_axes([lef,v_sep,mat_w,den_h])
        g_col=scipy.cluster.hierarchy.linkage(D_col,method=method)
        den_col=scipy.cluster.hierarchy.dendrogram(g_col,color_threshold=thrs_col)
        idx_col = den_col['leaves']
        idx_row=list(range(0,matrix.shape[0]))
        ax_col.set_xticklabels([''])
        ax_mat = fig.add_axes([lef,bot,mat_w,mat_h])
    #plot data matrix as a heatmap, reordered to match the dendrogram leaves
    matrix=np.array(matrix)
    D = matrix[idx_row,:]
    D = D[:,idx_col]
    blue_yellow1 = LinearSegmentedColormap('BlueYellow1', cdict1)
    # Cyan -> black -> yellow diverging map; data clipped to [-2, 2] below.
    mbg = [(0, '#00FFFF'), (0.5, '#000000' ), (1, '#ffff00')]
    mag_black_grn = LinearSegmentedColormap.from_list('MgBlGr', mbg, N=256)
    #x=range(-1,1)
    #y=range(-1,1)
    #x,y=np.meshgrid(x,y)
    ax_mat.set_aspect('equal')
    im = ax_mat.pcolormesh(D,cmap=mag_black_grn,vmin=-2,vmax=2)
    #im = ax_mat.matshow(D, aspect='auto', origin='lower', cmap=plt.cm.YlGnBu)
    ax_mat.xaxis.set_ticks_position('bottom')
    ax_mat.yaxis.set_ticks_position('left')
    #ax_mat.yaxis.set_ticks_position('right')
    # Ticks at cell centers (+0.5) so labels line up with pcolormesh cells.
    ax_mat.set_xticks(list(np.asarray(list(range(0,matrix.shape[1])))+0.5))
    ax_mat.set_yticks(list(np.asarray(list(range(0,matrix.shape[0])))+0.5))
    #ax_mat.set_yticks(list(range(0,matrix.shape[0])))
    ax_mat.set_xticklabels(colnames[idx_col], rotation = 'vertical', fontsize=5)
    ax_mat.set_yticklabels(rownames[idx_row], fontsize=5)
    #ax_mat.set_yticklabels(rownames[idx_row], va='center')
    ax_mat.grid(False)
    #plt.subplots_adjust(bottom=0.1)
    #fig,ax = plt.subplots()
    #fig.subplots_adjust(top=1,bottom=0.5)
    #fig.tight_layout()
    # Plot colorbar.
    axcolor = fig.add_axes([0.94,bot,0.02,mat_h])
    plt.colorbar(im, cax=axcolor)
    namepre = fname.split(".")[0]
    print namepre
    print colnames[idx_col]
    if row_cls:
        namepre = namepre + "rows_"
    plt.savefig(namepre+method+"_cutoff005.png",bbox_inches='tight', dpi=600)
    if cls_info:
        # Leaf orderings plus flat cluster assignments at the cut thresholds.
        cls_dic={}
        if col_cls:
            cls_dic['col_ind']=den_col['leaves']
            cls_dic['col_cls']=scipy.cluster.hierarchy.fcluster(g_col,t=thrs_col,criterion='distance')
        if row_cls:
            cls_dic['row_ind']=den_row['leaves']
            cls_dic['row_cls']=scipy.cluster.hierarchy.fcluster(g_row,t=thrs_row,criterion='distance')
        return(cls_dic)
# Generate a random matrix to test the clustering algorithm.
#D = np.random.rand(40,30) ## this mock data has 40 observations and 30 samples
##D=pd.DataFrame(D,index=range(1,41)) ## If input is a pandas dataframe, the algorithm will extract the row and column names and use for plotting
# Driver: run hierarchical clustering on every matching SSMD table,
# once per linkage method, producing one heatmap figure per combination.
for fname in glob.glob("ssmd_stimulusonly_cutoff005_FIXEDPSIG_edited.csv"):
    D = pd.read_csv(fname)
    # Try each linkage strategy; clustermap() saves the figure itself and
    # returns the cluster assignment info for the last call.
    for linkage_method in ('ward', 'complete', 'average'):
        cls_info = clustermap(fname, D, row_cls=False, col_cls=True,
                              cls_info=True, method=linkage_method,
                              thrs_col=3)
| sthyme/ZFSchizophrenia | BehaviorAnalysis/Alternative_Analyses/SSMDanalysis_basedonDatasetsS1andS2/mayywheatmap_ssmd.py | Python | mit | 6,646 |
# Copyright 2016 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from lcm.pub.database.models import VNFFGInstModel, FPInstModel
from lcm.pub.exceptions import NSLCMException
from lcm.pub.utils.share_lock import do_biz_with_share_lock
logger = logging.getLogger(__name__)
class SfcInstance(object):
    """Create and persist a service function chain (forwarding path) instance.

    ``data`` must provide: ``nsinstid`` (owning NS instance id),
    ``ns_model_data`` (parsed NS model dict), ``fpindex`` (forwarding path
    descriptor id), ``fpinstid`` (new FP instance id) and ``sdncontrollerid``.
    """

    def __init__(self, data):
        self.ns_inst_id = data["nsinstid"]
        self.ns_model_data = data["ns_model_data"]
        self.fp_id = data["fpindex"]
        self.fp_inst_id = data["fpinstid"]
        self.sdnControllerId = data["sdncontrollerid"]

    def do_biz(self):
        """Resolve model data, persist the FP instance and return its id."""
        self.init_data()
        return self.save()

    def init_data(self):
        """Cache the fields save() needs from the forwarding-path model.

        Returns silently when self.fp_id is not present in the NS model.
        """
        self.fp_model = self.get_fp_model_by_fp_id()
        # Guard *before* dereferencing the model: the previous version logged
        # fp_model["properties"] first, raising TypeError on a missing model.
        if not self.fp_model:
            return
        logger.info("fp_model.properties:%s, fp_id:%s" % (self.fp_model["properties"], self.fp_id))
        self.symmetric = self.fp_model["properties"].get("symmetric")
        logger.info("sfc_inst_symmetric %s" % self.symmetric)
        self.policyinfo = self.fp_model["properties"].get("policy")
        self.status = "processing"
        vnffg_database_info = VNFFGInstModel.objects.filter(
            vnffgdid=self.get_vnffgdid_by_fp_id(),
            nsinstid=self.ns_inst_id).get()
        self.vnffg_inst_id = vnffg_database_info.vnffginstid

    def get_fp_model_by_fp_id(self):
        """Return the forwarding-path model matching self.fp_id, or None."""
        for fp_model in self.ns_model_data["fps"]:
            if fp_model["fp_id"] == self.fp_id:
                return fp_model
        return None

    def get_vnffgdid_by_fp_id(self):
        """Return the id of the VNFFG whose members include self.fp_id.

        Returns None when no VNFFG references the forwarding path.
        """
        for vnffg_model in self.ns_model_data["vnffgs"]:
            if self.fp_id in vnffg_model["members"]:
                return vnffg_model["vnffg_id"]
        return None

    def save(self):
        """Write the FP instance row and register it on its VNFFG record.

        Raises NSLCMException when any database operation fails.
        """
        try:
            logger.info("Sfc Instanciate save2db start : ")
            FPInstModel(fpid=self.fp_id,
                        fpinstid=self.fp_inst_id,
                        nsinstid=self.ns_inst_id,
                        vnffginstid=self.vnffg_inst_id,
                        symmetric=1 if self.symmetric else 0,
                        policyinfo=self.policyinfo,
                        status=self.status,
                        sdncontrollerid=self.sdnControllerId
                        ).save()
            # The VNFFG fplist is shared between concurrent instantiations of
            # the same NS, so the append runs under a per-NS shared lock.
            do_biz_with_share_lock("update-sfclist-in-vnffg-%s" % self.ns_inst_id, self.update_vnfffg_info)
            logger.info("Sfc Instanciate save2db end : ")
        except Exception:
            # Narrowed from a bare except; log the traceback so the actual
            # database error is not lost before re-raising as NSLCMException.
            logger.exception('SFC instantiation failed')
            raise NSLCMException('SFC instantiation failed.')
        return {
            "fpinstid": self.fp_inst_id
        }

    def update_vnfffg_info(self):
        """Append this FP instance id to the owning VNFFG's comma-joined list."""
        vnffg_database_info = VNFFGInstModel.objects.filter(vnffginstid=self.vnffg_inst_id).get()
        fp_inst_list = vnffg_database_info.fplist
        fp_inst_list = fp_inst_list + ',' + self.fp_inst_id if fp_inst_list else self.fp_inst_id
        VNFFGInstModel.objects.filter(vnffginstid=self.vnffg_inst_id).update(fplist=fp_inst_list)
| open-o/nfvo | lcm/lcm/ns/sfcs/sfc_instance.py | Python | apache-2.0 | 3,786 |
import datetime
import imghdr
import posixpath
import tempfile
from urllib.parse import urlparse
import requests
from django.apps import apps
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.files.images import ImageFile
from django.urls import reverse
from django.utils import timezone
from django.utils.translation import gettext as _
from rest_framework import serializers
from adhocracy4.dashboard import components
from adhocracy4.dashboard import signals as a4dashboard_signals
from adhocracy4.images.validators import validate_image
from adhocracy4.modules import models as module_models
from adhocracy4.phases import models as phase_models
from adhocracy4.projects import models as project_models
from .models import Bplan
from .phases import StatementPhase
BPLAN_EMBED = '<iframe height="500" style="width: 100%; min-height: 300px; ' \
'max-height: 100vh" src="{}" frameborder="0"></iframe>'
DOWNLOAD_IMAGE_SIZE_LIMIT_BYTES = 10 * 1024 * 1024
class BplanSerializer(serializers.ModelSerializer):
    """(De)serializer for externally managed Bplan projects.

    Besides the plain model fields it accepts a downloadable ``image_url``
    (stored as the project tile image), creates/updates the single statement
    module+phase from ``start_date``/``end_date``, and exposes a read-only
    iframe ``embed_code``.
    """

    id = serializers.IntegerField(required=False)

    # make write_only for consistency reasons
    start_date = serializers.DateTimeField(write_only=True)
    end_date = serializers.DateTimeField(write_only=True)
    image_url = serializers.URLField(
        required=False,
        write_only=True,
        max_length=(project_models.Project._meta.
                    get_field('tile_image').max_length))
    image_copyright = serializers.CharField(
        required=False,
        write_only=True,
        source='tile_image_copyright',
        allow_blank=True,
        max_length=(project_models.Project._meta.
                    get_field('tile_image_copyright').max_length))
    embed_code = serializers.SerializerMethodField()

    class Meta:
        model = Bplan
        fields = (
            'id', 'name', 'identifier', 'description', 'url',
            'office_worker_email', 'is_draft', 'start_date', 'end_date',
            'image_url', 'image_copyright', 'embed_code'
        )
        extra_kwargs = {
            # write_only for consistency reasons
            'is_draft': {'default': False, 'write_only': True},
            'name': {'write_only': True},
            'description': {'write_only': True},
            'url': {'write_only': True},
            'office_worker_email': {'write_only': True},
            'identifier': {'write_only': True}
        }

    def create(self, validated_data):
        """Create the Bplan in the requested organisation plus its module/phase."""
        orga_pk = self._context.get('organisation_pk', None)
        orga_model = apps.get_model(settings.A4_ORGANISATIONS_MODEL)
        orga = orga_model.objects.get(pk=orga_pk)
        validated_data['organisation'] = orga
        start_date = validated_data['start_date']
        end_date = validated_data['end_date']

        image_url = validated_data.pop('image_url', None)
        if image_url:
            validated_data['tile_image'] = \
                self._download_image_from_url(image_url)

        bplan = super().create(validated_data)
        self._create_module_and_phase(bplan, start_date, end_date)
        self._send_project_created_signal(bplan)
        return bplan

    def _create_module_and_phase(self, bplan, start_date, end_date):
        """Attach the single statement module and phase to a new bplan."""
        module = module_models.Module.objects.create(
            name=bplan.slug + '_module',
            weight=1,
            project=bplan,
        )

        phase_content = StatementPhase()
        phase_models.Phase.objects.create(
            name=_('Bplan statement phase'),
            description=_('Bplan statement phase'),
            type=phase_content.identifier,
            module=module,
            start_date=start_date,
            end_date=end_date
        )

    def update(self, instance, validated_data):
        """Update the bplan; shift its phase dates and un-archive if extended."""
        start_date = validated_data.get('start_date', None)
        end_date = validated_data.get('end_date', None)
        if start_date or end_date:
            self._update_phase(instance, start_date, end_date)
            # A future end date means the participation is running again.
            if end_date and end_date > timezone.localtime(timezone.now()):
                instance.is_archived = False

        image_url = validated_data.pop('image_url', None)
        if image_url:
            validated_data['tile_image'] = \
                self._download_image_from_url(image_url)

        instance = super().update(instance, validated_data)
        self._send_component_updated_signal(instance)
        return instance

    def _update_phase(self, bplan, start_date, end_date):
        """Copy new start/end dates onto the bplan's single phase."""
        module = module_models.Module.objects.get(project=bplan)
        phase = phase_models.Phase.objects.get(module=module)
        if start_date:
            phase.start_date = start_date
        if end_date:
            phase.end_date = end_date
        phase.save()

    def get_embed_code(self, bplan):
        """Return the iframe snippet embedding this bplan's public page."""
        url = self._get_absolute_url(bplan)
        embed = BPLAN_EMBED.format(url)
        return embed

    def _get_absolute_url(self, bplan):
        """Build the https URL of the bplan's embed view on the current site."""
        site_url = Site.objects.get_current().domain
        embed_url = reverse('embed-project', kwargs={'slug': bplan.slug, })
        url = 'https://{}{}'.format(site_url, embed_url)
        return url

    def _download_image_from_url(self, url):
        """Stream *url* into the tile-image storage and return the file name.

        Enforces a size limit while downloading; raises ValidationError on
        network failure, HTTP error status, oversize or invalid image data.
        """
        parsed_url = urlparse(url)
        file_name = None
        try:
            r = requests.get(url, stream=True, timeout=10)
            # Reject HTTP error responses instead of saving their body
            # (e.g. a 404 HTML page) as an image.
            r.raise_for_status()
            downloaded_bytes = 0
            with tempfile.TemporaryFile() as f:
                for chunk in r.iter_content(chunk_size=1024):
                    downloaded_bytes += len(chunk)
                    if downloaded_bytes > DOWNLOAD_IMAGE_SIZE_LIMIT_BYTES:
                        raise serializers.ValidationError(
                            'Image too large to download {}'.format(url))
                    if chunk:
                        f.write(chunk)
                file_name = self._generate_image_filename(parsed_url.path, f)
                self._image_storage.save(file_name, f)
        except Exception:
            # Best effort cleanup of a partially saved file before failing.
            if file_name:
                self._image_storage.delete(file_name)
            raise serializers.ValidationError(
                'Failed to download image {}'.format(url))

        try:
            self._validate_image(file_name)
        except ValidationError as e:
            self._image_storage.delete(file_name)
            raise serializers.ValidationError(e)
        return file_name

    def _validate_image(self, file_name):
        """Run the configured tile-image validators against the stored file."""
        image_file = self._image_storage.open(file_name, 'rb')
        image = ImageFile(image_file, file_name)
        # Copy before merging: calling .update() on the dict returned by
        # .get('*') would permanently mutate settings.IMAGE_ALIASES['*'].
        config = dict(settings.IMAGE_ALIASES.get('*', {}))
        config.update(settings.IMAGE_ALIASES['tileimage'])
        validate_image(image, **config)

    @property
    def _image_storage(self):
        # Storage backend of the Project.tile_image field.
        return project_models.Project._meta.get_field('tile_image').storage

    @property
    def _image_upload_to(self):
        # upload_to pattern of the Project.tile_image field (a strftime path).
        return project_models.Project._meta.get_field('tile_image').upload_to

    def _generate_image_filename(self, url_path, file):
        """Derive a unique storage name from the URL path and file content."""
        if callable(self._image_upload_to):
            raise Exception('Callable upload_to fields are not supported')

        root_path, extension = posixpath.splitext(url_path)
        if file:
            # Workaround: imghdr expects the files position on 0
            file.seek(0)
            extension = imghdr.what(file) or 'jpeg'

        basename = posixpath.basename(root_path)
        if not basename:
            basename = 'bplan'

        dirname = datetime.datetime.now().strftime(self._image_upload_to)
        filename = posixpath.join(dirname, basename + '.' + extension)
        return self._image_storage.get_available_name(filename)

    def _send_project_created_signal(self, bplan):
        """Notify dashboard listeners that a bplan project was created."""
        a4dashboard_signals.project_created.send(
            sender=self.__class__,
            project=bplan,
            user=self.context['request'].user
        )

    def _send_component_updated_signal(self, bplan):
        """Notify dashboard listeners that the bplan component was updated."""
        component = components.projects['bplan']
        a4dashboard_signals.project_component_updated.send(
            sender=self.__class__,
            project=bplan,
            component=component,
            user=self.context['request'].user
        )
| liqd/a4-meinberlin | meinberlin/apps/bplan/serializers.py | Python | agpl-3.0 | 8,225 |
#!/usr/bin/python
# Copyright (C) 2014 Reece H. Dunn
#
# This file is part of libclangpy.
#
# libclangpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# libclangpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with libclangpy. If not, see <http://www.gnu.org/licenses/>.
import sys
import traceback
import libclang
class UnsupportedException(Exception):
    """Raised when a feature is not supported by the libclang build in use."""
    pass
class ParseError(UnsupportedException):
    """Raised when parsing a source string produces error/fatal diagnostics."""
    pass
# Python 3 removed the `long` type; alias it so numeric comparisons below
# behave the same on both major versions.
if sys.version_info.major >= 3:
    long = int
def equals(a, b):
    """Assert that *a* and *b* have the same type name and compare equal."""
    name_a = type(a).__name__
    name_b = type(b).__name__
    if name_a != name_b:
        raise AssertionError('Type mismatch: `{0}` != `{1}`'.format(name_a, name_b))
    if a != b:
        raise AssertionError('Value mismatch: `{0}` != `{1}`'.format(str(a), str(b)))
def oneof(a, items):
    """Assert that *a* matches at least one of *items* by value AND type name.

    Fix: the previous implementation raised a type-mismatch error as soon as
    it met a value-equal item of a different type — e.g. ``oneof(2.0, [2, 2.0])``
    failed on the first item even though 2.0 is present. Items that do not
    fully match are now simply skipped; only when no item matches is an
    AssertionError raised.
    """
    for item in items:
        if a == item and type(a).__name__ == type(item).__name__:
            return
    itemstr = ', '.join(['`{0}`'.format(str(item)) for item in items])
    raise AssertionError('Value mismatch: `{0}` not in [{1}]'.format(str(a), itemstr))
# Running tallies maintained by run() and reported by summary().
_passed = 0
_skipped = 0
_failed = 0
def run(version, test):
    """Execute *test* and record the outcome in the module tallies.

    *version* is the libclang version that introduced the APIs under test;
    when the loaded libclang is older than that, missing/unsupported APIs
    are counted as skips instead of failures.
    """
    global _passed
    global _skipped
    global _failed
    sys.stdout.write('Running {0} ... '.format(test.__name__))
    try:
        test()
        print('passed')
        _passed = _passed + 1
    except libclang.MissingFunction:
        # The binding could not resolve the C entry point at all.
        if libclang.version < version:
            print('skipping ... missing APIs')
            _skipped = _skipped + 1
        else:
            print('failed ... incorrect API binding')
            _failed = _failed + 1
    except UnsupportedException as e:
        # The API exists but reported an unsupported kind/feature.
        if libclang.version < version:
            print('skipping ... {0}'.format(e))
            _skipped = _skipped + 1
        else:
            print('failed ... {0}'.format(e))
            _failed = _failed + 1
    except Exception as e:
        # Any other error is a genuine test failure; show the traceback.
        print('failed')
        print(traceback.format_exc())
        _failed = _failed + 1
def summary():
    """Print a one-line pass/skip/fail report for the whole test run."""
    separator = '-' * 60
    print(separator)
    print(' {0} passed, {1} skipped, {2} failed'.format(_passed, _skipped, _failed))
    print('')
def parse_str(contents, filename='parse_str.cpp', args=None, ignore_errors=False):
    """Parse *contents* as an unsaved file and return its file-level cursors.

    Raises ParseError when an error/fatal diagnostic is produced, unless
    *ignore_errors* is set.
    """
    index = libclang.Index()
    tu = index.parse(filename, unsaved_files=[(filename, contents)], args=args)
    if not ignore_errors:
        diagnostics = list(tu.diagnostics)
        for diagnostic in diagnostics:
            if diagnostic.severity in [libclang.DiagnosticSeverity.ERROR, libclang.DiagnosticSeverity.FATAL]:
                raise ParseError(diagnostic.spelling)
    # Keep only cursors with a source file, dropping compiler builtins.
    return [child for child in tu.cursor().children if child.location.file]
def match_location(loc, filename, line, column, offset):
    """Assert that *loc* refers to (filename, line, column, offset)."""
    actual_file = loc.file
    # loc.file may be a File object or a plain name/None; normalise to a name.
    if isinstance(actual_file, libclang.File):
        actual_file = actual_file.name
    equals(actual_file, filename)
    equals(loc.line, line)
    equals(loc.column, column)
    equals(loc.offset, offset)
def match_tokens(a, b):
    """Assert that the spellings of token sequence *a* equal the list *b*."""
    spellings = [str(token) for token in a]
    equals(spellings, b)
def match_type(a, b, cursor):
    # *a* must be a libclang.Type of kind *b*, belonging to *cursor*.
    equals(isinstance(a, libclang.Type), True)
    # Older libclang builds expose newer kinds only as UNEXPOSED; report that
    # as "unsupported" so run() can downgrade the failure to a skip.
    if a.kind == libclang.TypeKind.UNEXPOSED and not b == libclang.TypeKind.UNEXPOSED:
        raise UnsupportedException('type is not supported')
    equals(a.kind, b)
    equals(a.cursor, cursor)
def match_cursor(a, b):
    # *a* must be a libclang.Cursor of kind *b*.
    equals(isinstance(a, libclang.Cursor), True)
    # Older libclang builds expose newer kinds only as UNEXPOSED_*; report
    # that as "unsupported" so run() can downgrade the failure to a skip.
    if a.kind in [libclang.CursorKind.UNEXPOSED_DECL,
                  libclang.CursorKind.UNEXPOSED_EXPR,
                  libclang.CursorKind.UNEXPOSED_STMT,
                  libclang.CursorKind.UNEXPOSED_ATTR]:
        raise UnsupportedException('cursor is not supported')
    equals(a.kind, b)
# Check that the bound libclang reports a known version.
def test_version():
    oneof(libclang.version, [2.7, 2.8, 2.9, 3.0, 3.1, 3.2, 3.3, 3.4, 3.5])
# Helper (invoked from test_TranslationUnit): validate File attributes and
# comparison operators against an expected filename.
def test_File(f, filename):
    equals(f.name, filename)
    equals(f.time > 0, True)
    equals(str(f), filename)
    equals(f == f, True)
    equals(f != f, False)
    equals(f == filename, True)
    equals(f != filename, False)
# Null SourceLocation: all-zero components and equality semantics.
def test_SourceLocation():
    loc = libclang.SourceLocation.null()
    match_location(loc, None, 0, 0, 0)
    match_location(loc.instantiation_location, None, 0, 0, 0)
    equals(loc == libclang.SourceLocation.null(), True)
    equals(loc != libclang.SourceLocation.null(), False)
    equals(loc.is_null, True)
# spelling_location (libclang 2.9+).
def test_SourceLocation29():
    loc = libclang.SourceLocation.null()
    match_location(loc.spelling_location, None, 0, 0, 0)
# presumed_location (libclang 3.0+); file comes back as '' rather than None.
def test_SourceLocation30():
    loc = libclang.SourceLocation.null()
    match_location(loc.presumed_location, '', 0, 0, 0)
# expansion_location (libclang 3.1+).
def test_SourceLocation31():
    loc = libclang.SourceLocation.null()
    match_location(loc.expansion_location, None, 0, 0, 0)
# is_in_system_header / file_location (libclang 3.3+).
def test_SourceLocation33():
    loc = libclang.SourceLocation.null()
    equals(loc.is_in_system_header, False)
    match_location(loc.file_location, None, 0, 0, 0)
# is_from_main_file (libclang 3.4+).
def test_SourceLocation34():
    loc = libclang.SourceLocation.null()
    equals(loc.is_from_main_file, False)
# Null SourceRange from both the null() factory and the two-arg constructor.
def test_SourceRange():
    rng1 = libclang.SourceRange.null()
    equals(rng1.start, libclang.SourceLocation.null())
    equals(rng1.end, libclang.SourceLocation.null())
    rng2 = libclang.SourceRange(libclang.SourceLocation.null(),
                                libclang.SourceLocation.null())
    equals(rng2.start, libclang.SourceLocation.null())
    equals(rng2.end, libclang.SourceLocation.null())
    equals(rng1 == rng2, True)
    equals(rng1 != rng2, False)
    equals(rng1.is_null, True)
# Flag-enum semantics: equality, value, bitwise-or, hashing, repr.
def test_DiagnosticDisplayOptions():
    a = libclang.DiagnosticDisplayOptions.COLUMN
    b = libclang.DiagnosticDisplayOptions.SOURCE_RANGES
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 2)
    equals((a | b).value, 6)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'DiagnosticDisplayOptions(2)')
# Plain enum semantics: equality, value, hashing, repr.
def test_DiagnosticSeverity():
    a = libclang.DiagnosticSeverity.NOTE
    b = libclang.DiagnosticSeverity.ERROR
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 1)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'DiagnosticSeverity(1)')
# DiagnosticCategory (libclang 2.9+): name differs across libclang releases.
def test_DiagnosticCategory29():
    a = libclang.DiagnosticCategory(1)
    b = libclang.DiagnosticCategory(2)
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 1)
    oneof(a.name, [
        'Parse Issue',                     # 2.9 or earlier
        'Lexical or Preprocessor Issue'])  # 3.0 or later
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'DiagnosticCategory(1)')
# Linkage enum semantics.
def test_Linkage():
    a = libclang.Linkage.NO_LINKAGE
    b = libclang.Linkage.INTERNAL
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 1)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'Linkage(1)')
# TokenKind enum semantics.
def test_TokenKind():
    a = libclang.TokenKind.KEYWORD
    b = libclang.TokenKind.LITERAL
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 1)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'TokenKind(1)')
# CursorKind: comparisons, spelling and the kind-classification predicates.
def test_CursorKind():
    equals(libclang.CursorKind.CLASS_DECL == libclang.CursorKind.CLASS_DECL, True)
    equals(libclang.CursorKind.CLASS_DECL == libclang.CursorKind.UNION_DECL, False)
    equals(libclang.CursorKind.CLASS_DECL != libclang.CursorKind.CLASS_DECL, False)
    equals(libclang.CursorKind.CLASS_DECL != libclang.CursorKind.UNION_DECL, True)
    kind = libclang.CursorKind.STRUCT_DECL
    equals(kind.spelling, 'StructDecl')
    equals(str(kind), 'StructDecl')
    equals(kind.is_declaration, True)
    equals(kind.is_reference, False)
    equals(kind.is_expression, False)
    equals(kind.is_statement, False)
    equals(kind.is_invalid, False)
    equals(kind.is_translation_unit, False)
    a = libclang.CursorKind.VAR_DECL
    b = libclang.CursorKind.FIELD_DECL
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'CursorKind(9|VarDecl)')
# is_preprocessing / is_unexposed predicates (libclang 2.8+).
def test_CursorKind28():
    kind = libclang.CursorKind.STRUCT_DECL
    equals(kind.is_preprocessing, False)
    equals(kind.is_unexposed, False)
# is_attribute predicate (libclang 3.0+).
def test_CursorKind30():
    kind = libclang.CursorKind.STRUCT_DECL
    equals(kind.is_attribute, False)
# TypeKind (libclang 2.8+): comparisons, spelling, hashing, repr.
def test_TypeKind28():
    equals(libclang.TypeKind.VOID == libclang.TypeKind.VOID, True)
    equals(libclang.TypeKind.VOID == libclang.TypeKind.UINT, False)
    equals(libclang.TypeKind.VOID != libclang.TypeKind.VOID, False)
    equals(libclang.TypeKind.VOID != libclang.TypeKind.UINT, True)
    kind = libclang.TypeKind.FLOAT
    equals(kind.spelling, 'Float')
    equals(str(kind), 'Float')
    a = libclang.TypeKind.LONG
    b = libclang.TypeKind.SHORT
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'TypeKind(18|Long)')
# The tests below all exercise the same enum/flag contract (equality, value,
# optional bitwise-or, hashing, repr) for kinds added in later libclang
# releases; the numeric suffix of each test names the minimum version.
def test_AvailabilityKind28():
    a = libclang.AvailabilityKind.DEPRECATED
    b = libclang.AvailabilityKind.NOT_AVAILABLE
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 1)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'AvailabilityKind(1)')
def test_LanguageKind28():
    a = libclang.LanguageKind.C
    b = libclang.LanguageKind.OBJC
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 1)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'LanguageKind(1)')
def test_AccessSpecifier28():
    a = libclang.AccessSpecifier.PUBLIC
    b = libclang.AccessSpecifier.PRIVATE
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 1)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'AccessSpecifier(1)')
def test_NameRefFlags30():
    a = libclang.NameRefFlags.WANT_QUALIFIER
    b = libclang.NameRefFlags.WANT_TEMPLATE_ARGS
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 1)
    equals((a | b).value, 3)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'NameRefFlags(1)')
def test_TranslationUnitFlags28():
    a = libclang.TranslationUnitFlags.INCOMPLETE
    b = libclang.TranslationUnitFlags.CACHE_COMPLETION_RESULTS
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 2)
    equals((a | b).value, 10)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'TranslationUnitFlags(2)')
def test_SaveTranslationUnitFlags28():
    a = libclang.SaveTranslationUnitFlags(2)
    b = libclang.SaveTranslationUnitFlags(8)
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 2)
    equals((a | b).value, 10)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'SaveTranslationUnitFlags(2)')
def test_ReparseTranslationUnitFlags28():
    a = libclang.ReparseTranslationUnitFlags(2)
    b = libclang.ReparseTranslationUnitFlags(8)
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 2)
    equals((a | b).value, 10)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'ReparseTranslationUnitFlags(2)')
def test_GlobalOptionFlags31():
    a = libclang.GlobalOptionFlags(2)
    b = libclang.GlobalOptionFlags(8)
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 2)
    equals((a | b).value, 10)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'GlobalOptionFlags(2)')
def test_CallingConvention31():
    a = libclang.CallingConvention.X86_STDCALL
    b = libclang.CallingConvention.C
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 2)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'CallingConvention(2)')
def test_ObjCPropertyAttributes33():
    a = libclang.ObjCPropertyAttributes(2)
    b = libclang.ObjCPropertyAttributes(8)
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 2)
    equals((a | b).value, 10)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'ObjCPropertyAttributes(2)')
def test_ObjCDeclQualifierKind33():
    a = libclang.ObjCDeclQualifierKind(2)
    b = libclang.ObjCDeclQualifierKind(8)
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 2)
    equals((a | b).value, 10)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'ObjCDeclQualifierKind(2)')
def test_RefQualifierKind34():
    a = libclang.RefQualifierKind.LVALUE
    b = libclang.RefQualifierKind.RVALUE
    equals(a == a, True)
    equals(a == b, False)
    equals(a != a, False)
    equals(a != b, True)
    equals(a.value, 1)
    equals(hash(a) == hash(a), True)
    equals(hash(a) == hash(b), False)
    equals(repr(a), 'RefQualifierKind(1)')
# Index.parse() accepted in every calling convention: positional, keyword,
# via args, with compiler flags, and with unsaved in-memory files.
def test_Index():
    index = libclang.Index()
    filename = 'tests/enumeration.hpp'
    # no args
    tu = index.parse(filename)
    equals(tu.spelling, filename)
    equals(len(list(tu.diagnostics)), 0)
    # no args -- as keyword argument
    tu = index.parse(filename=filename)
    equals(tu.spelling, filename)
    equals(len(list(tu.diagnostics)), 0)
    # file as arg
    tu = index.parse(args=[filename])
    equals(tu.spelling, filename)
    equals(len(list(tu.diagnostics)), 0)
    # args
    tu = index.parse(filename, args=['-std=c++98'])
    equals(tu.spelling, filename)
    equals(len(list(tu.diagnostics)), 0)
    # unsaved files
    tu = index.parse('unsaved.hpp', unsaved_files=[('unsaved.hpp', 'struct test {};')])
    equals(tu.spelling, 'unsaved.hpp')
    equals(len(list(tu.diagnostics)), 0)
    # unsaved files
    tu = index.parse('unsaved.cpp', unsaved_files=[('unsaved.cpp', 'struct test {};')])
    equals(tu.spelling, 'unsaved.cpp')
    equals(len(list(tu.diagnostics)), 0)
# Index global options round-trip (libclang 3.1+).
def test_Index31():
    index = libclang.Index()
    equals(index.global_options, libclang.GlobalOptionFlags.NONE)
    index.global_options = libclang.GlobalOptionFlags.THREAD_BACKGROUND_PRIORITY_FOR_INDEXING
    equals(index.global_options, libclang.GlobalOptionFlags.THREAD_BACKGROUND_PRIORITY_FOR_INDEXING)
# TranslationUnit: spelling, File lookup and the location() overloads.
def test_TranslationUnit():
    index = libclang.Index()
    filename = 'tests/enumeration.hpp'
    tu = index.parse(filename)
    equals(tu.spelling, filename)
    equals(str(tu), filename)
    test_File(tu.file(filename), filename)
    match_location(tu.location(tu.file(filename), 3, 2), filename, 3, 2, 13)
    match_location(tu.location(tu.file(filename), line=3, column=2), filename, 3, 2, 13)
    match_location(tu.location(filename, 3, 2), filename, 3, 2, 13)
    match_location(tu.location(filename, line=3, column=2), filename, 3, 2, 13)
    match_location(tu.location(tu.spelling, 3, 2), filename, 3, 2, 13)
    match_location(tu.location(tu.spelling, line=3, column=2), filename, 3, 2, 13)
    equals(list(tu.diagnostics), [])
# Offset-based location lookup (libclang 2.9+).
def test_TranslationUnit29():
    index = libclang.Index()
    filename = 'tests/enumeration.hpp'
    tu = index.parse(filename)
    match_location(tu.location(tu.file(filename), offset=13), filename, 3, 2, 13)
# is_multiple_include_guarded (libclang 3.0+).
def test_TranslationUnit30():
    index = libclang.Index()
    filename = 'tests/enumeration.hpp'
    tu = index.parse(filename)
    equals(tu.is_multiple_include_guarded(tu.file(filename)), False)
# Diagnostic: spelling, formatting, severity, location, ranges and fixits
# for a file with a single known parse error.
def test_Diagnostic():
    index = libclang.Index()
    tu = index.parse('tests/error.hpp')
    diagnostics = list(tu.diagnostics)
    equals(len(diagnostics), 1)
    d = diagnostics[0]
    equals(d.spelling, 'expected \';\' after struct')
    equals(str(d), 'expected \';\' after struct')
    equals(d.format(),
           'tests/error.hpp:3:2: error: expected \';\' after struct')
    equals(d.format(libclang.DiagnosticDisplayOptions.SOURCE_LOCATION),
           'tests/error.hpp:3: error: expected \';\' after struct')
    equals(d.severity, libclang.DiagnosticSeverity.ERROR)
    match_location(d.location, 'tests/error.hpp', 3, 2, 16)
    # ranges
    r = list(d.ranges)
    equals(len(r), 0)
    # fixits
    f = list(d.fixits)
    equals(len(f), 1)
    match_location(f[0].extent.start, 'tests/error.hpp', 3, 2, 16)
    match_location(f[0].extent.end, 'tests/error.hpp', 3, 2, 16)
    equals(f[0].spelling, ';')
# Diagnostic option/category accessors (libclang 2.9+).
def test_Diagnostic29():
    index = libclang.Index()
    tu = index.parse('tests/error.hpp')
    diagnostics = list(tu.diagnostics)
    equals(len(diagnostics), 1)
    d = diagnostics[0]
    equals(d.option, '')
    equals(d.disable_option, '')
    equals(d.category.name, 'Parse Issue')
    equals(d.category_text, 'Parse Issue')
# Cursor fundamentals: identity, location/extent, USR, children and the
# token streams produced for enums and function declarations.
def test_Cursor():
    c = parse_str('enum test { a, b };', filename='tests/enumeration.hpp')[0]
    equals(c == c, True)
    equals(c == libclang.Cursor.null(), False)
    equals(c != c, False)
    equals(c != libclang.Cursor.null(), True)
    equals(c.is_null, False)
    equals(hash(c), hash(c))
    equals(c.spelling, 'test')
    equals(str(c), 'test')
    equals(c.kind, libclang.CursorKind.ENUM_DECL)
    equals(c.parent.kind, libclang.CursorKind.TRANSLATION_UNIT)
    equals(c.linkage, libclang.Linkage.EXTERNAL)
    match_location(c.location, 'tests/enumeration.hpp', 1, 6, 5)
    match_location(c.extent.start, 'tests/enumeration.hpp', 1, 1, 0)
    match_location(c.extent.end, 'tests/enumeration.hpp', 1, 1, 0)
    equals(c.usr, 'c:@E@test')
    equals(c.referenced, c)
    equals(c.definition, c)
    equals(c.is_definition, True)
    equals(c.translation_unit.spelling, 'tests/enumeration.hpp')
    # children
    children = [child for child in c.children if child.location.file]
    equals(len(children), 2)
    equals(children[0].kind, libclang.CursorKind.ENUM_CONSTANT_DECL)
    equals(children[0].parent, c)
    # tokens
    c = parse_str('enum test { x, y = 3, z };')[0]
    x, y, z = c.children
    match_tokens(c.tokens, ['enum', 'test', '{', 'x', ',', 'y', '=', '3', ',', 'z', '}', ';'])
    match_tokens(x.tokens, ['x', ','])
    match_tokens(y.tokens, ['y', '=', '3', ','])
    match_tokens(z.tokens, ['z', '}'])
    # tokens -- token extents for extern "C" declarations changed across
    # libclang releases, so the expectation is version dependent.
    c = parse_str('extern "C" void f(int x, int y);')[0]
    f = c.children[0]
    x, y = f.children
    if libclang.version <= 2.8:
        match_tokens(c.tokens, ['"C"', 'void'])
        match_tokens(f.tokens, ['f', '(', 'int', 'x', ',', 'int', 'y', ')', ';'])
    elif libclang.version == 2.9:
        match_tokens(c.tokens, ['"C"', 'void', 'f', '(', 'int', 'x', ',', 'int', 'y', ')', ';'])
        match_tokens(f.tokens, ['f', '(', 'int', 'x', ',', 'int', 'y', ')', ';'])
    else:
        match_tokens(c.tokens, ['extern', '"C"', 'void', 'f', '(', 'int', 'x', ',', 'int', 'y', ')', ';'])
        match_tokens(f.tokens, ['void', 'f', '(', 'int', 'x', ',', 'int', 'y', ')', ';'])
    match_tokens(x.tokens, ['int', 'x', ','])
    match_tokens(y.tokens, ['int', 'y', ')'])
# Cursor attributes added in libclang 2.8.
def test_Cursor28():
    c = parse_str('enum test {};')[0]
    match_type(c.type, libclang.TypeKind.ENUM, c)
    match_type(c.result_type, libclang.TypeKind.INVALID, c)
    match_type(c.ib_outlet_collection_type, libclang.TypeKind.INVALID, c)
    equals(c.availability, libclang.AvailabilityKind.AVAILABLE)
    equals(c.language, libclang.LanguageKind.C)
    equals(c.template_kind, libclang.CursorKind.NO_DECL_FOUND)
    equals(c.specialized_template.kind, libclang.CursorKind.INVALID_FILE)
    equals(c.is_virtual_base, False)
# Cursor attributes added in libclang 2.9.
def test_Cursor29():
    c = parse_str('enum test { a };')[0]
    a = c.children[0]
    equals(a.semantic_parent, c)
    equals(a.lexical_parent, c)
    equals(c.included_file.name, None)
    equals(c.objc_type_encoding, '?')
    equals(len(list(c.overloads)), 0)
    equals(c.display_name, 'test')
    equals(c.canonical, c)
    equals(len(c.overridden), 0)
def test_Cursor30():
c = parse_str('enum test {};', filename='cursor30.hpp')[0]
equals(c.is_virtual, False)
rng = c.reference_name_range(libclang.NameRefFlags.WANT_TEMPLATE_ARGS, 0)
match_location(rng.start, 'cursor30.hpp', 1, 1, 0)
match_location(rng.end, 'cursor30.hpp', 1, 1, 0)
def test_Cursor31():
c = parse_str('enum test { a = 7 };', filename='cursor31.hpp')[0]
equals(len(list(c.arguments)), 0)
equals(c.objc_selector_index, -1)
rng = c.spelling_name_range(libclang.NameRefFlags.WANT_TEMPLATE_ARGS, 0)
match_location(rng.start, 'cursor31.hpp', 1, 6, 5)
match_location(rng.end, 'cursor31.hpp', 1, 6, 5)
def test_Cursor32():
c = parse_str('enum test {};', filename='cursor32.hpp')[0]
equals(c.is_dynamic_call, False)
match_type(c.receiver_type, libclang.TypeKind.INVALID, c)
match_location(c.comment_range.start, None, 0, 0, 0)
match_location(c.comment_range.end, None, 0, 0, 0)
equals(c.raw_comment, None)
equals(c.brief_comment, None)
def test_Cursor33():
c = parse_str('enum test {};', filename='cursor33.hpp')[0]
equals(c.is_bit_field, False)
equals(c.bit_field_width, -1)
equals(c.is_variadic, False)
equals(c.objc_property_attributes, libclang.ObjCPropertyAttributes.NO_ATTR)
equals(c.objc_decl_qualifiers, libclang.ObjCDeclQualifierKind.NONE)
def test_Cursor34():
c = parse_str('enum test {};', filename='cursor34.hpp')[0]
equals(c.is_objc_optional, False)
# --- Record and enumeration declaration cursors.

def test_StructDecl27():
    x = parse_str('struct x { int a; };')[0]
    # x
    match_cursor(x, libclang.CursorKind.STRUCT_DECL)
    match_type(x.type, libclang.TypeKind.RECORD, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)

def test_UnionDecl27():
    x = parse_str('union x { int a; };')[0]
    # x
    match_cursor(x, libclang.CursorKind.UNION_DECL)
    match_type(x.type, libclang.TypeKind.RECORD, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ClassDecl27():
    x = parse_str('class x { int a; };')[0]
    # x
    match_cursor(x, libclang.CursorKind.CLASS_DECL)
    match_type(x.type, libclang.TypeKind.RECORD, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)

def test_EnumDecl27():
    x = parse_str('enum x { a = 7 };')[0]
    # x
    match_cursor(x, libclang.CursorKind.ENUM_DECL)
    equals(isinstance(x, libclang.EnumDecl), True)
    equals(x.is_enum_class, False)
    match_type(x.type, libclang.TypeKind.ENUM, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)

def test_EnumDecl29():
    # C++11 scoped enumerations ('enum class'), with and without an
    # explicit underlying type.
    x, y = parse_str("""
enum class x { b };
enum class y : unsigned char { c };""", args=['-std=c++11'])
    # x
    match_cursor(x, libclang.CursorKind.ENUM_DECL)
    equals(isinstance(x, libclang.EnumDecl), True)
    equals(x.is_enum_class, True)
    match_type(x.type, libclang.TypeKind.ENUM, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)
    # y
    match_cursor(y, libclang.CursorKind.ENUM_DECL)
    equals(isinstance(y, libclang.EnumDecl), True)
    equals(y.is_enum_class, True)
    match_type(y.type, libclang.TypeKind.ENUM, y)
    equals(y.access_specifier, libclang.AccessSpecifier.INVALID)

def test_EnumDecl31():
    # enum_type reports the underlying integer type of the enumeration.
    x, y, z = parse_str("""
enum x { a = 7 };
enum class y { b };
enum class z : unsigned char { c };""", args=['-std=c++11'])
    match_type(x.enum_type, libclang.TypeKind.UINT, x)
    match_type(y.enum_type, libclang.TypeKind.INT, y)
    match_type(z.enum_type, libclang.TypeKind.UCHAR, z)
# --- Field, enum-constant, function, variable and parameter declarations.

def test_FieldDecl27():
    x = parse_str('struct x { int a; };')[0]
    a = x.children[0]
    # a
    match_cursor(a, libclang.CursorKind.FIELD_DECL)
    match_type(a.type, libclang.TypeKind.INT, a)
    equals(a.access_specifier, libclang.AccessSpecifier.PUBLIC)

def test_EnumConstantDecl27():
    x = parse_str('enum x { a = 7 };')[0]
    a = x.children[0]
    # a
    match_cursor(a, libclang.CursorKind.ENUM_CONSTANT_DECL)
    equals(isinstance(a, libclang.EnumConstantDecl), True)
    match_type(a.type, libclang.TypeKind.ENUM, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)

def test_EnumConstantDecl29():
    x, y = parse_str("""
enum class x : short { b = 2 };
enum class y : unsigned char { c = 158 };""", args=['-std=c++11'])
    a = x.children[0]
    b = y.children[0]
    # a
    match_cursor(a, libclang.CursorKind.ENUM_CONSTANT_DECL)
    equals(isinstance(a, libclang.EnumConstantDecl), True)
    match_type(a.type, libclang.TypeKind.ENUM, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)
    # b
    match_cursor(b, libclang.CursorKind.ENUM_CONSTANT_DECL)
    equals(isinstance(b, libclang.EnumConstantDecl), True)
    match_type(b.type, libclang.TypeKind.ENUM, b)
    equals(b.access_specifier, libclang.AccessSpecifier.INVALID)

def test_EnumConstantDecl31():
    x, y, z = parse_str("""
enum x { a = 7 };
enum class y : short { b = 2 };
enum class z : unsigned char { c = 158 };""", args=['-std=c++11'])
    # NOTE(review): long() is a Python 2 builtin; these two assertions will
    # raise NameError on Python 3 -- confirm the supported interpreter
    # versions for this test suite.
    equals(x.children[0].enum_value, long(7))
    equals(y.children[0].enum_value, 2)
    equals(z.children[0].enum_value, long(158))

def test_FunctionDecl27():
    f = parse_str('void f(int x);')[0]
    # f
    match_cursor(f, libclang.CursorKind.FUNCTION_DECL)
    match_type(f.type, libclang.TypeKind.FUNCTION_PROTO, f)
    equals(f.access_specifier, libclang.AccessSpecifier.INVALID)

def test_VarDecl27():
    x = parse_str('int x;')[0]
    # x
    match_cursor(x, libclang.CursorKind.VAR_DECL)
    match_type(x.type, libclang.TypeKind.INT, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ParmDecl27():
    f = parse_str('void f(int x);')[0]
    x = f.children[0]
    # x
    match_cursor(x, libclang.CursorKind.PARM_DECL)
    match_type(x.type, libclang.TypeKind.INT, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)
# --- Objective-C declaration cursors (interfaces, categories, protocols,
# --- properties, ivars, methods, implementations).

def test_ObjCInterfaceDecl27():
    x = parse_str('@interface x @end', args=['-ObjC'])[0]
    # x
    match_cursor(x, libclang.CursorKind.OBJC_INTERFACE_DECL)
    match_type(x.type, libclang.TypeKind.OBJC_INTERFACE, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ObjCCategoryDecl27():
    x, c = parse_str("""
@interface x @end
@interface x (c) @end""", args=['-ObjC'])
    # c
    match_cursor(c, libclang.CursorKind.OBJC_CATEGORY_DECL)
    match_type(c.type, libclang.TypeKind.INVALID, c)
    equals(c.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ObjCProtocolDecl27():
    x = parse_str('@protocol x @end', args=['-ObjC'])[0]
    # x
    match_cursor(x, libclang.CursorKind.OBJC_PROTOCOL_DECL)
    match_type(x.type, libclang.TypeKind.INVALID, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ObjCPropertyDecl27():
    x = parse_str('@interface x @property int a; @end', args=['-ObjC'])[0]
    a = x.children[0]
    # a
    match_cursor(a, libclang.CursorKind.OBJC_PROPERTY_DECL)
    match_type(a.type, libclang.TypeKind.INT, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ObjCIvarDecl27():
    x = parse_str('@interface x { int a; } @end', args=['-ObjC'])[0]
    a = x.children[0]
    # a
    match_cursor(a, libclang.CursorKind.OBJC_IVAR_DECL)
    match_type(a.type, libclang.TypeKind.INT, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ObjCInstanceMethodDecl27():
    x = parse_str('@interface x -(int)a; @end', args=['-ObjC'])[0]
    a = x.children[0]
    # a
    match_cursor(a, libclang.CursorKind.OBJC_INSTANCE_METHOD_DECL)
    match_type(a.type, libclang.TypeKind.INVALID, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ObjCClassMethodDecl27():
    x = parse_str('@interface x +(int)a; @end', args=['-ObjC'])[0]
    a = x.children[0]
    # a
    match_cursor(a, libclang.CursorKind.OBJC_CLASS_METHOD_DECL)
    match_type(a.type, libclang.TypeKind.INVALID, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ObjCImplementationDecl27():
    i, x = parse_str("""
@interface x @end
@implementation x @end""", args=['-ObjC', '-Wno-objc-root-class'])
    # x
    match_cursor(x, libclang.CursorKind.OBJC_IMPLEMENTATION_DECL)
    match_type(x.type, libclang.TypeKind.INVALID, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ObjCCategoryImplDecl27():
    i, x = parse_str("""
@interface x @end
@implementation x (c) @end""", args=['-ObjC', '-Wno-objc-root-class'])
    # x
    match_cursor(x, libclang.CursorKind.OBJC_CATEGORY_IMPL_DECL)
    match_type(x.type, libclang.TypeKind.INVALID, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)
# --- Typedef declarations and C++ member functions (static / virtual /
# --- pure virtual / const and ref-qualified forms).

def test_TypedefDecl27():
    x = parse_str('typedef float x;')[0]
    match_cursor(x, libclang.CursorKind.TYPEDEF_DECL)
    match_type(x.type, libclang.TypeKind.TYPEDEF, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)

def test_TypedefDecl31():
    x = parse_str('typedef float x;')[0]
    match_type(x.type, libclang.TypeKind.TYPEDEF, x)
    # underlying_type resolves through the typedef to the aliased type.
    match_type(x.underlying_type, libclang.TypeKind.FLOAT, x)

def test_CxxMethodDecl28():
    x = parse_str("""
struct x {
void f(int x);
static int g();
virtual void h() {};
virtual void i() = 0;
};""")[0]
    f, g, h, i = x.children
    # f
    match_cursor(f, libclang.CursorKind.CXX_METHOD_DECL)
    equals(isinstance(f, libclang.CxxMethodDecl), True)
    equals(f.is_static, False)
    match_type(f.type, libclang.TypeKind.FUNCTION_PROTO, f)
    # g
    match_cursor(g, libclang.CursorKind.CXX_METHOD_DECL)
    equals(isinstance(g, libclang.CxxMethodDecl), True)
    equals(g.is_static, True)
    match_type(g.type, libclang.TypeKind.FUNCTION_PROTO, g)
    # h
    match_cursor(h, libclang.CursorKind.CXX_METHOD_DECL)
    equals(isinstance(h, libclang.CxxMethodDecl), True)
    equals(h.is_static, False)
    match_type(h.type, libclang.TypeKind.FUNCTION_PROTO, h)
    # i
    match_cursor(i, libclang.CursorKind.CXX_METHOD_DECL)
    equals(isinstance(i, libclang.CxxMethodDecl), True)
    equals(i.is_static, False)
    match_type(i.type, libclang.TypeKind.FUNCTION_PROTO, i)
    # access_specifier
    equals(f.access_specifier, libclang.AccessSpecifier.PUBLIC)
    equals(g.access_specifier, libclang.AccessSpecifier.PUBLIC)
    equals(h.access_specifier, libclang.AccessSpecifier.PUBLIC)
    equals(i.access_specifier, libclang.AccessSpecifier.PUBLIC)

def test_CxxMethodDecl30():
    x = parse_str("""
struct x {
void f(int x);
static int g();
virtual void h() {};
virtual void i() = 0;
};""")[0]
    f, g, h, i = x.children
    # is_virtual
    equals(f.is_virtual, False)
    equals(g.is_virtual, False)
    equals(h.is_virtual, True)
    equals(i.is_virtual, True)

def test_CxxMethodDecl34():
    x = parse_str("""
struct x {
void f(int x);
static int g();
virtual void h() {};
virtual void i() = 0;
};""")[0]
    f, g, h, i = x.children
    # is_pure_virtual
    equals(f.is_pure_virtual, False)
    equals(g.is_pure_virtual, False)
    equals(h.is_pure_virtual, False)
    equals(i.is_pure_virtual, True)

def test_CxxMethodDecl35():
    x = parse_str("""
struct x {
void f(int x);
void g(int x) const;
void h(int x) const &;
void i(int x) const &&;
};""", args=['-std=c++11'])[0]
    f, g, h, i = x.children
    # is_const
    equals(f.is_const, False)
    equals(g.is_const, True)
    equals(h.is_const, True)
    equals(i.is_const, True)
# --- Namespaces, special member functions, templates, and using
# --- directives/declarations.

def test_Namespace28():
    x = parse_str('namespace x {}')[0]
    # x
    match_cursor(x, libclang.CursorKind.NAMESPACE)
    match_type(x.type, libclang.TypeKind.INVALID, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)

def test_Constructor28():
    x = parse_str('struct x { x(); };')[0]
    f = x.children[0]
    # f
    match_cursor(f, libclang.CursorKind.CONSTRUCTOR)
    match_type(f.type, libclang.TypeKind.FUNCTION_PROTO, f)
    equals(f.access_specifier, libclang.AccessSpecifier.PUBLIC)

def test_Destructor28():
    x = parse_str('struct x { ~x(); };')[0]
    f = x.children[0]
    # f
    match_cursor(f, libclang.CursorKind.DESTRUCTOR)
    match_type(f.type, libclang.TypeKind.FUNCTION_PROTO, f)
    equals(f.access_specifier, libclang.AccessSpecifier.PUBLIC)

def test_ConversionFunction28():
    x = parse_str('struct x { operator float(); };')[0]
    f = x.children[0]
    # f
    match_cursor(f, libclang.CursorKind.CONVERSION_FUNCTION)
    match_type(f.type, libclang.TypeKind.FUNCTION_PROTO, f)
    equals(f.access_specifier, libclang.AccessSpecifier.PUBLIC)

def test_ClassTemplate28():
    x = parse_str('template<typename T> struct x {};')[0]
    # x
    match_cursor(x, libclang.CursorKind.CLASS_TEMPLATE)
    match_type(x.type, libclang.TypeKind.INVALID, x)
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ClassTemplatePartialSpecialization28():
    xt, x = parse_str('template<typename T> struct x {}; template<typename T> struct x<T *> {};')
    # x
    match_cursor(x, libclang.CursorKind.CLASS_TEMPLATE_PARTIAL_SPECIALIZATION)
    match_type(x.type, libclang.TypeKind.UNEXPOSED, x) # FIXME
    equals(x.access_specifier, libclang.AccessSpecifier.INVALID)

def test_FunctionTemplate28():
    f, x = parse_str('template<typename T> void f(); struct x { template <typename T> void g(); };')
    g = x.children[0]
    # f
    match_cursor(f, libclang.CursorKind.FUNCTION_TEMPLATE)
    match_type(f.type, libclang.TypeKind.FUNCTION_PROTO, f)
    equals(f.access_specifier, libclang.AccessSpecifier.INVALID)
    # g -- libclang does not have a CursorKind.METHOD_TEMPLATE ...
    match_cursor(g, libclang.CursorKind.FUNCTION_TEMPLATE)
    match_type(g.type, libclang.TypeKind.FUNCTION_PROTO, g)
    equals(g.access_specifier, libclang.AccessSpecifier.PUBLIC)

def test_TemplateTypeParameter28():
    x = parse_str('template<typename T> struct x {};')[0]
    t = x.children[0]
    # t
    match_cursor(t, libclang.CursorKind.TEMPLATE_TYPE_PARAMETER)
    match_type(t.type, libclang.TypeKind.UNEXPOSED, t) # FIXME
    equals(t.access_specifier, libclang.AccessSpecifier.INVALID)

def test_NonTypeTemplateParameter28():
    x = parse_str('template<int T> struct x {};')[0]
    t = x.children[0]
    # t
    match_cursor(t, libclang.CursorKind.NON_TYPE_TEMPLATE_PARAMETER)
    match_type(t.type, libclang.TypeKind.INT, t)
    equals(t.access_specifier, libclang.AccessSpecifier.INVALID)

def test_TemplateTemplateParameter28():
    x = parse_str('template<template<typename T> class U> struct x {};')[0]
    u = x.children[0]
    # u
    match_cursor(u, libclang.CursorKind.TEMPLATE_TEMPLATE_PARAMETER)
    match_type(u.type, libclang.TypeKind.INVALID, u)
    equals(u.access_specifier, libclang.AccessSpecifier.INVALID)

def test_NamespaceAlias28():
    x, y = parse_str('namespace x {} namespace y = x;')
    # y
    match_cursor(y, libclang.CursorKind.NAMESPACE_ALIAS)
    match_type(y.type, libclang.TypeKind.INVALID, y)
    equals(y.access_specifier, libclang.AccessSpecifier.INVALID)

def test_UsingDirective28():
    x, y = parse_str('namespace x { int a; } using namespace x;')
    # y
    match_cursor(y, libclang.CursorKind.USING_DIRECTIVE)
    match_type(y.type, libclang.TypeKind.INVALID, y)
    equals(y.access_specifier, libclang.AccessSpecifier.INVALID)

def test_UsingDeclaration28():
    x, y = parse_str('namespace x { int a; } using x::a;')
    # y
    match_cursor(y, libclang.CursorKind.USING_DECLARATION)
    match_type(y.type, libclang.TypeKind.INVALID, y)
    equals(y.access_specifier, libclang.AccessSpecifier.INVALID)
# --- nullptr literal, linkage specifications, C++11 type aliases, and
# --- Objective-C @synthesize / @dynamic declarations.

def test_CxxNullPtrLiteralExpr28():
    x = parse_str('int *x = nullptr;', args=['-std=c++11'])[0]
    e = x.children[0] # assignment
    # cursor
    a = e.children[0]
    match_cursor(a, libclang.CursorKind.CXX_NULLPTR_LITERAL_EXPR)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)
    # type -- nullptr_t is a builtin, neither integral nor floating point.
    t = a.type
    match_type(t, libclang.TypeKind.NULLPTR, a)
    equals(isinstance(t, libclang.BuiltinType), True)
    equals(t.is_signed_integer, False)
    equals(t.is_unsigned_integer, False)
    equals(t.is_floating_point, False)

def test_LinkageSpec30():
    s = parse_str('extern "C" void f(int x);')[0]
    # s
    match_cursor(s, libclang.CursorKind.LINKAGE_SPEC)
    match_type(s.type, libclang.TypeKind.INVALID, s)
    equals(s.access_specifier, libclang.AccessSpecifier.INVALID)

def test_TypeAliasDecl30():
    x, y = parse_str('struct x {}; using y = x;', args=['-std=c++11'])
    # y
    match_cursor(y, libclang.CursorKind.TYPE_ALIAS_DECL)
    match_type(y.type, libclang.TypeKind.TYPEDEF, y)
    equals(y.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ObjCSynthesizeDecl30():
    i, x = parse_str("""
@interface x { int _a; } @property int a; @end
@implementation x @synthesize a=_a; @end""", args=['-ObjC', '-Wno-objc-root-class'])
    a = x.children[0]
    # a
    match_cursor(a, libclang.CursorKind.OBJC_SYNTHESIZE_DECL)
    match_type(a.type, libclang.TypeKind.INVALID, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ObjCDynamicDecl30():
    i, x = parse_str("""
@interface x { int _a; } @property int a; @end
@implementation x @dynamic a; @end""", args=['-ObjC', '-Wno-objc-root-class'])
    a = x.children[0]
    # a
    match_cursor(a, libclang.CursorKind.OBJC_DYNAMIC_DECL)
    match_type(a.type, libclang.TypeKind.INVALID, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)
def test_CxxAccessSpecifier27():
    # Access-specifier cursors inside a class (default private) and a
    # struct (default public).
    x, y = parse_str("""
class x {
int a;
protected: int c;
public: int b;
private: int d;
};
struct y {
int e;
protected: int f;
public: int g;
private: int h;
};""")
    if libclang.version == 3.0:
        # libclang 3.0 has a bug where the access specifier nodes are
        # duplicated.
        a, x1_, x1, b, x2_, x2, c, x3_, x3, d = x.children
        match_cursor(x1_, libclang.CursorKind.CXX_ACCESS_SPECIFIER)
        match_cursor(x2_, libclang.CursorKind.CXX_ACCESS_SPECIFIER)
        match_cursor(x3_, libclang.CursorKind.CXX_ACCESS_SPECIFIER)
        e, y1_, y1, f, y2_, y2, g, y3_, y3, h = y.children
        match_cursor(y1_, libclang.CursorKind.CXX_ACCESS_SPECIFIER)
        match_cursor(y2_, libclang.CursorKind.CXX_ACCESS_SPECIFIER)
        match_cursor(y3_, libclang.CursorKind.CXX_ACCESS_SPECIFIER)
    else:
        a, x1, b, x2, c, x3, d = x.children
        e, y1, f, y2, g, y3, h = y.children
    # x1
    match_cursor(x1, libclang.CursorKind.CXX_ACCESS_SPECIFIER)
    match_type(x1.type, libclang.TypeKind.INVALID, x1)
    equals(x1.access_specifier, libclang.AccessSpecifier.PROTECTED)
    # x2
    match_cursor(x2, libclang.CursorKind.CXX_ACCESS_SPECIFIER)
    match_type(x2.type, libclang.TypeKind.INVALID, x2)
    equals(x2.access_specifier, libclang.AccessSpecifier.PUBLIC)
    # x3
    match_cursor(x3, libclang.CursorKind.CXX_ACCESS_SPECIFIER)
    match_type(x3.type, libclang.TypeKind.INVALID, x3)
    equals(x3.access_specifier, libclang.AccessSpecifier.PRIVATE)
    # access_specifier -- class
    equals(a.access_specifier, libclang.AccessSpecifier.PRIVATE)
    equals(b.access_specifier, libclang.AccessSpecifier.PROTECTED)
    equals(c.access_specifier, libclang.AccessSpecifier.PUBLIC)
    equals(d.access_specifier, libclang.AccessSpecifier.PRIVATE)
    # access_specifier -- struct
    equals(e.access_specifier, libclang.AccessSpecifier.PUBLIC)
    equals(f.access_specifier, libclang.AccessSpecifier.PROTECTED)
    equals(g.access_specifier, libclang.AccessSpecifier.PUBLIC)
    equals(h.access_specifier, libclang.AccessSpecifier.PRIVATE)
# --- Reference cursors: Objective-C superclass/protocol/class references
# --- and C++ type references. Some ref types were INVALID before 2.9.

def test_ObjCSuperClassRef27():
    x, y = parse_str("""
@interface x @end
@interface y : x @end""", args=['-ObjC', '-Wno-objc-root-class'])
    a = y.children[0]
    # a
    match_cursor(a, libclang.CursorKind.OBJC_SUPER_CLASS_REF)
    if libclang.version <= 2.8:
        match_type(a.type, libclang.TypeKind.INVALID, a)
    else:
        match_type(a.type, libclang.TypeKind.OBJC_INTERFACE, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ObjCProtocolRef27():
    x, y = parse_str("""
@protocol x @end
@interface y @property id <x> a; @end""", args=['-ObjC', '-Wno-objc-root-class'])
    p = y.children[0] # property
    t, a = p.children
    # a
    match_cursor(a, libclang.CursorKind.OBJC_PROTOCOL_REF)
    match_type(a.type, libclang.TypeKind.INVALID, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)

def test_ObjCClassRef27():
    x, y = parse_str("""
@interface x @end
@interface y -(x *)a; @end""", args=['-ObjC', '-Wno-objc-root-class'])
    a = y.children[0] # method
    r = a.children[0]
    # r
    match_cursor(r, libclang.CursorKind.OBJC_CLASS_REF)
    if libclang.version <= 2.8:
        match_type(r.type, libclang.TypeKind.INVALID, r)
    else:
        match_type(r.type, libclang.TypeKind.OBJC_INTERFACE, r)
    equals(r.access_specifier, libclang.AccessSpecifier.INVALID)

def test_TypeRef27():
    x, y = parse_str("""
struct x {};
typedef x y;""")
    a = y.children[0]
    # a
    match_cursor(a, libclang.CursorKind.TYPE_REF)
    if libclang.version <= 2.8:
        match_type(a.type, libclang.TypeKind.INVALID, a)
    else:
        match_type(a.type, libclang.TypeKind.RECORD, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)
# --- C++ base specifiers and template / namespace reference cursors.

def test_CxxBaseSpecifier28():
    b1, b2, b3, b4, x, y = parse_str("""
struct b1 {};
struct b2 {};
struct b3 {};
struct b4 {};
struct x : b1, protected b2, private b3, public b4 {};
class y : b1, protected b2, private b3, public b4 {};""")
    x1, x2, x3, x4 = x.children
    y1, y2, y3, y4 = y.children
    # a
    match_cursor(x1, libclang.CursorKind.CXX_BASE_SPECIFIER)
    if libclang.version <= 2.8:
        match_type(x1.type, libclang.TypeKind.INVALID, x1)
    else:
        match_type(x1.type, libclang.TypeKind.RECORD, x1)
    # access_specifier -- struct bases default to public, class bases to
    # private.
    equals(x1.access_specifier, libclang.AccessSpecifier.PUBLIC)
    equals(x2.access_specifier, libclang.AccessSpecifier.PROTECTED)
    equals(x3.access_specifier, libclang.AccessSpecifier.PRIVATE)
    equals(x4.access_specifier, libclang.AccessSpecifier.PUBLIC)
    equals(y1.access_specifier, libclang.AccessSpecifier.PRIVATE)
    equals(y2.access_specifier, libclang.AccessSpecifier.PROTECTED)
    equals(y3.access_specifier, libclang.AccessSpecifier.PRIVATE)
    equals(y4.access_specifier, libclang.AccessSpecifier.PUBLIC)

def test_TemplateRef28():
    x, y = parse_str("""
template <typename T> struct x {};
typedef x<int> y;""")
    a = y.children[0]
    # a
    match_cursor(a, libclang.CursorKind.TEMPLATE_REF)
    match_type(a.type, libclang.TypeKind.INVALID, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)

def test_NamespaceRef28():
    x, y = parse_str("""
namespace x {}
namespace y = x;""")
    a = y.children[0]
    # a
    match_cursor(a, libclang.CursorKind.NAMESPACE_REF)
    match_type(a.type, libclang.TypeKind.INVALID, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)
# --- Member, label, overloaded-declaration and variable references.
# --- These raise UnsupportedException when the loaded libclang build
# --- does not expose the corresponding child nodes.

def test_MemberRef29():
    x, y = parse_str("""
struct x { int a; };
struct x y = { .a = 2 };""", args=['-std=c99'], filename='memberref.c')
    i = y.children[1]
    j = i.children[0]
    if len(j.children) == 0:
        raise UnsupportedException('designated initializers not supported')
    a = j.children[0]
    # a
    match_cursor(a, libclang.CursorKind.MEMBER_REF)
    if libclang.version <= 3.0:
        match_type(a.type, libclang.TypeKind.INVALID, a)
    else:
        match_type(a.type, libclang.TypeKind.INT, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)

def test_LabelRef29():
    f = parse_str('int main() { x:; goto x; }')[0]
    c = f.children[0]
    l, m = c.children
    if len(m.children) == 0:
        raise UnsupportedException('label references not supported')
    a = m.children[0]
    # a
    match_cursor(a, libclang.CursorKind.LABEL_REF)
    match_type(a.type, libclang.TypeKind.INVALID, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)

def test_OverloadedDeclRef29():
    f, g = parse_str("""
template <typename T> void f(T x);
template <typename T> void g(T x) { f(x); }""")
    if len(g.children) == 2: # libclang <= 3.2 : no TemplateTypeParameter
        _, c = g.children
    else:
        _, _, c = g.children
    e = c.children[0]
    d = e.children[0]
    if len(d.children) == 0:
        raise UnsupportedException('overloaded declarations not supported')
    a = d.children[0]
    # a
    match_cursor(a, libclang.CursorKind.OVERLOADED_DECL_REF)
    match_type(a.type, libclang.TypeKind.INVALID, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)

def test_VariableRef31():
    f = parse_str('void f() { int x; [x](){}; }', args=['-std=c++11'])[0]
    c = f.children[0]
    if len(c.children) == 1: # libclang <= 3.0
        raise UnsupportedException('lambda expressions are not supported')
    d, l = c.children
    a = l.children[0]
    # a -- the lambda capture of x.
    match_cursor(a, libclang.CursorKind.VARIABLE_REF)
    match_type(a.type, libclang.TypeKind.INT, a)
    equals(a.access_specifier, libclang.AccessSpecifier.INVALID)
def test_Token():
    # Tokenization of a fixture file: spelling/kind of each token, the
    # TokenList.match() helper, and per-token location/extent/cursor.
    index = libclang.Index()
    tu = index.parse('tests/enumeration.hpp')
    f = tu.file('tests/enumeration.hpp')
    rng = libclang.SourceRange(tu.location(f, 1, 1), tu.location(f, 2, 1))
    children = [child for child in tu.cursor().children if child.location.file]
    # tokenize
    tokens = tu.tokenize(libclang.SourceRange.null())
    equals(len(tokens), 0)
    tokens = tu.tokenize(rng)
    equals(len(tokens), 3)
    equals(tokens[0].spelling, 'enum')
    equals(tokens[0].kind, libclang.TokenKind.KEYWORD)
    equals(tokens[1].spelling, 'test')
    equals(tokens[1].kind, libclang.TokenKind.IDENTIFIER)
    equals(tokens[2].spelling, '{')
    equals(tokens[2].kind, libclang.TokenKind.PUNCTUATION)
    # match
    equals(tokens.match(0, libclang.TokenKind.KEYWORD), True)
    equals(tokens.match(0, libclang.TokenKind.KEYWORD, 'enum'), True)
    equals(tokens.match(1, libclang.TokenKind.IDENTIFIER), True)
    equals(tokens.match(1, libclang.TokenKind.IDENTIFIER, 'test'), True)
    equals(tokens.match(2, libclang.TokenKind.PUNCTUATION), True)
    equals(tokens.match(2, libclang.TokenKind.PUNCTUATION, '{'), True)
    equals(tokens.match(0, libclang.TokenKind.IDENTIFIER), False)
    equals(tokens.match(0, libclang.TokenKind.KEYWORD, 'enun'), False)
    equals(tokens.match(3, libclang.TokenKind.PUNCTUATION), False)
    # token
    token = tokens[0]
    equals(str(token), 'enum')
    equals(token.location, tu.location(f, 1, 1))
    match_location(token.location, 'tests/enumeration.hpp', 1, 1, 0)
    match_location(token.extent.start, 'tests/enumeration.hpp', 1, 1, 0)
    match_location(token.extent.end, 'tests/enumeration.hpp', 1, 1, 0)
    equals(token.cursor, children[0])
# --- Type object tests, by libclang version.

def test_Type28():
    c = parse_str('int a;')[0]
    t = c.type
    # Equality / inequality between Type objects.
    equals(t == t, True)
    equals(t == t.pointee_type, False)
    equals(t != t, False)
    equals(t != t.pointee_type, True)
    # type
    equals(t.kind, libclang.TypeKind.INT)
    equals(t.cursor, c)
    equals(t.canonical_type, t)
    equals(t.canonical_type.cursor, c)
    match_type(t.pointee_type, libclang.TypeKind.INVALID, c)
    match_type(t.result_type, libclang.TypeKind.INVALID, c)
    equals(t.declaration.kind, libclang.CursorKind.NO_DECL_FOUND)
    equals(t.is_pod, True)

def test_Type29():
    c = parse_str('int a;')[0]
    t = c.type
    equals(t.is_const_qualified, False)
    equals(t.is_volatile_qualified, False)
    equals(t.is_restrict_qualified, False)

def test_Type30():
    c = parse_str('long a[4];')[0]
    t = c.type
    match_type(t.array_element_type, libclang.TypeKind.LONG, c)
    equals(t.array_size, 4)

def test_Type31():
    c = parse_str('long a[4];')[0]
    t = c.type
    equals(len(list(t.argument_types)), 0)
    match_type(t.element_type, libclang.TypeKind.LONG, c)
    equals(t.element_count, 4)
    equals(t.is_variadic, False)
    equals(t.calling_convention, libclang.CallingConvention.INVALID)

def test_Type33():
    c = parse_str('short a[4];')[0]
    t = c.type
    equals(t.spelling, 'short [4]')
    equals(str(t), 'short [4]')
    equals(t.alignment, 2)
    equals(t.size, 8)
    # offset() of an unknown field reports -1.
    equals(t.offset('a'), -1)

def test_Type35():
    c = parse_str('long a[4];')[0]
    t = c.type
    equals(len(list(t.template_arguments)), 0)
def test_builtin_type(program, kind, args=None, ignore_errors=False, signed=False, unsigned=False, floating_point=False):
    # Shared helper (not a test itself): parse `program`, then check the
    # first declaration's type against `kind` and the three signedness /
    # floating-point classification flags.
    c = parse_str(program, args=args, ignore_errors=ignore_errors)[0]
    t = c.type
    match_type(t, kind, c)
    equals(isinstance(t, libclang.BuiltinType), True)
    equals(t.is_signed_integer, signed)
    equals(t.is_unsigned_integer, unsigned)
    equals(t.is_floating_point, floating_point)
def test_BuiltinType28():
    # Exercise every C/C++ builtin type kind through test_builtin_type.
    Kind = libclang.TypeKind
    test_builtin_type('void a;', Kind.VOID, ignore_errors=True,
        signed=False, unsigned=False, floating_point=False)
    test_builtin_type('bool a;', Kind.BOOL,
        signed=False, unsigned=True, floating_point=False)
    test_builtin_type('char a;', Kind.CHAR_U, args=['-funsigned-char'],
        signed=False, unsigned=True, floating_point=False)
    test_builtin_type('unsigned char a;', Kind.UCHAR,
        signed=False, unsigned=True, floating_point=False)
    test_builtin_type('wchar_t a;', Kind.WCHAR, args=['-funsigned-char'],
        signed=True, unsigned=False, floating_point=False)
    test_builtin_type('char16_t a;', Kind.CHAR16, args=['-std=c++11'],
        signed=False, unsigned=True, floating_point=False)
    test_builtin_type('char32_t a;', Kind.CHAR32, args=['-std=c++11'],
        signed=False, unsigned=True, floating_point=False)
    test_builtin_type('unsigned short a;', Kind.USHORT,
        signed=False, unsigned=True, floating_point=False)
    test_builtin_type('unsigned int a;', Kind.UINT,
        signed=False, unsigned=True, floating_point=False)
    test_builtin_type('unsigned long a;', Kind.ULONG,
        signed=False, unsigned=True, floating_point=False)
    test_builtin_type('unsigned long long a;', Kind.ULONGLONG,
        signed=False, unsigned=True, floating_point=False)
    test_builtin_type('char a;', Kind.CHAR_S,
        signed=True, unsigned=False, floating_point=False)
    test_builtin_type('signed char a;', Kind.SCHAR,
        signed=True, unsigned=False, floating_point=False)
    test_builtin_type('wchar_t a;', Kind.WCHAR,
        signed=True, unsigned=False, floating_point=False)
    test_builtin_type('short a;', Kind.SHORT,
        signed=True, unsigned=False, floating_point=False)
    test_builtin_type('int a;', Kind.INT,
        signed=True, unsigned=False, floating_point=False)
    test_builtin_type('long a;', Kind.LONG,
        signed=True, unsigned=False, floating_point=False)
    test_builtin_type('long long a;', Kind.LONGLONG,
        signed=True, unsigned=False, floating_point=False)
    test_builtin_type('float a;', Kind.FLOAT,
        signed=False, unsigned=False, floating_point=True)
    test_builtin_type('double a;', Kind.DOUBLE,
        signed=False, unsigned=False, floating_point=True)
    test_builtin_type('long double a;', Kind.LONG_DOUBLE,
        signed=False, unsigned=False, floating_point=True)

def test_BuiltinType31():
    # 128-bit integer types, available from libclang 3.1.
    Kind = libclang.TypeKind
    test_builtin_type('__int128 a;', Kind.INT128,
        signed=True, unsigned=False, floating_point=False)
    test_builtin_type('unsigned __int128 a;', Kind.UINT128,
        signed=False, unsigned=True, floating_point=False)
def test_FunctionProtoType34():
    # C++11 ref-qualifiers (&, &&) on member function prototypes.
    s, j = parse_str("""
struct test {
int f(float x);
int g(float x) const;
int h(float x) const &;
int i(float x) const &&;
};
int j(float x);""", args=['-std=c++11'])
    f, g, h, i = s.children
    # f -- no ref-qualifier
    equals(f.spelling, 'f')
    ft = f.type
    match_type(ft, libclang.TypeKind.FUNCTION_PROTO, f)
    equals(isinstance(ft, libclang.FunctionProtoType), True)
    equals(ft.cxx_ref_qualifier, libclang.RefQualifierKind.NONE)
    # g -- const, no ref-qualifier
    equals(g.spelling, 'g')
    gt = g.type
    match_type(gt, libclang.TypeKind.FUNCTION_PROTO, g)
    equals(isinstance(gt, libclang.FunctionProtoType), True)
    equals(gt.cxx_ref_qualifier, libclang.RefQualifierKind.NONE)
    # h -- const lvalue
    equals(h.spelling, 'h')
    ht = h.type
    match_type(ht, libclang.TypeKind.FUNCTION_PROTO, h)
    equals(isinstance(ht, libclang.FunctionProtoType), True)
    equals(ht.cxx_ref_qualifier, libclang.RefQualifierKind.LVALUE)
    # i -- const rvalue
    equals(i.spelling, 'i')
    it = i.type
    match_type(it, libclang.TypeKind.FUNCTION_PROTO, i)
    equals(isinstance(it, libclang.FunctionProtoType), True)
    equals(it.cxx_ref_qualifier, libclang.RefQualifierKind.RVALUE)
    # j -- no ref-qualifier (non-member function)
    equals(j.spelling, 'j')
    jt = j.type
    match_type(jt, libclang.TypeKind.FUNCTION_PROTO, j)
    equals(isinstance(jt, libclang.FunctionProtoType), True)
    equals(jt.cxx_ref_qualifier, libclang.RefQualifierKind.NONE)

def test_MemberPointerType34():
    # Pointer-to-member type exposes the owning class via class_type.
    s, mp = parse_str('struct A{}; int *A::* b;')
    t = mp.type
    match_type(t, libclang.TypeKind.MEMBER_POINTER, mp)
    equals(isinstance(t, libclang.MemberPointerType), True)
    equals(t.class_type.kind, libclang.TypeKind.RECORD)
# --- Test driver: load the requested libclang build, then run every test
# --- whose minimum libclang version (first run() argument) is satisfied.
if len(sys.argv) > 1:
    libclang.load(name=sys.argv[1])
else:
    libclang.load()

run(2.7, test_version)
run(2.7, test_SourceLocation)
run(2.9, test_SourceLocation29)
run(3.0, test_SourceLocation30)
run(3.1, test_SourceLocation31)
run(3.3, test_SourceLocation33)
run(3.4, test_SourceLocation34)
run(2.7, test_SourceRange)
run(2.7, test_DiagnosticDisplayOptions)
run(2.7, test_DiagnosticSeverity)
run(2.9, test_DiagnosticCategory29)
run(2.7, test_Linkage)
run(2.7, test_TokenKind)
run(2.7, test_CursorKind)
run(2.8, test_CursorKind28)
run(3.0, test_CursorKind30)
run(2.8, test_TypeKind28)
run(2.8, test_AvailabilityKind28)
run(2.8, test_LanguageKind28)
run(2.8, test_AccessSpecifier28)
run(3.0, test_NameRefFlags30)
run(2.8, test_TranslationUnitFlags28)
run(2.8, test_SaveTranslationUnitFlags28)
run(2.8, test_ReparseTranslationUnitFlags28)
run(3.1, test_GlobalOptionFlags31)
run(3.1, test_CallingConvention31)
run(3.3, test_ObjCPropertyAttributes33)
run(3.3, test_ObjCDeclQualifierKind33)
run(3.4, test_RefQualifierKind34)
run(2.7, test_Index)
run(3.1, test_Index31)
run(2.7, test_TranslationUnit)
run(2.9, test_TranslationUnit29)
run(3.0, test_TranslationUnit30)
run(2.7, test_Diagnostic)
run(2.9, test_Diagnostic29)
run(2.7, test_Cursor)
run(2.8, test_Cursor28)
run(2.9, test_Cursor29)
run(3.0, test_Cursor30)
run(3.1, test_Cursor31)
run(3.2, test_Cursor32)
run(3.3, test_Cursor33)
run(3.4, test_Cursor34)
run(2.7, test_StructDecl27)
run(2.7, test_UnionDecl27)
run(2.7, test_ClassDecl27)
run(2.7, test_EnumDecl27)
run(2.9, test_EnumDecl29) # C++11 enum class
run(3.1, test_EnumDecl31)
run(2.7, test_FieldDecl27)
run(2.7, test_EnumConstantDecl27)
run(2.9, test_EnumConstantDecl29) # C++11 enum class
run(3.1, test_EnumConstantDecl31)
run(2.7, test_FunctionDecl27)
run(2.7, test_VarDecl27)
run(2.7, test_ParmDecl27)
run(2.7, test_ObjCInterfaceDecl27)
run(2.7, test_ObjCCategoryDecl27)
run(2.7, test_ObjCPropertyDecl27)
run(2.7, test_ObjCProtocolDecl27)
run(2.7, test_ObjCIvarDecl27)
run(2.7, test_ObjCInstanceMethodDecl27)
run(2.7, test_ObjCClassMethodDecl27)
run(2.7, test_ObjCImplementationDecl27)
run(2.7, test_ObjCCategoryImplDecl27)
run(2.7, test_TypedefDecl27)
run(3.1, test_TypedefDecl31)
run(2.8, test_CxxMethodDecl28)
run(3.0, test_CxxMethodDecl30)
run(3.4, test_CxxMethodDecl34)
run(3.5, test_CxxMethodDecl35)
run(2.8, test_Namespace28)
run(2.8, test_Constructor28)
run(2.8, test_Destructor28)
run(2.8, test_ConversionFunction28)
run(2.8, test_ClassTemplate28)
run(2.8, test_ClassTemplatePartialSpecialization28)
run(2.8, test_FunctionTemplate28)
run(2.8, test_TemplateTypeParameter28)
run(2.8, test_NonTypeTemplateParameter28)
run(2.8, test_TemplateTemplateParameter28)
run(2.8, test_NamespaceAlias28)
run(2.8, test_UsingDirective28)
run(2.8, test_UsingDeclaration28)
run(2.8, test_CxxNullPtrLiteralExpr28)
run(3.0, test_LinkageSpec30)
run(3.0, test_TypeAliasDecl30)
run(3.0, test_ObjCSynthesizeDecl30)
# Fix: test_ObjCDynamicDecl30 was defined but never executed -- it was
# missing from this run list.
run(3.0, test_ObjCDynamicDecl30)
run(2.7, test_CxxAccessSpecifier27)
run(2.7, test_ObjCSuperClassRef27)
run(2.7, test_ObjCProtocolRef27)
run(2.7, test_ObjCClassRef27)
run(2.7, test_TypeRef27)
run(2.8, test_CxxBaseSpecifier28)
run(2.8, test_TemplateRef28)
run(2.8, test_NamespaceRef28)
run(2.9, test_MemberRef29)
run(2.9, test_LabelRef29)
run(2.9, test_OverloadedDeclRef29)
run(3.1, test_VariableRef31)
run(2.7, test_Token)
run(2.8, test_Type28)
run(2.9, test_Type29)
run(3.0, test_Type30)
run(3.1, test_Type31)
run(3.3, test_Type33)
run(3.5, test_Type35)
run(2.8, test_BuiltinType28)
run(3.1, test_BuiltinType31)
run(3.4, test_FunctionProtoType34)
run(3.4, test_MemberPointerType34)
summary()
| rhdunn/libclangpy | tests.py | Python | gpl-3.0 | 54,828 |
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.conf import settings
from django.conf.urls.static import static
from video_manager import views
# URL routes for the video_manager app. Order matters: Django resolves
# against the first matching pattern.
urlpatterns = [
    url(r'^$', views.IndexView.as_view(), name='index'),
    url(r'^(?P<pk>\d+)/$', views.DetailsView.as_view(), name='details'),
    # re-analysis is restricted to authenticated users
    url(r'^(?P<video_id>\d+)/makeanalyze/$',
        login_required(views.reanalize_video), name='makeanalyze'),
    url(r'^(?P<video_id>\d+)/analyze/$', views.get_video_analysis_json,
        name='analyze'),
    url(r'^(?P<pk>\d+)/suspicious/$', views.SuspiciousDetailsView.as_view(),
        name='suspicious'),
]

# Serve user-uploaded media files (development convenience; no-op when
# DEBUG is False).
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| iago-suarez/ancoweb-TFG | src/video_manager/urls.py | Python | apache-2.0 | 777 |
# -*- coding: utf-8 -*-
"""Finitely Presented Groups and its algorithms. """
from __future__ import print_function, division
from sympy.core.basic import Basic
from sympy.core import Symbol, Mod
from sympy.printing.defaults import DefaultPrinting
from sympy.utilities import public
from sympy.utilities.iterables import flatten
from sympy.combinatorics.free_groups import (FreeGroup, FreeGroupElement,
free_group, zero_mul_simp)
from sympy.combinatorics.rewritingsystem import RewritingSystem
from sympy.combinatorics.coset_table import (CosetTable,
coset_enumeration_r,
coset_enumeration_c)
from sympy.combinatorics import PermutationGroup
from itertools import product
@public
def fp_group(fr_grp, relators=[]):
    """Construct an FpGroup on ``fr_grp`` and return it together with its
    generators, i.e. ``(G, gen_1, ..., gen_n)``.
    """
    _fp_group = FpGroup(fr_grp, relators)
    # ``_generators`` is a method on FpGroup; the generator tuple itself is
    # stored on the ``generators`` attribute.  The original
    # ``tuple(_fp_group._generators)`` iterated the bound method and would
    # raise TypeError.
    return (_fp_group,) + tuple(_fp_group.generators)
@public
def xfp_group(fr_grp, relators=[]):
    """Construct an FpGroup on ``fr_grp`` and return ``(G, gens)`` where
    ``gens`` is the tuple of the group's generators.
    """
    _fp_group = FpGroup(fr_grp, relators)
    # Use the ``generators`` attribute; ``_generators`` is the (bound)
    # method used internally by ``FpGroup.__init__``.
    return (_fp_group, _fp_group.generators)
@public
def vfp_group(fr_grpm, relators):
    """Construct an FpGroup and inject its generators into the caller's
    global namespace (mirrors ``vfree_group`` in ``free_groups``).

    NOTE(review): the original body referenced the undefined names
    ``symbols``, ``pollute`` and ``_fp_group.symbols``; it could never have
    run.  The fix below follows the ``vfree_group`` implementation — confirm
    against callers.
    """
    from sympy.utilities.magic import pollute
    _fp_group = FpGroup(fr_grpm, relators)
    pollute([sym.name for sym in _fp_group.free_group.symbols],
            _fp_group.generators)
    return _fp_group
def _parse_relators(rels):
    """Parse the passed relators."""
    # Currently a passthrough placeholder: relators are returned unchanged.
    # Kept as a hook for future validation/normalisation of relator input.
    return rels
###############################################################################
# FINITELY PRESENTED GROUPS #
###############################################################################
class FpGroup(DefaultPrinting):
    """
    The FpGroup would take a FreeGroup and a list/tuple of relators, the
    relators would be specified in such a way that each of them be equal to the
    identity of the provided free group.
    """
    is_group = True
    is_FpGroup = True
    is_PermutationGroup = False

    def __init__(self, fr_grp, relators):
        relators = _parse_relators(relators)
        self.free_group = fr_grp
        self.relators = relators
        self.generators = self._generators()
        self.dtype = type("FpGroupElement", (FpGroupElement,), {"group": self})
        # CosetTable instance on identity subgroup
        self._coset_table = None
        # whether the coset table on the identity subgroup
        # has been standardized
        self._is_standardized = False
        # cached results (computed lazily)
        self._order = None
        self._center = None
        self._rewriting_system = RewritingSystem(self)
        self._perm_isomorphism = None

    def _generators(self):
        """Return the generators of the underlying free group."""
        return self.free_group.generators

    def make_confluent(self):
        '''
        Try to make the group's rewriting system confluent
        '''
        self._rewriting_system.make_confluent()

    def reduce(self, word):
        '''
        Return the reduced form of `word` in `self` according to the group's
        rewriting system. If it's confluent, the reduced form is the unique normal
        form of the word in the group.
        '''
        return self._rewriting_system.reduce(word)

    def equals(self, word1, word2):
        '''
        Compare `word1` and `word2` for equality in the group
        using the group's rewriting system. If the system is
        confluent, the returned answer is necessarily correct.
        (If it isn't, `False` could be returned in some cases
        where in fact `word1 == word2`)
        '''
        if self.reduce(word1*word2**-1) == self.identity:
            return True
        elif self._rewriting_system.is_confluent:
            return False
        # inconclusive: the rewriting system is not confluent
        return None

    @property
    def identity(self):
        return self.free_group.identity

    def __contains__(self, g):
        return g in self.free_group

    def subgroup(self, gens, C=None):
        '''
        Return the subgroup generated by `gens` using the
        Reidemeister-Schreier algorithm
        '''
        if not all(isinstance(g, FreeGroupElement) for g in gens):
            raise ValueError("Generators must be `FreeGroupElement`s")
        if not all(g.group == self.free_group for g in gens):
            raise ValueError("Given generators are not members of the group")
        g, rels = reidemeister_presentation(self, gens, C=C)
        if g:
            g = FpGroup(g[0].group, rels)
        else:
            # trivial subgroup: present it on an empty free group
            g = FpGroup(free_group('')[0], [])
        return g

    def coset_enumeration(self, H, strategy="relator_based", max_cosets=None,
                          draft=None, incomplete=False):
        """
        Return an instance of ``coset table``, when Todd-Coxeter algorithm is
        run over the ``self`` with ``H`` as subgroup, using ``strategy``
        argument as strategy. The returned coset table is compressed but not
        standardized.

        An instance of `CosetTable` for `fp_grp` can be passed as the keyword
        argument `draft` in which case the coset enumeration will start with
        that instance and attempt to complete it.

        When `incomplete` is `True` and the function is unable to complete for
        some reason, the partially complete table will be returned.
        """
        if not max_cosets:
            max_cosets = CosetTable.coset_table_max_limit
        if strategy == 'relator_based':
            C = coset_enumeration_r(self, H, max_cosets=max_cosets,
                                    draft=draft, incomplete=incomplete)
        else:
            C = coset_enumeration_c(self, H, max_cosets=max_cosets,
                                    draft=draft, incomplete=incomplete)
        if C.is_complete():
            C.compress()
        return C

    def standardize_coset_table(self):
        """
        Standardized the coset table ``self`` and makes the internal variable
        ``_is_standardized`` equal to ``True``.
        """
        self._coset_table.standardize()
        self._is_standardized = True

    def coset_table(self, H, strategy="relator_based", max_cosets=None,
                    draft=None, incomplete=False):
        """
        Return the mathematical coset table of ``self`` in ``H``.
        """
        if not H:
            # the table on the identity subgroup is cached
            if self._coset_table is not None:
                if not self._is_standardized:
                    self.standardize_coset_table()
            else:
                C = self.coset_enumeration([], strategy, max_cosets=max_cosets,
                                           draft=draft, incomplete=incomplete)
                self._coset_table = C
                self.standardize_coset_table()
            return self._coset_table.table
        else:
            C = self.coset_enumeration(H, strategy, max_cosets=max_cosets,
                                       draft=draft, incomplete=incomplete)
            C.standardize()
            return C.table

    def order(self, strategy="relator_based"):
        """
        Returns the order of the finitely presented group ``self``. It uses
        the coset enumeration with identity group as subgroup, i.e ``H=[]``.

        Examples
        ========

        >>> from sympy.combinatorics.free_groups import free_group
        >>> from sympy.combinatorics.fp_groups import FpGroup
        >>> F, x, y = free_group("x, y")
        >>> f = FpGroup(F, [x, y**2])
        >>> f.order(strategy="coset_table_based")
        2

        """
        from sympy import S, gcd
        if self._order is not None:
            return self._order
        if self._coset_table is not None:
            self._order = len(self._coset_table.table)
        elif len(self.relators) == 0:
            # free group: infinite unless of rank 0
            self._order = self.free_group.order()
        elif len(self.generators) == 1:
            # cyclic group: order is the gcd of the relator exponents
            self._order = abs(gcd([r.array_form[0][1] for r in self.relators]))
        elif self._is_infinite():
            self._order = S.Infinity
        else:
            gens, C = self._finite_index_subgroup()
            if C:
                ind = len(C.table)
                self._order = ind*self.subgroup(gens, C=C).order()
            else:
                self._order = self.index([])
        return self._order

    def _is_infinite(self):
        '''
        Test if the group is infinite. Return `True` if the test succeeds
        and `None` otherwise
        '''
        used_gens = set()
        for r in self.relators:
            used_gens.update(r.contains_generators())
        if any(g not in used_gens for g in self.generators):
            # a generator not constrained by any relator generates
            # an infinite cyclic free factor
            return True
        # Abelianisation test: check is the abelianisation is infinite
        abelian_rels = []
        from sympy.polys.solvers import RawMatrix as Matrix
        from sympy.polys.domains import ZZ
        from sympy.matrices.normalforms import invariant_factors
        for rel in self.relators:
            abelian_rels.append([rel.exponent_sum(g) for g in self.generators])
        m = Matrix(abelian_rels)
        setattr(m, "ring", ZZ)
        if 0 in invariant_factors(m):
            return True
        else:
            return None

    def _finite_index_subgroup(self, s=None):
        '''
        Find the elements of `self` that generate a finite index subgroup
        and, if found, return the list of elements and the coset table of `self` by
        the subgroup, otherwise return `(None, None)`
        '''
        # NOTE: default changed from the mutable ``s=[]``; ``if not s``
        # below treats None and [] identically, so behavior is unchanged.
        gen = self.most_frequent_generator()
        rels = list(self.generators)
        rels.extend(self.relators)
        if not s:
            if len(self.generators) == 2:
                s = [gen] + [g for g in self.generators if g != gen]
            else:
                rand = self.free_group.identity
                i = 0
                # pick a random non-trivial word not already a relator
                while ((rand in rels or rand**-1 in rels or rand.is_identity)
                        and i<10):
                    rand = self.random()
                    i += 1
                s = [gen, rand] + [g for g in self.generators if g != gen]
        mid = (len(s)+1)//2
        half1 = s[:mid]
        half2 = s[mid:]
        draft1 = None
        draft2 = None
        m = 200
        C = None
        # try to complete the enumeration for either half, doubling the
        # coset limit each round until the global limit is reached
        while not C and (m/2 < CosetTable.coset_table_max_limit):
            m = min(m, CosetTable.coset_table_max_limit)
            draft1 = self.coset_enumeration(half1, max_cosets=m,
                                            draft=draft1, incomplete=True)
            if draft1.is_complete():
                C = draft1
                half = half1
            else:
                draft2 = self.coset_enumeration(half2, max_cosets=m,
                                                draft=draft2, incomplete=True)
                if draft2.is_complete():
                    C = draft2
                    half = half2
            if not C:
                m *= 2
        if not C:
            return None, None
        C.compress()
        return half, C

    def most_frequent_generator(self):
        """Return the generator appearing most often in the relators."""
        gens = self.generators
        rels = self.relators
        freqs = [sum([r.generator_count(g) for r in rels]) for g in gens]
        return gens[freqs.index(max(freqs))]

    def random(self):
        """Return a short pseudo-random word in the generators."""
        import random
        r = self.free_group.identity
        for i in range(random.randint(2,3)):
            r = r*random.choice(self.generators)**random.choice([1,-1])
        return r

    def index(self, H, strategy="relator_based"):
        """
        Return the index of subgroup ``H`` in group ``self``.

        Examples
        ========

        >>> from sympy.combinatorics.free_groups import free_group
        >>> from sympy.combinatorics.fp_groups import FpGroup
        >>> F, x, y = free_group("x, y")
        >>> f = FpGroup(F, [x**5, y**4, y*x*y**3*x**3])
        >>> f.index([x])
        4

        """
        # TODO: use |G:H| = |G|/|H| (currently H can't be made into a group)
        # when we know |G| and |H|
        if H == []:
            return self.order()
        else:
            C = self.coset_enumeration(H, strategy)
            return len(C.table)

    def __str__(self):
        if self.free_group.rank > 30:
            str_form = "<fp group with %s generators>" % self.free_group.rank
        else:
            str_form = "<fp group on the generators %s>" % str(self.generators)
        return str_form

    __repr__ = __str__

#==============================================================================
#                       PERMUTATION GROUP METHODS
#==============================================================================

    def _to_perm_group(self):
        '''
        Return an isomorphic permutation group and the isomorphism.
        The implementation is dependent on coset enumeration so
        will only terminate for finite groups.
        '''
        from sympy.combinatorics import Permutation, PermutationGroup
        from sympy.combinatorics.homomorphisms import homomorphism
        from sympy import S
        if self.order() == S.Infinity:
            raise NotImplementedError("Permutation presentation of infinite "
                                      "groups is not implemented")
        if self._perm_isomorphism:
            T = self._perm_isomorphism
            P = T.image()
        else:
            C = self.coset_table([])
            gens = self.generators
            # column 2*i of the table is the action of generator i
            images = [[C[i][2*gens.index(g)] for i in range(len(C))] for g in gens]
            images = [Permutation(i) for i in images]
            P = PermutationGroup(images)
            T = homomorphism(self, P, gens, images, check=False)
            self._perm_isomorphism = T
        return P, T

    def _perm_group_list(self, method_name, *args):
        '''
        Given the name of a `PermutationGroup` method (returning a subgroup
        or a list of subgroups) and (optionally) additional arguments it takes,
        return a list or a list of lists containing the generators of this (or
        these) subgroups in terms of the generators of `self`.
        '''
        P, T = self._to_perm_group()
        perm_result = getattr(P, method_name)(*args)
        single = False
        if isinstance(perm_result, PermutationGroup):
            perm_result, single = [perm_result], True
        result = []
        for group in perm_result:
            gens = group.generators
            result.append(T.invert(gens))
        return result[0] if single else result

    def derived_series(self):
        '''
        Return the list of lists containing the generators
        of the subgroups in the derived series of `self`.
        '''
        return self._perm_group_list('derived_series')

    def lower_central_series(self):
        '''
        Return the list of lists containing the generators
        of the subgroups in the lower central series of `self`.
        '''
        return self._perm_group_list('lower_central_series')

    def center(self):
        '''
        Return the list of generators of the center of `self`.
        '''
        return self._perm_group_list('center')

    def derived_subgroup(self):
        '''
        Return the list of generators of the derived subgroup of `self`.
        '''
        return self._perm_group_list('derived_subgroup')

    def centralizer(self, other):
        '''
        Return the list of generators of the centralizer of `other`
        (a list of elements of `self`) in `self`.
        '''
        T = self._to_perm_group()[1]
        other = T(other)
        return self._perm_group_list('centralizer', other)

    def normal_closure(self, other):
        '''
        Return the list of generators of the normal closure of `other`
        (a list of elements of `self`) in `self`.
        '''
        T = self._to_perm_group()[1]
        other = T(other)
        return self._perm_group_list('normal_closure', other)

    def _perm_property(self, attr):
        '''
        Given an attribute of a `PermutationGroup`, return
        its value for a permutation group isomorphic to `self`.
        '''
        P = self._to_perm_group()[0]
        return getattr(P, attr)

    @property
    def is_abelian(self):
        '''
        Check if `self` is abelian.
        '''
        return self._perm_property("is_abelian")

    @property
    def is_nilpotent(self):
        '''
        Check if `self` is nilpotent.
        '''
        return self._perm_property("is_nilpotent")

    @property
    def is_solvable(self):
        '''
        Check if `self` is solvable.
        '''
        return self._perm_property("is_solvable")

    @property
    def elements(self):
        '''
        List the elements of `self`.
        '''
        P, T = self._to_perm_group()
        return T.invert(P._elements)
class FpSubgroup(DefaultPrinting):
    '''
    The class implementing a subgroup of an FpGroup or a FreeGroup
    (only finite index subgroups are supported at this point). This
    is to be used if one wishes to check if an element of the original
    group belongs to the subgroup
    '''
    def __init__(self, G, gens, normal=False):
        super(FpSubgroup,self).__init__()
        self.parent = G
        # drop duplicates and identity generators
        self.generators = list({g for g in gens if g != G.identity})
        self._min_words = None #for use in __contains__
        self.C = None
        self.normal = normal

    def __contains__(self, g):
        if isinstance(self.parent, FreeGroup):
            if self._min_words is None:
                # make _min_words - a list of subwords such that
                # g is in the subgroup if and only if it can be
                # partitioned into these subwords. Infinite families of
                # subwords are presented by tuples, e.g. (r, w)
                # stands for the family of subwords r*w**n*r**-1
                def _process(w):
                    # this is to be used before adding new words
                    # into _min_words; if the word w is not cyclically
                    # reduced, it will generate an infinite family of
                    # subwords so should be written as a tuple;
                    # if it is, w**-1 should be added to the list
                    # as well
                    p, r = w.cyclic_reduction(removed=True)
                    if not r.is_identity:
                        return [(r, p)]
                    else:
                        return [w, w**-1]

                # make the initial list
                gens = []
                for w in self.generators:
                    if self.normal:
                        w = w.cyclic_reduction()
                    gens.extend(_process(w))

                for w1 in gens:
                    for w2 in gens:
                        # if w1 and w2 are equal or are inverses, continue
                        if w1 == w2 or (not isinstance(w1, tuple)
                                        and w1**-1 == w2):
                            continue
                        # if the start of one word is the inverse of the
                        # end of the other, their multiple should be added
                        # to _min_words because of cancellation
                        if isinstance(w1, tuple):
                            # start, end
                            s1, s2 = w1[0][0], w1[0][0]**-1
                        else:
                            s1, s2 = w1[0], w1[len(w1)-1]
                        if isinstance(w2, tuple):
                            # start, end
                            r1, r2 = w2[0][0], w2[0][0]**-1
                        else:
                            # BUG FIX: the original read ``w2[len(w1)-1]``;
                            # r2 must be the *last* letter of w2, not an
                            # arbitrary letter indexed by w1's length.
                            r1, r2 = w2[0], w2[len(w2)-1]
                        # p1 and p2 are w1 and w2 or, in case when
                        # w1 or w2 is an infinite family, a representative
                        p1, p2 = w1, w2
                        if isinstance(w1, tuple):
                            p1 = w1[0]*w1[1]*w1[0]**-1
                        if isinstance(w2, tuple):
                            p2 = w2[0]*w2[1]*w2[0]**-1
                        # add the product of the words to the list is necessary
                        if r1**-1 == s2 and not (p1*p2).is_identity:
                            new = _process(p1*p2)
                            if not new in gens:
                                gens.extend(new)
                        if r2**-1 == s1 and not (p2*p1).is_identity:
                            new = _process(p2*p1)
                            if not new in gens:
                                gens.extend(new)
                self._min_words = gens

            min_words = self._min_words

            def _is_subword(w):
                # check if w is a word in _min_words or one of
                # the infinite families in it
                w, r = w.cyclic_reduction(removed=True)
                if r.is_identity or self.normal:
                    return w in min_words
                else:
                    t = [s[1] for s in min_words if isinstance(s, tuple)
                         and s[0] == r]
                    return [s for s in t if w.power_of(s)] != []

            # store the solution of words for which the result of
            # _word_break (below) is known
            known = {}

            def _word_break(w):
                # check if w can be written as a product of words
                # in min_words
                if len(w) == 0:
                    return True
                i = 0
                while i < len(w):
                    i += 1
                    prefix = w.subword(0, i)
                    if not _is_subword(prefix):
                        continue
                    rest = w.subword(i, len(w))
                    if rest not in known:
                        known[rest] = _word_break(rest)
                    if known[rest]:
                        return True
                return False

            if self.normal:
                g = g.cyclic_reduction()
            return _word_break(g)
        else:
            if self.C is None:
                C = self.parent.coset_enumeration(self.generators)
                self.C = C
            i = 0
            C = self.C
            # trace g through the coset table; g is in the subgroup iff
            # it fixes the coset of the subgroup itself (coset 0)
            for j in range(len(g)):
                i = C.table[i][C.A_dict[g[j]]]
            return i == 0

    def order(self):
        # BUG FIX: ``S`` was used below without being imported anywhere in
        # this module; import it locally (as the rest of the file does).
        from sympy import S
        if not self.generators:
            return 1
        if isinstance(self.parent, FreeGroup):
            return S.Infinity
        if self.C is None:
            C = self.parent.coset_enumeration(self.generators)
            self.C = C
        # This is valid because `len(self.C.table)` (the index of the subgroup)
        # will always be finite - otherwise coset enumeration doesn't terminate
        return self.parent.order()/len(self.C.table)

    def to_FpGroup(self):
        if isinstance(self.parent, FreeGroup):
            gen_syms = [('x_%d'%i) for i in range(len(self.generators))]
            return free_group(', '.join(gen_syms))[0]
        # BUG FIX: the original called ``self.parent.subgroup(C=self.C)``,
        # omitting the required positional ``gens`` argument.
        return self.parent.subgroup(self.generators, C=self.C)

    def __str__(self):
        if len(self.generators) > 30:
            str_form = "<fp subgroup with %s generators>" % len(self.generators)
        else:
            str_form = "<fp subgroup on the generators %s>" % str(self.generators)
        return str_form

    __repr__ = __str__
###############################################################################
# LOW INDEX SUBGROUPS #
###############################################################################
def low_index_subgroups(G, N, Y=[]):
    """
    Implements the Low Index Subgroups algorithm, i.e find all subgroups of
    ``G`` upto a given index ``N``. This implements the method described in
    [Sim94]. This procedure involves a backtrack search over incomplete Coset
    Tables, rather than over forced coincidences.

    G: An FpGroup < X|R >
    N: positive integer, representing the maximum index value for subgroups
    Y: (an optional argument) specifying a list of subgroup generators, such
    that each of the resulting subgroup contains the subgroup generated by Y.
    (``Y`` is only read, never mutated, so the mutable default is safe.)

    References
    ==========

    [1] Holt, D., Eick, B., O'Brien, E.
    "Handbook of Computational Group Theory"
    Section 5.4

    [2] Marston Conder and Peter Dobcsanyi
    "Applications and Adaptions of the Low Index Subgroups Procedure"

    Examples
    ========

    >>> from sympy.combinatorics.free_groups import free_group
    >>> from sympy.combinatorics.fp_groups import FpGroup, low_index_subgroups
    >>> F, x, y = free_group("x, y")
    >>> f = FpGroup(F, [x**2, y**3, (x*y)**4])
    >>> L = low_index_subgroups(f, 4)
    >>> for coset_table in L:
    ...     print(coset_table.table)
    [[0, 0, 0, 0]]
    [[0, 0, 1, 2], [1, 1, 2, 0], [3, 3, 0, 1], [2, 2, 3, 3]]
    [[0, 0, 1, 2], [2, 2, 2, 0], [1, 1, 0, 1]]
    [[1, 1, 0, 0], [0, 0, 1, 1]]

    """
    C = CosetTable(G, [])
    R = G.relators
    # length chosen for the length of the short relators
    len_short_rel = 5
    # elements of R2 only checked at the last step for complete
    # coset tables
    R2 = {rel for rel in R if len(rel) > len_short_rel}
    # elements of R1 are used in inner parts of the process to prune
    # branches of the search tree,
    R1 = {rel.identity_cyclic_reduction() for rel in set(R) - R2}
    R1_c_list = C.conjugates(R1)
    S = []
    descendant_subgroups(S, C, R1_c_list, C.A[0], R2, N, Y)
    return S
def descendant_subgroups(S, C, R1_c_list, x, R2, N, Y):
    """
    Process one node of the low-index backtrack search: if the coset table
    ``C`` is complete and satisfies the long relators ``R2``, append it to
    ``S``; otherwise branch on every legal value for the first undefined
    table entry via ``try_descendant``.

    NOTE(review): the parameter ``x`` is never read before being shadowed by
    the loop below — it appears to exist only for call-signature symmetry.
    """
    A_dict = C.A_dict
    A_dict_inv = C.A_dict_inv
    if C.is_complete():
        # if C is complete then it only needs to test
        # whether the relators in R2 are satisfied
        for w, alpha in product(R2, C.omega):
            if not C.scan_check(alpha, w):
                return
        # relators in R2 are satisfied, append the table to list
        S.append(C)
    else:
        # find the first undefined entry in Coset Table
        for alpha, x in product(range(len(C.table)), C.A):
            if C.table[alpha][A_dict[x]] is None:
                # this is "x" in pseudo-code (using "y" makes it clear)
                undefined_coset, undefined_gen = alpha, x
                break
        # for filling up the undefine entry we try all possible values
        # of β ∈ Ω or β = n where β^(undefined_gen^-1) is undefined
        reach = C.omega + [C.n]
        for beta in reach:
            if beta < N:
                if beta == C.n or C.table[beta][A_dict_inv[undefined_gen]] is None:
                    try_descendant(S, C, R1_c_list, R2, N, undefined_coset, \
                        undefined_gen, beta, Y)
def try_descendant(S, C, R1_c_list, R2, N, alpha, x, beta, Y):
    r"""
    Solves the problem of trying out each individual possibility
    for `\alpha^x`: work on a copy ``D`` of ``C``, define the entry,
    propagate deductions against the short relators, check the required
    subgroup generators ``Y``, and recurse if the table is still the
    canonical representative of its conjugacy class.
    """
    # work on a copy so the caller's table is untouched by this branch
    D = C.copy()
    # NOTE(review): A_dict is assigned but never used (D.A_dict is
    # referenced directly below).
    A_dict = D.A_dict
    if beta == D.n and beta < N:
        # beta is a brand-new coset: extend the table
        D.table.append([None]*len(D.A))
        D.p.append(beta)
    D.table[alpha][D.A_dict[x]] = beta
    D.table[beta][D.A_dict_inv[x]] = alpha
    D.deduction_stack.append((alpha, x))
    if not D.process_deductions_check(R1_c_list[D.A_dict[x]], \
            R1_c_list[D.A_dict_inv[x]]):
        # a short relator was violated: prune this branch
        return
    for w in Y:
        if not D.scan_check(0, w):
            # the subgroup would not contain <Y>: prune
            return
    if first_in_class(D, Y):
        descendant_subgroups(S, D, R1_c_list, x, R2, N, Y)
def first_in_class(C, Y=[]):
    """
    Checks whether the subgroup ``H=G1`` corresponding to the Coset Table
    could possibly be the canonical representative of its conjugacy class.

    Parameters
    ==========

    C: CosetTable

    Returns
    =======

    bool: True/False

    If this returns False, then no descendant of C can have that property, and
    so we can abandon C. If it returns True, then we need to process further
    the node of the search tree corresponding to C, and so we call
    ``descendant_subgroups`` recursively on C.

    Examples
    ========

    >>> from sympy.combinatorics.free_groups import free_group
    >>> from sympy.combinatorics.fp_groups import FpGroup, CosetTable, first_in_class
    >>> F, x, y = free_group("x, y")
    >>> f = FpGroup(F, [x**2, y**3, (x*y)**4])
    >>> C = CosetTable(f, [])
    >>> C.table = [[0, 0, None, None]]
    >>> first_in_class(C)
    True
    >>> C.table = [[1, 1, 1, None], [0, 0, None, 1]]; C.p = [0, 1]
    >>> first_in_class(C)
    True
    >>> C.table = [[1, 1, 2, 1], [0, 0, 0, None], [None, None, None, 0]]
    >>> C.p = [0, 1, 2]
    >>> first_in_class(C)
    False
    >>> C.table = [[1, 1, 1, 2], [0, 0, 2, 0], [2, None, 0, 1]]
    >>> first_in_class(C)
    False

    # TODO:: Sims points out in [Sim94] that performance can be improved by
    # remembering some of the information computed by ``first_in_class``. If
    # the ``continue α`` statement is executed at line 14, then the same thing
    # will happen for that value of α in any descendant of the table C, and so
    # the values the values of α for which this occurs could profitably be
    # stored and passed through to the descendants of C. Of course this would
    # make the code more complicated.

    # The code below is taken directly from the function on page 208 of [Sim94]
    # ν[α]
    """
    n = C.n
    # lamda is the largest numbered point in Ω_c_α which is currently defined
    # (spelled "lamda" to avoid the Python keyword)
    lamda = -1
    # for α ∈ Ω_c, ν[α] is the point in Ω_c_α corresponding to α
    nu = [None]*n
    # for α ∈ Ω_c_α, μ[α] is the point in Ω_c corresponding to α
    mu = [None]*n
    # mutually ν and μ are the mutually-inverse equivalence maps between
    # Ω_c_α and Ω_c
    next_alpha = False
    # For each 0≠α ∈ [0 .. nc-1], we start by constructing the equivalent
    # standardized coset table C_α corresponding to H_α
    for alpha in range(1, n):
        # reset ν to "None" after previous value of α
        for beta in range(lamda+1):
            nu[mu[beta]] = None
        # we only want to reject our current table in favour of a preceding
        # table in the ordering in which 1 is replaced by α, if the subgroup
        # G_α corresponding to this preceding table definitely contains the
        # given subgroup
        for w in Y:
            # TODO: this should support input of a list of general words
            # not just the words which are in "A" (i.e gen and gen^-1)
            if C.table[alpha][C.A_dict[w]] != alpha:
                # continue with α
                next_alpha = True
                break
        if next_alpha:
            next_alpha = False
            continue
        # try α as the new point 0 in Ω_C_α
        mu[0] = alpha
        nu[alpha] = 0
        # compare corresponding entries in C and C_α
        lamda = 0
        for beta in range(n):
            for x in C.A:
                gamma = C.table[beta][C.A_dict[x]]
                delta = C.table[mu[beta]][C.A_dict[x]]
                # if either of the entries is undefined,
                # we move with next α
                if gamma is None or delta is None:
                    # continue with α
                    next_alpha = True
                    break
                if nu[delta] is None:
                    # delta becomes the next point in Ω_C_α
                    lamda += 1
                    nu[delta] = lamda
                    mu[lamda] = delta
                if nu[delta] < gamma:
                    # the conjugate table precedes C lexicographically:
                    # C cannot be the canonical representative
                    return False
                if nu[delta] > gamma:
                    # continue with α
                    next_alpha = True
                    break
            if next_alpha:
                next_alpha = False
                break
    return True
#========================================================================
# Simplifying Presentation
#========================================================================
def simplify_presentation(*args, **kwargs):
    '''
    For an instance of `FpGroup`, return a simplified isomorphic copy of
    the group (e.g. remove redundant generators or relators). Alternatively,
    a list of generators and relators can be passed in which case the
    simplified lists will be returned.

    By default, the generators of the group are unchanged. If you would
    like to remove redundant generators, set the keyword argument
    `change_gens = True`.
    '''
    change_gens = kwargs.get("change_gens", False)

    if len(args) == 1:
        if not isinstance(args[0], FpGroup):
            raise TypeError("The argument must be an instance of FpGroup")
        G = args[0]
        gens, rels = simplify_presentation(G.generators, G.relators,
                                           change_gens=change_gens)
        if gens:
            return FpGroup(gens[0].group, rels)
        # BUG FIX: the original returned ``FpGroup([])`` which is an invalid
        # call (FpGroup takes a free group and a relator list); present the
        # trivial group on an empty free group instead.
        return FpGroup(free_group('')[0], [])
    elif len(args) == 2:
        gens, rels = args[0][:], args[1][:]
        if not gens:
            # nothing to simplify; also guards the ``gens[0]`` access below
            return gens, rels
        identity = gens[0].group.identity
    else:
        if len(args) == 0:
            m = "Not enough arguments"
        else:
            m = "Too many arguments"
        raise RuntimeError(m)

    prev_gens = []
    prev_rels = []
    # iterate elimination + simplification until the relator set stabilizes
    while not set(prev_rels) == set(rels):
        prev_rels = rels
        while change_gens and not set(prev_gens) == set(gens):
            prev_gens = gens
            gens, rels = elimination_technique_1(gens, rels, identity)
        rels = _simplify_relators(rels, identity)

    if change_gens:
        # rebuild the presentation on a fresh free group so eliminated
        # generators disappear from the underlying group as well
        syms = [g.array_form[0][0] for g in gens]
        F = free_group(syms)[0]
        identity = F.identity
        gens = F.generators
        subs = dict(zip(syms, gens))
        for j, r in enumerate(rels):
            a = r.array_form
            rel = identity
            for sym, p in a:
                rel = rel*subs[sym]**p
            rels[j] = rel
    return gens, rels
def _simplify_relators(rels, identity):
    """Simplify a relator list via ``_simplification_technique_1``,
    deduplicate, cyclically reduce, and drop a trivial relator if present.
    """
    unique = sorted(set(_simplification_technique_1(rels[:])))
    reduced = [w.identity_cyclic_reduction() for w in unique]
    # at most one identity can survive the dedup above
    if identity in reduced:
        reduced.remove(identity)
    return reduced
# Pg 350, section 2.5.1 from [2]
def elimination_technique_1(gens, rels, identity):
    """
    Eliminate redundant generators: for every relator in which some
    generator occurs exactly once, solve the relator for that generator and
    substitute the resulting word everywhere else (a Tietze transformation).
    Returns the reduced ``(gens, rels)`` pair.
    """
    rels = rels[:]
    # the shorter relators are examined first so that generators selected for
    # elimination will have shorter strings as equivalent
    rels.sort()
    gens = gens[:]
    redundant_gens = {}
    redundant_rels = []
    used_gens = set()
    # examine each relator in relator list for any generator occurring exactly
    # once
    for rel in rels:
        # don't look for a redundant generator in a relator which
        # depends on previously found ones
        contained_gens = rel.contains_generators()
        if any([g in contained_gens for g in redundant_gens]):
            continue
        contained_gens = list(contained_gens)
        contained_gens.sort(reverse = True)
        for gen in contained_gens:
            if rel.generator_count(gen) == 1 and gen not in used_gens:
                # rel == fw * gen**k * bk; solve for gen:
                # gen**k == (bk*fw)**-1, i.e. gen -> chi**(-k) below
                k = rel.exponent_sum(gen)
                gen_index = rel.index(gen**k)
                bk = rel.subword(gen_index + 1, len(rel))
                fw = rel.subword(0, gen_index)
                chi = bk*fw
                redundant_gens[gen] = chi**(-1*k)
                used_gens.update(chi.contains_generators())
                redundant_rels.append(rel)
                break
    rels = [r for r in rels if r not in redundant_rels]
    # eliminate the redundant generators from remaining relators
    rels = [r.eliminate_words(redundant_gens, _all = True).identity_cyclic_reduction() for r in rels]
    rels = list(set(rels))
    try:
        rels.remove(identity)
    except ValueError:
        pass
    gens = [g for g in gens if g not in redundant_gens]
    return gens, rels
def _simplification_technique_1(rels):
    """
    All relators are checked to see if they are of the form `gen^n`. If any
    such relators are found then all other relators are processed for strings
    in the `gen` known order.

    Examples
    ========

    >>> from sympy.combinatorics.free_groups import free_group
    >>> from sympy.combinatorics.fp_groups import _simplification_technique_1
    >>> F, x, y = free_group("x, y")
    >>> w1 = [x**2*y**4, x**3]
    >>> _simplification_technique_1(w1)
    [x**-1*y**4, x**3]

    >>> w2 = [x**2*y**-4*x**5, x**3, x**2*y**8, y**5]
    >>> _simplification_technique_1(w2)
    [x**-1*y*x**-1, x**3, x**-1*y**-2, y**5]

    >>> w3 = [x**6*y**4, x**4]
    >>> _simplification_technique_1(w3)
    [x**2*y**4, x**4]

    """
    from sympy import gcd
    rels = rels[:]
    # dictionary with "gen: n" where gen^n is one of the relators
    exps = {}
    for i in range(len(rels)):
        rel = rels[i]
        if rel.number_syllables() == 1:
            g = rel[0]
            exp = abs(rel.array_form[0][1])
            if rel.array_form[0][1] < 0:
                # normalize to a positive power of g
                rels[i] = rels[i]**-1
                g = g**-1
            if g in exps:
                # combine multiple power relators for the same generator
                exp = gcd(exp, exps[g].array_form[0][1])
            exps[g] = g**exp

    one_syllables_words = exps.values()
    # decrease some of the exponents in relators, making use of the single
    # syllable relators
    for i in range(len(rels)):
        rel = rels[i]
        if rel in one_syllables_words:
            continue
        rel = rel.eliminate_words(one_syllables_words, _all = True)
        # if rels[i] contains g**n where abs(n) is greater than half of the power p
        # of g in exps, g**n can be replaced by g**(n-p) (or g**(p-n) if n<0)
        for g in rel.contains_generators():
            if g in exps:
                exp = exps[g].array_form[0][1]
                max_exp = (exp + 1)//2
                rel = rel.eliminate_word(g**(max_exp), g**(max_exp-exp), _all = True)
                rel = rel.eliminate_word(g**(-max_exp), g**(-(max_exp-exp)), _all = True)
        rels[i] = rel
    rels = [r.identity_cyclic_reduction() for r in rels]
    return rels
###############################################################################
# SUBGROUP PRESENTATIONS #
###############################################################################
# Pg 175 [1]
def define_schreier_generators(C):
    """
    Populate ``C.P`` with the Schreier generators of the subgroup whose
    coset table is ``C`` (Pg 175 of [1]).  After the call,
    ``C._schreier_free_group`` is a free group on the new generators and
    ``C._schreier_generators`` lists them; ``C.P[alpha][x]`` holds the
    Schreier generator (or identity) associated with coset ``alpha`` and
    letter ``x``.
    """
    y = []
    # gamma numbers the cosets in the order they are first reached;
    # table entries equal to gamma correspond to Schreier-tree edges,
    # which contribute trivial (identity) generators
    gamma = 1
    f = C.fp_group
    X = f.generators
    C.P = [[None]*len(C.A) for i in range(C.n)]
    for alpha, x in product(C.omega, C.A):
        beta = C.table[alpha][C.A_dict[x]]
        if beta == gamma:
            C.P[alpha][C.A_dict[x]] = "<identity>"
            C.P[beta][C.A_dict_inv[x]] = "<identity>"
            gamma += 1
        elif x in X and C.P[alpha][C.A_dict[x]] is None:
            # non-tree edge on a positive generator: new Schreier generator
            y_alpha_x = '%s_%s' % (x, alpha)
            y.append(y_alpha_x)
            C.P[alpha][C.A_dict[x]] = y_alpha_x
    grp_gens = list(free_group(', '.join(y)))
    C._schreier_free_group = grp_gens.pop(0)
    C._schreier_generators = grp_gens
    # replace all elements of P by, free group elements
    for i, j in product(range(len(C.P)), range(len(C.A))):
        # if equals "<identity>", replace by identity element
        if C.P[i][j] == "<identity>":
            C.P[i][j] = C._schreier_free_group.identity
        elif isinstance(C.P[i][j], str):
            r = C._schreier_generators[y.index(C.P[i][j])]
            C.P[i][j] = r
            beta = C.table[i][j]
            # assumes column j is a positive generator, so j + 1 is the
            # column of its inverse — TODO confirm against C.A's layout
            C.P[beta][j + 1] = r**-1
def reidemeister_relators(C):
    """
    Compute the Reidemeister relators of the subgroup from the coset table
    ``C`` (rewriting every group relator at every coset), prune trivial and
    cyclically-conjugate duplicates, and store the result on
    ``C._reidemeister_relators``.  Requires ``define_schreier_generators(C)``
    to have been called first.
    """
    R = C.fp_group.relators
    rels = [rewrite(C, coset, word) for word in R for coset in range(C.n)]
    identity = C._schreier_free_group.identity
    # length-1 relators mean that Schreier generator is trivial
    order_1_gens = set([i for i in rels if len(i) == 1])

    # remove all the order 1 generators from relators
    rels = list(filter(lambda rel: rel not in order_1_gens, rels))

    # replace order 1 generators by identity element in reidemeister relators
    for i in range(len(rels)):
        w = rels[i]
        w = w.eliminate_words(order_1_gens, _all=True)
        rels[i] = w

    C._schreier_generators = [i for i in C._schreier_generators
                    if not (i in order_1_gens or i**-1 in order_1_gens)]

    # Tietze transformation 1 i.e TT_1
    # remove cyclic conjugate elements from relators
    i = 0
    while i < len(rels):
        w = rels[i]
        j = i + 1
        while j < len(rels):
            if w.is_cyclic_conjugate(rels[j]):
                del rels[j]
            else:
                j += 1
        i += 1

    C._reidemeister_relators = rels
def rewrite(C, alpha, w):
    """Rewrite the word ``w``, read starting at coset ``alpha``, in terms
    of the Schreier generators stored in ``C.P`` -- i.e. return
    ``rho(tau(alpha), w)``.

    ``C`` is a coset table on which ``define_schreier_generators`` has
    already been called, ``alpha`` is a live coset and ``w`` is a word in
    the alphabet `A*`.

    Examples
    ========
    >>> from sympy.combinatorics.fp_groups import FpGroup, CosetTable, define_schreier_generators, rewrite
    >>> from sympy.combinatorics.free_groups import free_group
    >>> F, x, y = free_group("x ,y")
    >>> f = FpGroup(F, [x**2, y**3, (x*y)**6])
    >>> C = CosetTable(f, [])
    >>> C.table = [[1, 1, 2, 3], [0, 0, 4, 5], [4, 4, 3, 0], [5, 5, 0, 2], [2, 2, 5, 1], [3, 3, 1, 4]]
    >>> C.p = [0, 1, 2, 3, 4, 5]
    >>> define_schreier_generators(C)
    >>> rewrite(C, 0, (x*y)**6)
    x_4*y_2*x_3*x_1*x_2*y_4*x_5
    """
    word = C._schreier_free_group.identity
    coset = alpha
    # Trace w through the coset table, multiplying in the Schreier
    # generator attached to each edge traversed.
    for pos in range(len(w)):
        col = C.A_dict[w[pos]]
        word = word*C.P[coset][col]
        coset = C.table[coset][col]
    return word
# Pg 350, section 2.5.2 from [2]
def elimination_technique_2(C):
    """
    This technique eliminates one generator at a time. Heuristically this
    seems superior in that we may select for elimination the generator with
    shortest equivalent string at each stage.
    >>> from sympy.combinatorics.free_groups import free_group
    >>> from sympy.combinatorics.fp_groups import FpGroup, coset_enumeration_r, \
            reidemeister_relators, define_schreier_generators, elimination_technique_2
    >>> F, x, y = free_group("x, y")
    >>> f = FpGroup(F, [x**3, y**5, (x*y)**2]); H = [x*y, x**-1*y**-1*x*y*x]
    >>> C = coset_enumeration_r(f, H)
    >>> C.compress(); C.standardize()
    >>> define_schreier_generators(C)
    >>> reidemeister_relators(C)
    >>> elimination_technique_2(C)
    ([y_1, y_2], [y_2**-3, y_2*y_1*y_2*y_1*y_2*y_1, y_1**2])
    """
    rels = C._reidemeister_relators
    rels.sort(reverse=True)
    gens = C._schreier_generators
    # NOTE(review): rels is indexed with the generator-range counter i,
    # which assumes len(rels) >= len(gens) at this point -- confirm.
    for i in range(len(gens) - 1, -1, -1):
        rel = rels[i]
        for j in range(len(gens) - 1, -1, -1):
            gen = gens[j]
            # A generator occurring exactly once in a relator can be
            # eliminated by solving the relator for it.
            if rel.generator_count(gen) == 1:
                k = rel.exponent_sum(gen)
                gen_index = rel.index(gen**k)
                bk = rel.subword(gen_index + 1, len(rel))
                fw = rel.subword(0, gen_index)
                # gen**k == (bk*fw)**-1, so gen can be replaced everywhere.
                rep_by = (bk*fw)**(-1*k)
                del rels[i]; del gens[j]
                # substitute the replacement into all remaining relators
                for l in range(len(rels)):
                    rels[l] = rels[l].eliminate_word(gen, rep_by)
                break
    C._reidemeister_relators = rels
    C._schreier_generators = gens
    return C._schreier_generators, C._reidemeister_relators
def reidemeister_presentation(fp_grp, H, C=None):
    """Return a presentation ``(generators, relators)`` of the subgroup
    generated by ``H`` inside the finitely presented group ``fp_grp``.

    ``fp_grp`` is an instance of ``FpGroup`` and ``H`` is a list of words
    in its generators.  A pre-computed coset table may be passed in as
    ``C``; otherwise one is obtained by coset enumeration, then compressed
    and standardized.

    Examples
    ========
    >>> from sympy.combinatorics.free_groups import free_group
    >>> from sympy.combinatorics.fp_groups import FpGroup, reidemeister_presentation
    >>> F, x, y = free_group("x, y")
    Example 5.6 Pg. 177 from [1]
    >>> f = FpGroup(F, [x**3, y**5, (x*y)**2])
    >>> H = [x*y, x**-1*y**-1*x*y*x]
    >>> reidemeister_presentation(f, H)
    ((y_1, y_2), (y_1**2, y_2**3, y_2*y_1*y_2*y_1*y_2*y_1))
    Example 5.8 Pg. 183 from [1]
    >>> f = FpGroup(F, [x**3, y**3, (x*y)**3])
    >>> H = [x*y, x*y**-1]
    >>> reidemeister_presentation(f, H)
    ((x_0, y_0), (x_0**3, y_0**3, x_0*y_0*x_0*y_0*x_0*y_0))
    Exercises Q2. Pg 187 from [1]
    >>> f = FpGroup(F, [x**2*y**2, y**-1*x*y*x**-3])
    >>> H = [x]
    >>> reidemeister_presentation(f, H)
    ((x_0,), (x_0**4,))
    Example 5.9 Pg. 183 from [1]
    >>> f = FpGroup(F, [x**3*y**-3, (x*y)**3, (x*y**-1)**2])
    >>> H = [x]
    >>> reidemeister_presentation(f, H)
    ((x_0,), (x_0**6,))
    """
    if not C:
        # No table supplied: enumerate the cosets of H and normalize.
        C = coset_enumeration_r(fp_grp, H)
        C.compress()
        C.standardize()
    define_schreier_generators(C)
    reidemeister_relators(C)
    # Simplify the raw Schreier presentation (generators may be renamed).
    gens, rels = simplify_presentation(
        C._schreier_generators, C._reidemeister_relators, change_gens=True)
    C.schreier_generators = tuple(gens)
    C.reidemeister_relators = tuple(rels)
    return C.schreier_generators, C.reidemeister_relators
# Alias: elements of an FpGroup are represented by FreeGroupElement objects.
FpGroupElement = FreeGroupElement
| wxgeo/geophar | wxgeometrie/sympy/combinatorics/fp_groups.py | Python | gpl-2.0 | 44,822 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# rtstock documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
# docs directory (conf.py is executed with the docs dir as cwd).
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import rtstock
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Realtime Stock'
copyright = u"2016, Rafael Lopes Conde dos Reis"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = rtstock.__version__
# The full version, including alpha/beta/rc tags.
release = rtstock.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'rtstockdoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index', 'rtstock.tex',
     u'Realtime Stock Documentation',
     u'Rafael Lopes Conde dos Reis', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'rtstock',
     u'Realtime Stock Documentation',
     [u'Rafael Lopes Conde dos Reis'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'rtstock',
     u'Realtime Stock Documentation',
     u'Rafael Lopes Conde dos Reis',
     'rtstock',
     'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
#! /usr/bin/env python
"""Script for modified the /etc/X11/xorg.conf file for sharing the desktop via
VNC server
"""
__author__ = "Greg Brissey"
__version__ = "$Revision: 1.0 $"
__date__ = "$Date: 2009/08/11 11:57:19 $"
__license__ = "Python"
#
# Read the /etc/X11/xorg.conf and create a new xorg.conf
# Done in two passes.
# 1st pass is to determine if the changes are already present
# 2nd apply those changes that have not been done.
# We are making changes to or adding the ServerLayout, Monitor, Screen and Module sections
# if we don't find any then we will add them to the end of the file
# The Screen section additions involve the 3 VNC authorization related settings
# which are placed after the DefaultDepth setting
# For the Module section the Load of vnc X module option is added
# The ServerLayout section additions involve the 3 Option to turn off blanking settings
# The Monitor section additions involve the 1 Option to turn off DPMS (Display Power Management System)
# if it blanks, the vnc client losses its access control
#
import sys
import os
import glob
import datetime
import subprocess
# subprocess.call(["foo.sh","args"],shell=True)
from pydoc import help
# from UserDict import UserDict
# help("sys")
# help("subprocess")
# help("string")
#print 'sys.argv[0] =', sys.argv[0]
#pathname = os.path.dirname(sys.argv[0])
#print 'path =', pathname
#print 'full path =', os.path.abspath(pathname)
# Lines to be added to the xorg.conf file.
# Section headers / terminator, written verbatim when a whole section
# has to be appended.
sectionMonitor='Section "Monitor"\n'
sectionServerLayout='Section "ServerLayout"\n'
sectionScreen='Section "Screen"\n'
sectionModule='Section "Module"\n'
endSection='EndSection\n'
# Screen section: the three VNC authorization options.
screenOption1='\tOption\t\t"SecurityTypes" "VncAuth"\n'
screenOption2='\tOption\t\t"UserPasswdVerifier" "VncAuth"\n'
screenOption3='\tOption\t\t"PasswordFile" "/root/.vnc/passwd"\n'
# Module section: load the vnc X module.
moduleLoad='\tLoad\t\t"vnc"\n'
# Monitor section: replace DPMS with NODPMS so the display never blanks
# (blanking would drop the VNC client's access control).
MonitorOptionFrom='"DPMS"'
MonitorOption='\tOption\t\t"NODPMS"\n'
# ServerLayout section: disable all screen-saver/power-off timers.
ServerLayoutOption1='\tOption\t\t"BlankTime" "0"\n'
ServerLayoutOption2='\tOption\t\t"StandbyTime" "0"\n'
ServerLayoutOption3='\tOption\t\t"SuspendTime" "0"\n'
ServerLayoutOption4='\tOption\t\t"OffTime" "0"\n'
# Location of the X configuration and the temporary output file.
XORGdir='/etc/X11'
XORGfile='xorg.conf'
XORGnfile='xorg.conf.vnc'
#
# get our uid and make sure we are running as root
# (the script writes into /etc/X11)
#
uid=os.getuid()
if (uid != 0):
    print "Must be run with root privileges."
    sys.exit(11)
#
# create a timestamp for the backup file name
# e.g. filename_2009-06-15:14:54.txt
#
dateTime = datetime.datetime.today()
datetimestr = dateTime.strftime("%Y-%m-%d:%H:%M")
backupname = XORGfile + '_' + datetimestr
Xorg_filepath = os.path.join(XORGdir,XORGfile)
Xorg_bkup_filepath = os.path.join(XORGdir,backupname)
Xorg_nfilepath = os.path.join(XORGdir,XORGnfile)
# Xorg_nfilepath = os.path.join('/tmp',XORGnfile)
# create the copy command to create the backup
cp2bkup = 'cp ' + Xorg_filepath + ' ' + Xorg_bkup_filepath
# create the copy command to overwrite xorg.conf with the new one
cp2xorg = 'cp ' + Xorg_nfilepath + ' ' + Xorg_filepath
rmxorgvnc = 'rm -f ' + Xorg_nfilepath
# The cp commands are invoked after the test to see if mods are needed.
# These counters are incremented for each option found for the related mod:
# 1 for Monitor, 4 for ServerLayout, 3 for Screen and 1 for Module.
ServerLayoutModsPresent=0
MonitorModsPresent=0
ScreenModsPresent=0
ModuleModsPresent=0
#
# Simple state machine variables:
#   in*      - currently inside that xorg.conf section while scanning
#   found*   - that section was seen at least once (pass 2)
#   written* - that option has already been emitted for the current file (pass 2)
#
foundServerLayout=0
inServerLayout=0
writtenServerLayoutOption1=0
writtenServerLayoutOption2=0
writtenServerLayoutOption3=0
writtenServerLayoutOption4=0
foundMonitor=0
inMonitor=0
writtenMonitorOption=0
foundScreen=0
inScreen=0
writtenScreenOption1=0
writtenScreenOption2=0
writtenScreenOption3=0
foundModule=0
inModule=0
writtenModuleOption=0
#
# Determine if any of the mods are already present in the xorg.conf file
#
try:
inFile = open(Xorg_filepath, 'rb') # open read & binary
for line in inFile:
# skip blank or commented '#' lines
if ((line[0] == "#") and (line == "\n")):
continue
# print line
# convert to lower case for comparisons (lower) and
# remove the leading and trailing white characters (strip)
lcline=line.lower().strip()
# print lcline
if ( inScreen == 1 ):
#print 'inScreen'
#print lcline
# look for the three addition for VNC in the screen section
if ((lcline.find('securitytypes') != -1) and (lcline.find('vncauth') != -1)):
ScreenModsPresent=ScreenModsPresent + 1
elif (lcline.find('userpasswdverifier') != -1) and (lcline.find('vncauth') != -1):
ScreenModsPresent=ScreenModsPresent + 1
elif (lcline.find('passwordfile') != -1) and (lcline.find('/root/.vnc/passwd') != -1):
ScreenModsPresent=ScreenModsPresent + 1
#print "ScreenModsPresent: %d" % ScreenModsPresent
if (lcline.startswith('endsection')):
# print "ScreenModsPresent: %d" % ScreenModsPresent
# print 'Found EndSection'
# leaving the Screen section
inScreen=0
elif ( inModule == 1):
#print 'inModule'
#print lcline
# look for the one addition for VNC in the Module section
if ((lcline.find('load') != -1) and (lcline.find('vnc') != -1)):
ModuleModsPresent=1
if (lcline.startswith('endsection')):
#print 'Found EndSection'
# leaving the Module section
inModule=0
elif ( inServerLayout == 1):
# look for the four addition for blanking settings in the serverlayout section
# print lcline
if ((lcline.find('blanktime') != -1) and (lcline.find('"0"') != -1) ):
ServerLayoutModsPresent= ServerLayoutModsPresent + 1
elif ((lcline.find('standbytime') != -1) and (lcline.find('"0"') != -1) ):
ServerLayoutModsPresent= ServerLayoutModsPresent + 1
elif ((lcline.find('suspendtime') != -1) and (lcline.find('"0"') != -1) ):
ServerLayoutModsPresent= ServerLayoutModsPresent + 1
elif ((lcline.find('offtime') != -1) and (lcline.find('"0"') != -1) ):
ServerLayoutModsPresent= ServerLayoutModsPresent + 1
elif (lcline.startswith('endsection')):
inServerLayout=0
#print "ScreenModsPresent: %d" % ScreenModsPresent
elif ( inMonitor == 1):
# print lcline
# look for the change of the DPMS option in the Monitor section
if ((lcline.find('option') != -1) and (lcline.find('"nodpms"') != -1) ):
MonitorModsPresent=1
elif (lcline.startswith('endsection')):
inMonitor=0
else:
# is this a start of a Section?
if (lcline.startswith('section') ):
# extract the section name
(section, title) = lcline.split();
# print 'Section title: ' + title
# print "Section is: '%s'" & title
if ( title.strip('"') == 'screen') :
#print 'Found Screen'
# entering the Screen section
inScreen=1
elif ( title.strip('"') == 'module'):
#print 'Found Module'
# entering the Module section
inModule=1
elif ( title.strip('"') == 'serverlayout'):
# print 'Found ServerLayout'
# entering the Module section
inServerLayout=1
elif ( title.strip('"') == 'monitor'):
# print 'Found Monitor'
inMonitor=1
inFile.close()
#print "ScreenModsPresent: %d" % ScreenModsPresent
#print "ModuleModsPresent: %d" % ModuleModsPresent
#print "ServerLayoutModsPresent: %d" % ServerLayoutModsPresent
#print "MonitorModsPresent: %d" % MonitorModsPresent
if ((ScreenModsPresent > 2) and ( ModuleModsPresent > 0) and
(ServerLayoutModsPresent > 3) and (MonitorModsPresent > 0)):
print 'xorg.conf already modified for VNC'
sys.exit(0)
except IOError:
print 'file: ', Xorg_filepath, ' not found.'
sys.exit(12)
#
# read the xorg.conf and create a new xorg.conf.vnc
#
try:
inFile = open(Xorg_filepath, 'rb') # open read & binary
outFile = open(Xorg_nfilepath, 'wb') # open write & binary
for line in inFile:
# write line to new xorg file xorg.conf.vnc
# outFile.write(line)
# convert to lower case for comparisons (lower) and
# remove the leading and trailing characters (strip)
lcline=line.lower().strip()
# print lcline
# skip blank or commented '#' lines
if ((line[0] == "#") and (line == "\n")):
outFile.write(line)
continue
if ( inScreen == 1 ):
# print 'inScreen'
# print lcline
# if the options are found then overwrite them with ours
if (lcline.find('securitytypes') != -1):
outFile.write(screenOption1)
writtenScreenOption1=1
elif (lcline.find('userpasswdverifier') != -1):
outFile.write(screenOption2)
writtenScreenOption2=1
elif (lcline.find('passwordfile') != -1):
outFile.write(screenOption3)
writtenScreenOption3=1
elif (lcline.startswith('endsection')):
# if option not written then write them out now prior to section end
if (writtenScreenOption1 != 1):
outFile.write(screenOption1)
if (writtenScreenOption2 != 1):
outFile.write(screenOption2)
if (writtenScreenOption3 != 1):
outFile.write(screenOption3)
outFile.write(line)
# print 'Found EndSection'
# leaving the Screen section
inScreen=0
else:
outFile.write(line)
elif ( inModule == 1):
# print 'inModule'
# if the load vnc is found then mark it there
if ((lcline.find('load') != -1) and (lcline.find('vnc') != -1)):
ModuleModsPresent=1
if (lcline.startswith('endsection')):
if ( ModuleModsPresent != 1): # if not present write it out
outFile.write(moduleLoad)
outFile.write(line)
# print 'Found EndSection'
# leaving the Module section
inModule=0
else:
outFile.write(line)
elif ( inServerLayout == 1):
# overwrite any entry just to be sure its correct
if ((lcline.find('blanktime') != -1)):
outFile.write(ServerLayoutOption1)
writtenServerLayoutOption1=1
elif ((lcline.find('standbytime') != -1)):
outFile.write(ServerLayoutOption2)
writtenServerLayoutOption2=1
elif ((lcline.find('suspendtime') != -1)):
outFile.write(ServerLayoutOption3)
writtenServerLayoutOption3=1
elif ((lcline.find('offtime') != -1)):
outFile.write(ServerLayoutOption4)
writtenServerLayoutOption4=1
elif (lcline.startswith('endsection')):
# if the option was not written then write them out now,
# prior to section end
if (writtenServerLayoutOption1 != 1):
outFile.write(ServerLayoutOption1)
if (writtenServerLayoutOption2 != 1):
outFile.write(ServerLayoutOption2)
if (writtenServerLayoutOption3 != 1):
outFile.write(ServerLayoutOption3)
if (writtenServerLayoutOption4 != 1):
outFile.write(ServerLayoutOption4)
outFile.write(line)
inServerLayout=0
else:
outFile.write(line)
elif ( inMonitor == 1):
# if dpms option is present then overwrite it with our nodpms option
if ((lcline.find('option') != -1) and (lcline.find('"dpms"') != -1) ):
outFile.write(MonitorOption)
writtenMonitorOption=1 # note that we already written the option out
elif ((lcline.find('option') != -1) and (lcline.find('"nodpms"') != -1) ):
outFile.write(line) # write it out
writtenMonitorOption=1 # note that its already there
elif (lcline.startswith('endsection')):
# if end of section and haven't written option out, Do so
if ( writtenMonitorOption != 1):
outFile.write(MonitorOption)
outFile.write(line)
inMonitor=0
else:
outFile.write(line)
else:
outFile.write(line)
# is this a start of a Section?
if (lcline.startswith('section') ):
# extract the section name
(section, title) = lcline.split();
# print 'Section title: ' + title
# print "Section is: '%s'" & title
if ( title.strip('"') == 'screen') :
# print 'Found Screen'
# entering the Screen section
inScreen=1
foundScreen=1
elif ( title.strip('"') == 'module'):
# print 'Found Module'
# entering the Module section
inModule=1
foundModule=1
# outFile.write(moduleLoad)
elif ( title.strip('"') == 'serverlayout'):
# entering the ServerLayout Section
# print 'Found ServerLayout'
foundServerLayout=1
inServerLayout=1
elif ( title.strip('"') == 'monitor'):
# print 'Found Monitor'
foundMonitor=1
inMonitor=1
inFile.close() # close the orginial xorg file
#
# If a Screen section was never found then create it now
#
if (foundScreen == 0):
outFile.write(sectionScreen)
outFile.write(screenOption1)
outFile.write(screenOption2)
outFile.write(screenOption3)
outFile.write(endSection)
#
# If a Module section was never found then create it now
#
if (foundModule == 0):
outFile.write(sectionModule)
outFile.write(moduleLoad)
outFile.write(endSection)
#
# If a ServerLayout section was never found then create it now
#
if (foundServerLayout == 0):
outFile.write(sectionServerLayout)
outFile.write(ServerLayoutOption1)
outFile.write(ServerLayoutOption2)
outFile.write(ServerLayoutOption3)
outFile.write(ServerLayoutOption4)
outFile.write(endSection)
#
# If a Monitor section was never found then create it now
#
if (foundMonitor == 0):
outFile.write(sectionMonitor)
outFile.write(MonitorOption)
outFile.write(endSection)
outFile.close() # close the new xrog.conf file
except IOError:
print 'IOError: ', Xorg_filepath, " or ", Xorg_nfilepath,' '
sys.exit(13)
# Make the backup copy now.  The commands are fixed strings built from
# module constants (no untrusted input), executed via the shell.
cpproc = subprocess.Popen(cp2bkup, shell=True)
cppid,status = os.waitpid(cpproc.pid, 0)
#print status
# successful copy returns 0 as a status
if ( status != 0 ) :
    print 'Cound not copy: ', Xorg_filepath, ' to backup: ' + Xorg_bkup_filepath
    print 'Abort changing xorg.conf'
    sys.exit(14)
# copy the modified xorg file on top of the xorg.conf file
cpproc = subprocess.Popen(cp2xorg, shell=True)
cppid,status = os.waitpid(cpproc.pid, 0)
#print status
# successful copy returns 0 as a status
if ( status != 0 ) :
    print 'Cound not copy: ', Xorg_nfilepath, ' to ' , Xorg_filepath
    sys.exit(15)
#
# remove the temporary xorg.conf.vnc file
cpproc = subprocess.Popen(rmxorgvnc, shell=True)
cppid,status = os.waitpid(cpproc.pid, 0)
# no need to check status
sys.exit(1) # indicate that the Xsession will need to be restarted.
#
# for RHEL 5.X execute gdm-restart or gdm-safe-restart to restart the gnome display manager
# and Xsession to enable changes to xorg.conf
# keystroke Cntrl-Alt-Backspace also restart Xsession
#
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2007 Brian G. Matherly
# Copyright (C) 2010 Jakim Friant
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Contain and organize bibliographic information.
"""
#-------------------------------------------------------------------------
#
# Standard python modules
#
#-------------------------------------------------------------------------
import string
import math
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ...lib.citation import Citation as lib_Citation
class Citation:
    """
    Store information about a citation and all of its references.

    A citation points at one source (by handle) and keeps a list of
    ``(key, source_ref)`` pairs, where the key is a spreadsheet-style
    label ("a".."z", "aa", "ab", ...) assigned in insertion order.
    """
    def __init__(self):
        """
        Initialize members.
        """
        self.__src_handle = None
        self.__ref_list = []
    def get_source_handle(self):
        """
        Provide the handle to the source that this citation is for.
        :return: Source Handle
        :rtype: handle
        """
        return self.__src_handle
    def set_source_handle(self, handle):
        """
        Set the handle for the source that this citation is for.
        :param handle: Source Handle
        :type handle: handle
        """
        self.__src_handle = handle
    def get_ref_list(self):
        """
        List all the references to this citation.
        :return: a list of (key, reference) pairs
        :rtype: list of :class:`~.citation.Citation` objects
        """
        return self.__ref_list
    def add_reference(self, source_ref):
        """
        Add a reference to this citation. If a similar reference exists, don't
        add another one.
        :param source_ref: Source Reference
        :type source_ref: :class:`~.citation.Citation`
        :return: The key of the added reference among all the references.
        :rtype: char
        """
        letters = string.ascii_lowercase # or (e.g.) "abcdef" for testing
        letter_count = len(letters)
        ref_count = len(self.__ref_list)
        x_ref_count = ref_count
        # Return "a" for ref_count = 0, otherwise log(0) does not work
        if ref_count == 0:
            self.__ref_list.append((letters[0], source_ref))
            return letters[0]
        # Last "digit" of the key cycles through the alphabet.
        last_letter = letters[ ref_count % letter_count ]
        key = ""
        # Calculate the number of letters (digits) in the key.
        number_of_letters = 1 + int(math.log(float(ref_count),
                                             float(letter_count)))
        # Exclude index for number_of_letters-1
        for n in range(1, number_of_letters-1):
            ref_count -= pow(letter_count, n)
        # Adjust number_of_letters for new index
        number_of_letters = 1 + int(math.log(float(ref_count),
                                             float(letter_count)))
        # NOTE(review): the base-26 positional arithmetic below assumes keys
        # are assigned strictly in insertion order -- confirm before reuse.
        for n in range(1, number_of_letters):
            x_ref_count -= pow(letter_count, n)
        for letter in range(1, number_of_letters):
            index = x_ref_count // pow(letter_count, letter) % letter_count
            key += letters[index]
        key = key + last_letter
        self.__ref_list.append((key, source_ref))
        return key
class Bibliography:
    """
    Store and organize multiple citations into a bibliography.
    """
    # Bit flags selecting which parts of a source reference are compared
    # when deciding whether two references are duplicates.
    MODE_DATE = 2**0
    MODE_PAGE = 2**1
    MODE_CONF = 2**2
    MODE_NOTE = 2**3
    MODE_MEDIA = 2**4
    MODE_ALL = MODE_DATE | MODE_PAGE | MODE_CONF | MODE_NOTE | MODE_MEDIA
def __init__(self, mode=MODE_ALL):
"""
A bibliography will store citations (sources) and references to those
citations (citations). Duplicate entries will not be added. To change
what is considered duplicate, you can tell the bibliography what source
ref information you are interested in by passing in the mode.
Possible modes include:
- MODE_DATE
- MODE_PAGE
- MODE_CONF
- MODE_NOTE
- MODE_MEDIA
- MODE_ALL
If you only care about pages, set "mode=MODE_PAGE".
If you only care about dates and pages, set "mode=MODE_DATE|MODE_PAGE".
If you care about everything, set "mode=MODE_ALL".
"""
self.__citation_list = []
self.mode = mode
    def add_reference(self, lib_citation):
        """
        Add a reference to a source to this bibliography. If the source already
        exists, don't add it again. If a similar reference exists, don't
        add another one.
        :param citation: Citation object
        :type citation: :class:`~.citation.Citation`
        :return: A tuple containing the index of the source among all the
                 sources and the key of the reference among all the references.
                 If there is no reference information, the second element will
                 be None.
        :rtype: (int,char) or (int,None)
        .. note::
            Within this file, the name 'citation' is used both for
            gen.lib.Citation, and for _bibliography.Citation. It is not clear
            how best to rename the concepts in this file to avoid the clash,
            so the names have been retained. In this function, lib_citation
            is used for gen.lib.Citation instances, and citation for
            _bibliography.Citation instances. Elsewhere in this file,
            source_ref is used for gen.lib.Citation instances.
        """
        source_handle = lib_citation.get_reference_handle()
        cindex = 0
        rkey = ""
        citation = None
        citation_found = False
        # Linear scan for an existing citation of the same source.
        for citation in self.__citation_list:
            if citation.get_source_handle() == source_handle:
                citation_found = True
                break
            cindex += 1
        if not citation_found:
            # First reference to this source: create a new citation entry.
            citation = Citation()
            citation.set_source_handle(source_handle)
            cindex = len(self.__citation_list)
            self.__citation_list.append(citation)
        if self.__sref_has_info(lib_citation):
            for key, ref in citation.get_ref_list():
                if self.__srefs_are_equal(ref, lib_citation):
                    # if a reference like this already exists, don't add
                    # another one
                    return (cindex, key)
            rkey = citation.add_reference(lib_citation)
        # NOTE(review): when the reference carries no info this returns
        # (cindex, "") rather than (cindex, None) as the docstring says --
        # confirm what callers test for.
        return (cindex, rkey)
def get_citation_count(self):
"""
Report the number of citations in this bibliography.
:return: number of citations
:rtype: int
"""
return len(self.__citation_list)
def get_citation_list(self):
"""
Return a list containing all the citations in this bibliography.
:return: citation list
:rtype: list of :class:`Citation` objects
"""
return self.__citation_list
def __sref_has_info(self, source_ref):
"""
Determine if this source_ref has any useful information based on the
current mode.
"""
if ( self.mode & self.MODE_PAGE ) == self.MODE_PAGE:
if source_ref.get_page() != "":
return True
if ( self.mode & self.MODE_DATE ) == self.MODE_DATE:
date = source_ref.get_date_object()
if date is not None and not date.is_empty():
return True
if ( self.mode & self.MODE_CONF ) == self.MODE_CONF:
confidence = source_ref.get_confidence_level()
if confidence is not None and confidence != \
lib_Citation.CONF_NORMAL:
return True
if ( self.mode & self.MODE_NOTE ) == self.MODE_NOTE:
if len(source_ref.get_note_list()) != 0:
return True
if ( self.mode & self.MODE_MEDIA ) == self.MODE_MEDIA:
if len(source_ref.get_media_list()) != 0:
return True
# Can't find anything interesting.
return False
    def __srefs_are_equal(self, source_ref1, source_ref2):
        """
        Determine if two source references are equal based on the
        current mode.

        :param source_ref1: first gen.lib.Citation to compare
        :param source_ref2: second gen.lib.Citation to compare
        :return: True when the two references are considered equal for
            every field selected by ``self.mode``.
        :rtype: bool
        """
        # The criterion for equality (in mode==MODE_ALL) is changed for
        # citations. Previously, it was based on is_equal from SecondaryObject,
        # which does a 'cmp' on the serialised data. (Note that this might not
        # have worked properly for Dates; see comments in Date.is_equal and
        # EditCitation.data_has_changed). The comparison is now made as to
        # whether the two gen.lib.Citations have the same handle (i.e. they are
        # actually the same database objects). It is felt that this better
        # reflects the intent of Citation objects, which can be merged if they
        # are intended to represent the same citation.
        if self.mode == self.MODE_ALL:
            return source_ref1.handle == source_ref2.handle
        if ( self.mode & self.MODE_PAGE ) == self.MODE_PAGE:
            if source_ref1.get_page() != source_ref2.get_page():
                return False
        if ( self.mode & self.MODE_DATE ) == self.MODE_DATE:
            date1 = source_ref1.get_date_object()
            date2 = source_ref2.get_date_object()
            if not date1.is_equal(date2):
                return False
        if ( self.mode & self.MODE_CONF ) == self.MODE_CONF:
            conf1 = source_ref1.get_confidence_level()
            conf2 = source_ref2.get_confidence_level()
            if conf1 != conf2:
                return False
        if ( self.mode & self.MODE_NOTE ) == self.MODE_NOTE:
            # Note lists are compared as unordered sets of handles.
            nl1 = source_ref1.get_note_list()
            nl2 = source_ref2.get_note_list()
            if len(nl1) != len(nl2):
                return False
            for notehandle in nl1:
                if notehandle not in nl2:
                    return False
        if ( self.mode & self.MODE_MEDIA ) == self.MODE_MEDIA:
            # Media lists are likewise compared as unordered sets of handles.
            nl1 = source_ref1.get_media_list()
            nl2 = source_ref2.get_media_list()
            if len(nl1) != len(nl2):
                return False
            for mediahandle in nl1:
                if mediahandle not in nl2:
                    return False
        # Can't find anything different. They must be equal.
        return True
| SNoiraud/gramps | gramps/gen/plug/report/_bibliography.py | Python | gpl-2.0 | 11,017 |
import re
from .utils import validator
@validator
def card_number(value):
    """
    Return whether or not given value is a valid card number.

    This validator is based on the Luhn (mod-10) checksum algorithm.

    .. luhn:
        https://github.com/mmcloughlin/luhn

    Examples::

        >>> card_number('4242424242424242')
        True

        >>> card_number('4242424242424241')
        ValidationFailure(func=card_number, args={'value': '4242424242424241'})

    .. versionadded:: 0.15.0

    :param value: card number string to validate
    """
    try:
        digits = list(map(int, value))
        # Luhn: sum digits from the right, doubling every second digit and
        # summing the digits of each doubled value (divmod(2*d, 10)).
        odd_sum = sum(digits[-1::-2])
        even_sum = sum(sum(divmod(2 * d, 10)) for d in digits[-2::-2])
        return (odd_sum + even_sum) % 10 == 0
    except (ValueError, TypeError):
        # ValueError: value contains non-digit characters.
        # TypeError: value is not iterable (e.g. None) -- previously this
        # escaped as an uncaught exception instead of a clean failure.
        return False
@validator
def visa(value):
    """
    Check that *value* is a well-formed Visa card number.

    Visa numbers are 16 digits long, start with ``4`` and must pass the
    Luhn checksum (see :func:`card_number`).

    .. versionadded:: 0.15.0

    :param value: Visa card number string to validate
    """
    return (card_number(value) and
            len(value) == 16 and
            re.match(r'^4', value))
@validator
def mastercard(value):
    """
    Check that *value* is a well-formed Mastercard card number.

    Mastercard numbers are 16 digits long, start with 51-55 or 22-27 and
    must pass the Luhn checksum (see :func:`card_number`).

    .. versionadded:: 0.15.0

    :param value: Mastercard card number string to validate
    """
    return (card_number(value) and
            len(value) == 16 and
            re.match(r'^(51|52|53|54|55|22|23|24|25|26|27)', value))
@validator
def amex(value):
    """
    Check that *value* is a well-formed American Express card number.

    Amex numbers are 15 digits long, start with 34 or 37 and must pass
    the Luhn checksum (see :func:`card_number`).

    .. versionadded:: 0.15.0

    :param value: American Express card number string to validate
    """
    return (card_number(value) and
            len(value) == 15 and
            re.match(r'^(34|37)', value))
@validator
def unionpay(value):
    """
    Check that *value* is a well-formed UnionPay card number.

    UnionPay numbers are 16 digits long, start with 62 and must pass the
    Luhn checksum (see :func:`card_number`).

    .. versionadded:: 0.15.0

    :param value: UnionPay card number string to validate
    """
    return (card_number(value) and
            len(value) == 16 and
            re.match(r'^62', value))
@validator
def diners(value):
    """
    Check that *value* is a well-formed Diners Club card number.

    Diners numbers are 14 or 16 digits long, start with 30, 36, 38 or 39
    and must pass the Luhn checksum (see :func:`card_number`).

    .. versionadded:: 0.15.0

    :param value: Diners Club card number string to validate
    """
    return (card_number(value) and
            len(value) in [14, 16] and
            re.match(r'^(30|36|38|39)', value))
@validator
def jcb(value):
    """
    Check that *value* is a well-formed JCB card number.

    JCB numbers are 16 digits long, start with 35 and must pass the Luhn
    checksum (see :func:`card_number`).

    .. versionadded:: 0.15.0

    :param value: JCB card number string to validate
    """
    return (card_number(value) and
            len(value) == 16 and
            re.match(r'^35', value))
@validator
def discover(value):
    """
    Check that *value* is a well-formed Discover card number.

    Discover numbers are 16 digits long, start with 60, 64 or 65 and
    must pass the Luhn checksum (see :func:`card_number`).

    .. versionadded:: 0.15.0

    :param value: Discover card number string to validate
    """
    return (card_number(value) and
            len(value) == 16 and
            re.match(r'^(60|64|65)', value))
| kvesteri/validators | validators/card.py | Python | mit | 4,383 |
from autoprotocol.util import make_dottable_dict
from autoprotocol import Unit
def glycerol_storage(protocol, params):
    """For each requested sample, prepare a glycerol stock tube.

    A 1.5 mL tube is created for -80C storage, pre-filled with 500 uL of
    glycerol (resource rs17rrhqpsxyh2), and 500 uL of the sample is
    transferred in and mixed.
    """
    params = make_dottable_dict(params)
    for sample_spec in params["samples"]:
        stock_tube = protocol.ref(sample_spec["label"],
                                  cont_type="micro-1.5",
                                  storage="cold_80")
        protocol.provision("rs17rrhqpsxyh2", stock_tube.well(0), "500:microliter")
        protocol.transfer(sample_spec["sample"], stock_tube.well(0),
                          "500:microliter", mix_after=True)
# Entry point when run directly through the Autoprotocol harness.
if __name__ == '__main__':
    from autoprotocol.harness import run
    run(glycerol_storage, 'GlycerolStorage')
| bmiles/transcriptic_public_protocols | miles_public_protocols/glycerol_storage.py | Python | mit | 570 |
import logging
import requests
from flask import redirect, url_for, Blueprint, flash, request, session
from flask_login import login_user
from flask_oauthlib.client import OAuth
from sqlalchemy.orm.exc import NoResultFound
from redash import models, settings
from redash.authentication.org_resolving import current_org
# Module-level singletons: logger, OAuth registry and the Flask blueprint
# that hosts all Google OAuth routes below.
logger = logging.getLogger('google_oauth')
oauth = OAuth()
blueprint = Blueprint('google_oauth', __name__)
def google_remote_app():
    """Return the Flask-OAuthlib remote app for Google.

    The app is registered lazily on first call and cached on the shared
    ``oauth`` registry, so subsequent calls return the same object.
    """
    if 'google' not in oauth.remote_apps:
        oauth.remote_app('google',
                         base_url='https://www.google.com/accounts/',
                         authorize_url='https://accounts.google.com/o/oauth2/auth',
                         request_token_url=None,
                         request_token_params={
                             'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile',
                         },
                         access_token_url='https://accounts.google.com/o/oauth2/token',
                         access_token_method='POST',
                         consumer_key=settings.GOOGLE_CLIENT_ID,
                         consumer_secret=settings.GOOGLE_CLIENT_SECRET)
    return oauth.google
def get_user_profile(access_token):
    """Fetch the Google userinfo profile for *access_token*.

    :return: the profile dict on success, or None on any failure.
    """
    headers = {'Authorization': 'OAuth {}'.format(access_token)}
    response = requests.get('https://www.googleapis.com/oauth2/v1/userinfo', headers=headers)
    # Previously only 401 was treated as failure; any other error status
    # returned Google's error JSON (without 'email'/'name'), which crashed
    # callers later with a KeyError. Treat every non-200 as failure.
    if response.status_code != 200:
        logger.warning("Failed getting user profile (response code %s).",
                       response.status_code)
        return None
    return response.json()
def verify_profile(org, profile):
    """Decide whether this Google profile may sign in to *org*.

    Access is granted when the org is public, when the e-mail's domain is
    one of the org's allowed Google Apps domains, or when a user with
    that e-mail already exists in the org.
    """
    if org.is_public:
        return True
    email = profile['email']
    domain = email.split('@')[-1]
    return domain in org.google_apps_domains or org.has_user(email) == 1
def create_and_login_user(org, name, email):
    """Find or create the user for *email* in *org*, then log them in.

    An existing user's display name is refreshed if Google reports a
    different one; a new user is created in the org's default group.

    :return: the logged-in user object.
    """
    try:
        user_object = models.User.get_by_email_and_org(email, org)
        if user_object.name != name:
            logger.debug("Updating user name (%r -> %r)", user_object.name, name)
            user_object.name = name
            models.db.session.commit()
    except NoResultFound:
        logger.debug("Creating user object (%r)", name)
        user_object = models.User(org=org, name=name, email=email, group_ids=[org.default_group.id])
        models.db.session.add(user_object)
    # remember=True keeps the session across browser restarts.
    login_user(user_object, remember=True)
    return user_object
@blueprint.route('/<org_slug>/oauth/google', endpoint="authorize_org")
def org_login(org_slug):
    """Start Google OAuth for a specific organization.

    The org slug is stashed in the session so the callback can resolve
    the organization after the round-trip to Google.
    """
    session['org_slug'] = current_org.slug
    return redirect(url_for(".authorize", next=request.args.get('next', None)))
@blueprint.route('/oauth/google', endpoint="authorize")
def login():
    """Redirect the browser to Google's authorization endpoint.

    The post-login destination URL travels through OAuth's ``state``
    parameter and is read back in the callback handler.
    """
    callback = url_for('.callback', _external=True)
    next = request.args.get('next', url_for("redash.index", org_slug=session.get('org_slug')))
    logger.debug("Callback url: %s", callback)
    logger.debug("Next is: %s", next)
    return google_remote_app().authorize(callback=callback, state=next)
@blueprint.route('/oauth/google_callback', endpoint="callback")
def authorized():
    """Handle the OAuth2 callback from Google.

    Validates the access token and profile, resolves the organization,
    logs the user in and redirects to the URL carried in ``state``.
    """
    resp = google_remote_app().authorized_response()
    # authorized_response() returns None when the user denied access or the
    # provider reported an error; guard before indexing, otherwise this
    # raised TypeError instead of showing the validation-error flash.
    access_token = resp['access_token'] if resp is not None else None
    if access_token is None:
        logger.warning("Access token missing in call back request.")
        flash("Validation error. Please retry.")
        return redirect(url_for('redash.login'))
    profile = get_user_profile(access_token)
    if profile is None:
        flash("Validation error. Please retry.")
        return redirect(url_for('redash.login'))
    # org_login stored the slug in the session for org-specific logins.
    if 'org_slug' in session:
        org = models.Organization.get_by_slug(session.pop('org_slug'))
    else:
        org = current_org
    if not verify_profile(org, profile):
        logger.warning("User tried to login with unauthorized domain name: %s (org: %s)", profile['email'], org)
        flash("Your Google Apps account ({}) isn't allowed.".format(profile['email']))
        return redirect(url_for('redash.login', org_slug=org.slug))
    create_and_login_user(org, profile['name'], profile['email'])
    next = request.args.get('state') or url_for("redash.index", org_slug=org.slug)
    return redirect(next)
| stefanseifert/redash | redash/authentication/google_oauth.py | Python | bsd-2-clause | 4,261 |
"""Configure number in a device through MQTT topic."""
from __future__ import annotations
import functools
import logging
import voluptuous as vol
from homeassistant.components import number
from homeassistant.components.number import (
DEFAULT_MAX_VALUE,
DEFAULT_MIN_VALUE,
DEFAULT_STEP,
NumberEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_NAME,
CONF_OPTIMISTIC,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from . import PLATFORMS, MqttCommandTemplate, MqttValueTemplate, subscription
from .. import mqtt
from .const import (
CONF_COMMAND_TOPIC,
CONF_ENCODING,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
DOMAIN,
)
from .debug_info import log_messages
from .mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity, async_setup_entry_helper
# Configuration key for the template applied to outgoing command payloads.
CONF_COMMAND_TEMPLATE = "command_template"
_LOGGER = logging.getLogger(__name__)
# Number-specific configuration keys and their defaults.
CONF_MIN = "min"
CONF_MAX = "max"
CONF_PAYLOAD_RESET = "payload_reset"
CONF_STEP = "step"
DEFAULT_NAME = "MQTT Number"
DEFAULT_OPTIMISTIC = False
DEFAULT_PAYLOAD_RESET = "None"
# Entity attributes that MQTT attribute topics may not override, because
# they are managed by this platform itself.
MQTT_NUMBER_ATTRIBUTES_BLOCKED = frozenset(
    {
        number.ATTR_MAX,
        number.ATTR_MIN,
        number.ATTR_STEP,
    }
)
def validate_config(config):
    """Check that the configured minimum is strictly below the maximum.

    Raises ``vol.Invalid`` on violation; returns the config unchanged
    otherwise (voluptuous validator contract).
    """
    minimum = config.get(CONF_MIN)
    maximum = config.get(CONF_MAX)
    if minimum >= maximum:
        raise vol.Invalid(f"'{CONF_MAX}' must be > '{CONF_MIN}'")
    return config
# Base schema shared by YAML and discovery configuration.
_PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
    {
        vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
        vol.Optional(CONF_MAX, default=DEFAULT_MAX_VALUE): vol.Coerce(float),
        vol.Optional(CONF_MIN, default=DEFAULT_MIN_VALUE): vol.Coerce(float),
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
        vol.Optional(CONF_PAYLOAD_RESET, default=DEFAULT_PAYLOAD_RESET): cv.string,
        vol.Optional(CONF_STEP, default=DEFAULT_STEP): vol.All(
            vol.Coerce(float), vol.Range(min=1e-3)
        ),
        vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
        vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
    },
).extend(MQTT_ENTITY_COMMON_SCHEMA.schema)
# Schema for manual setup via configuration.yaml; min/max are cross-checked.
PLATFORM_SCHEMA = vol.All(
    _PLATFORM_SCHEMA_BASE,
    validate_config,
)
# Schema for MQTT discovery payloads; unknown keys are dropped, not rejected.
DISCOVERY_SCHEMA = vol.All(
    _PLATFORM_SCHEMA_BASE.extend({}, extra=vol.REMOVE_EXTRA),
    validate_config,
)
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up MQTT number through configuration.yaml."""
    # Make sure the reload service exists before adding the entity.
    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
    await _async_setup_entity(hass, async_add_entities, config)
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up MQTT number dynamically through MQTT discovery."""
    setup = functools.partial(
        _async_setup_entity, hass, async_add_entities, config_entry=config_entry
    )
    # Each valid discovery payload (per DISCOVERY_SCHEMA) triggers `setup`.
    await async_setup_entry_helper(hass, number.DOMAIN, setup, DISCOVERY_SCHEMA)
async def _async_setup_entity(
    hass, async_add_entities, config, config_entry=None, discovery_data=None
):
    """Set up the MQTT number.

    Shared by the YAML path (no config_entry/discovery_data) and the
    discovery path.
    """
    async_add_entities([MqttNumber(hass, config, config_entry, discovery_data)])
class MqttNumber(MqttEntity, NumberEntity, RestoreEntity):
    """Representation of an MQTT-backed number entity."""
    _entity_id_format = number.ENTITY_ID_FORMAT
    _attributes_extra_blocked = MQTT_NUMBER_ATTRIBUTES_BLOCKED
    def __init__(self, hass, config, config_entry, discovery_data):
        """Initialize the MQTT Number."""
        self._config = config
        # Pessimistic until _setup_from_config/_subscribe_topics decide.
        self._optimistic = False
        self._sub_state = None
        self._current_number = None
        NumberEntity.__init__(self)
        MqttEntity.__init__(self, hass, config, config_entry, discovery_data)
    @staticmethod
    def config_schema():
        """Return the config schema."""
        return DISCOVERY_SCHEMA
    def _setup_from_config(self, config):
        """(Re)Setup the entity."""
        self._optimistic = config[CONF_OPTIMISTIC]
        # Pre-build render helpers: command template for outgoing payloads,
        # value template for incoming state payloads.
        self._templates = {
            CONF_COMMAND_TEMPLATE: MqttCommandTemplate(
                config.get(CONF_COMMAND_TEMPLATE), entity=self
            ).async_render,
            CONF_VALUE_TEMPLATE: MqttValueTemplate(
                config.get(CONF_VALUE_TEMPLATE),
                entity=self,
            ).async_render_with_possible_json_value,
        }
    async def _subscribe_topics(self):
        """(Re)Subscribe to topics."""
        @callback
        @log_messages(self.hass, self.entity_id)
        def message_received(msg):
            """Handle new MQTT messages."""
            payload = self._templates[CONF_VALUE_TEMPLATE](msg.payload)
            try:
                if payload == self._config[CONF_PAYLOAD_RESET]:
                    # The reset payload clears the known value.
                    num_value = None
                elif payload.isnumeric():
                    num_value = int(payload)
                else:
                    # NOTE(review): str.isnumeric() is False for signed
                    # payloads such as "-5", so those land here and become
                    # floats rather than ints -- confirm this is intended.
                    num_value = float(payload)
            except ValueError:
                _LOGGER.warning("Payload '%s' is not a Number", msg.payload)
                return
            # Reject out-of-range values instead of updating state.
            if num_value is not None and (
                num_value < self.min_value or num_value > self.max_value
            ):
                _LOGGER.error(
                    "Invalid value for %s: %s (range %s - %s)",
                    self.entity_id,
                    num_value,
                    self.min_value,
                    self.max_value,
                )
                return
            self._current_number = num_value
            self.async_write_ha_state()
        if self._config.get(CONF_STATE_TOPIC) is None:
            # Force into optimistic mode.
            self._optimistic = True
        else:
            self._sub_state = await subscription.async_subscribe_topics(
                self.hass,
                self._sub_state,
                {
                    "state_topic": {
                        "topic": self._config.get(CONF_STATE_TOPIC),
                        "msg_callback": message_received,
                        "qos": self._config[CONF_QOS],
                        "encoding": self._config[CONF_ENCODING] or None,
                    }
                },
            )
        if self._optimistic and (last_state := await self.async_get_last_state()):
            # NOTE(review): last_state.state is a string as restored from the
            # state machine; it is assigned without numeric conversion here
            # -- confirm downstream consumers tolerate that.
            self._current_number = last_state.state
    @property
    def min_value(self) -> float:
        """Return the minimum value."""
        return self._config[CONF_MIN]
    @property
    def max_value(self) -> float:
        """Return the maximum value."""
        return self._config[CONF_MAX]
    @property
    def step(self) -> float:
        """Return the increment/decrement step."""
        return self._config[CONF_STEP]
    @property
    def unit_of_measurement(self) -> str | None:
        """Return the unit of measurement."""
        return self._config.get(CONF_UNIT_OF_MEASUREMENT)
    @property
    def value(self):
        """Return the current value."""
        return self._current_number
    async def async_set_value(self, value: float) -> None:
        """Update the current value.

        Whole-number floats are published as ints so payloads read "5"
        rather than "5.0"; in optimistic mode the state is updated
        immediately, otherwise it waits for the state topic echo.
        """
        current_number = value
        if value.is_integer():
            current_number = int(value)
        payload = self._templates[CONF_COMMAND_TEMPLATE](current_number)
        if self._optimistic:
            self._current_number = current_number
            self.async_write_ha_state()
        await mqtt.async_publish(
            self.hass,
            self._config[CONF_COMMAND_TOPIC],
            payload,
            self._config[CONF_QOS],
            self._config[CONF_RETAIN],
            self._config[CONF_ENCODING],
        )
    @property
    def assumed_state(self):
        """Return true if we do optimistic updates."""
        return self._optimistic
| mezz64/home-assistant | homeassistant/components/mqtt/number.py | Python | apache-2.0 | 8,495 |
"""
test setting *parts* of objects both positionally and label based
TOD: these should be split among the indexer tests
"""
import pytest
from warnings import catch_warnings
import numpy as np
import pandas as pd
from pandas import Series, DataFrame, Panel, Index, date_range
from pandas.util import testing as tm
class TestPartialSetting(object):
    def test_partial_setting(self):
        """Partial (enlarging) setitem on Series, DataFrame and Panel.

        Label-based setters may enlarge the object; positional setters
        (iloc/iat) must raise IndexError for out-of-bounds targets.
        """
        # GH2578, allow ix and friends to partially set
        # series
        s_orig = Series([1, 2, 3])
        s = s_orig.copy()
        s[5] = 5
        expected = Series([1, 2, 3, 5], index=[0, 1, 2, 5])
        tm.assert_series_equal(s, expected)
        s = s_orig.copy()
        s.loc[5] = 5
        expected = Series([1, 2, 3, 5], index=[0, 1, 2, 5])
        tm.assert_series_equal(s, expected)
        s = s_orig.copy()
        s[5] = 5.
        expected = Series([1, 2, 3, 5.], index=[0, 1, 2, 5])
        tm.assert_series_equal(s, expected)
        s = s_orig.copy()
        s.loc[5] = 5.
        expected = Series([1, 2, 3, 5.], index=[0, 1, 2, 5])
        tm.assert_series_equal(s, expected)
        # iloc/iat raise
        s = s_orig.copy()
        def f():
            s.iloc[3] = 5.
        pytest.raises(IndexError, f)
        def f():
            s.iat[3] = 5.
        pytest.raises(IndexError, f)
        # ## frame ##
        df_orig = DataFrame(
            np.arange(6).reshape(3, 2), columns=['A', 'B'], dtype='int64')
        # iloc/iat raise
        df = df_orig.copy()
        def f():
            df.iloc[4, 2] = 5.
        pytest.raises(IndexError, f)
        def f():
            df.iat[4, 2] = 5.
        pytest.raises(IndexError, f)
        # row setting where it exists
        expected = DataFrame(dict({'A': [0, 4, 4], 'B': [1, 5, 5]}))
        df = df_orig.copy()
        df.iloc[1] = df.iloc[2]
        tm.assert_frame_equal(df, expected)
        expected = DataFrame(dict({'A': [0, 4, 4], 'B': [1, 5, 5]}))
        df = df_orig.copy()
        df.loc[1] = df.loc[2]
        tm.assert_frame_equal(df, expected)
        # like 2578, partial setting with dtype preservation
        expected = DataFrame(dict({'A': [0, 2, 4, 4], 'B': [1, 3, 5, 5]}))
        df = df_orig.copy()
        df.loc[3] = df.loc[2]
        tm.assert_frame_equal(df, expected)
        # single dtype frame, overwrite
        expected = DataFrame(dict({'A': [0, 2, 4], 'B': [0, 2, 4]}))
        df = df_orig.copy()
        with catch_warnings(record=True):
            df.ix[:, 'B'] = df.ix[:, 'A']
        tm.assert_frame_equal(df, expected)
        # mixed dtype frame, overwrite
        expected = DataFrame(dict({'A': [0, 2, 4], 'B': Series([0, 2, 4])}))
        df = df_orig.copy()
        df['B'] = df['B'].astype(np.float64)
        with catch_warnings(record=True):
            df.ix[:, 'B'] = df.ix[:, 'A']
        tm.assert_frame_equal(df, expected)
        # single dtype frame, partial setting
        expected = df_orig.copy()
        expected['C'] = df['A']
        df = df_orig.copy()
        with catch_warnings(record=True):
            df.ix[:, 'C'] = df.ix[:, 'A']
        tm.assert_frame_equal(df, expected)
        # mixed frame, partial setting
        expected = df_orig.copy()
        expected['C'] = df['A']
        df = df_orig.copy()
        with catch_warnings(record=True):
            df.ix[:, 'C'] = df.ix[:, 'A']
        tm.assert_frame_equal(df, expected)
        with catch_warnings(record=True):
            # ## panel ##
            p_orig = Panel(np.arange(16).reshape(2, 4, 2),
                           items=['Item1', 'Item2'],
                           major_axis=pd.date_range('2001/1/12', periods=4),
                           minor_axis=['A', 'B'], dtype='float64')
            # panel setting via item
            p_orig = Panel(np.arange(16).reshape(2, 4, 2),
                           items=['Item1', 'Item2'],
                           major_axis=pd.date_range('2001/1/12', periods=4),
                           minor_axis=['A', 'B'], dtype='float64')
            expected = p_orig.copy()
            expected['Item3'] = expected['Item1']
            p = p_orig.copy()
            p.loc['Item3'] = p['Item1']
            tm.assert_panel_equal(p, expected)
            # panel with aligned series
            expected = p_orig.copy()
            expected = expected.transpose(2, 1, 0)
            expected['C'] = DataFrame({'Item1': [30, 30, 30, 30],
                                       'Item2': [32, 32, 32, 32]},
                                      index=p_orig.major_axis)
            expected = expected.transpose(2, 1, 0)
            p = p_orig.copy()
            p.loc[:, :, 'C'] = Series([30, 32], index=p_orig.items)
            tm.assert_panel_equal(p, expected)
        # GH 8473
        dates = date_range('1/1/2000', periods=8)
        df_orig = DataFrame(np.random.randn(8, 4), index=dates,
                            columns=['A', 'B', 'C', 'D'])
        expected = pd.concat([df_orig, DataFrame(
            {'A': 7}, index=[dates[-1] + 1])])
        df = df_orig.copy()
        df.loc[dates[-1] + 1, 'A'] = 7
        tm.assert_frame_equal(df, expected)
        df = df_orig.copy()
        df.at[dates[-1] + 1, 'A'] = 7
        tm.assert_frame_equal(df, expected)
        exp_other = DataFrame({0: 7}, index=[dates[-1] + 1])
        expected = pd.concat([df_orig, exp_other], axis=1)
        df = df_orig.copy()
        df.loc[dates[-1] + 1, 0] = 7
        tm.assert_frame_equal(df, expected)
        df = df_orig.copy()
        df.at[dates[-1] + 1, 0] = 7
        tm.assert_frame_equal(df, expected)
    def test_partial_setting_mixed_dtype(self):
        """Partial setting on mixed-dtype frames preserves/aligns dtypes."""
        # in a mixed dtype environment, try to preserve dtypes
        # by appending
        df = DataFrame([[True, 1], [False, 2]], columns=["female", "fitness"])
        s = df.loc[1].copy()
        s.name = 2
        expected = df.append(s)
        df.loc[2] = df.loc[1]
        tm.assert_frame_equal(df, expected)
        # columns will align
        df = DataFrame(columns=['A', 'B'])
        df.loc[0] = Series(1, index=range(4))
        tm.assert_frame_equal(df, DataFrame(columns=['A', 'B'], index=[0]))
        # columns will align
        df = DataFrame(columns=['A', 'B'])
        df.loc[0] = Series(1, index=['B'])
        exp = DataFrame([[np.nan, 1]], columns=['A', 'B'],
                        index=[0], dtype='float64')
        tm.assert_frame_equal(df, exp)
        # list-like must conform
        df = DataFrame(columns=['A', 'B'])
        def f():
            df.loc[0] = [1, 2, 3]
        pytest.raises(ValueError, f)
        # TODO: #15657, these are left as object and not coerced
        df = DataFrame(columns=['A', 'B'])
        df.loc[3] = [6, 7]
        exp = DataFrame([[6, 7]], index=[3], columns=['A', 'B'],
                        dtype='object')
        tm.assert_frame_equal(df, exp)
    def test_series_partial_set(self):
        """.loc with partially-missing label lists on Series.

        Missing labels produce NaN (with a deprecation warning, being
        equivalent to .reindex); fully-missing lists raise KeyError.
        """
        # partial set with new index
        # Regression from GH4825
        ser = Series([0.1, 0.2], index=[1, 2])
        # loc equiv to .reindex
        expected = Series([np.nan, 0.2, np.nan], index=[3, 2, 3])
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = ser.loc[[3, 2, 3]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        result = ser.reindex([3, 2, 3])
        tm.assert_series_equal(result, expected, check_index_type=True)
        expected = Series([np.nan, 0.2, np.nan, np.nan], index=[3, 2, 3, 'x'])
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = ser.loc[[3, 2, 3, 'x']]
        tm.assert_series_equal(result, expected, check_index_type=True)
        result = ser.reindex([3, 2, 3, 'x'])
        tm.assert_series_equal(result, expected, check_index_type=True)
        expected = Series([0.2, 0.2, 0.1], index=[2, 2, 1])
        result = ser.loc[[2, 2, 1]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        expected = Series([0.2, 0.2, np.nan, 0.1], index=[2, 2, 'x', 1])
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = ser.loc[[2, 2, 'x', 1]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        result = ser.reindex([2, 2, 'x', 1])
        tm.assert_series_equal(result, expected, check_index_type=True)
        # raises as nothing in in the index
        pytest.raises(KeyError, lambda: ser.loc[[3, 3, 3]])
        expected = Series([0.2, 0.2, np.nan], index=[2, 2, 3])
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = ser.loc[[2, 2, 3]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        result = ser.reindex([2, 2, 3])
        tm.assert_series_equal(result, expected, check_index_type=True)
        s = Series([0.1, 0.2, 0.3], index=[1, 2, 3])
        expected = Series([0.3, np.nan, np.nan], index=[3, 4, 4])
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = s.loc[[3, 4, 4]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        result = s.reindex([3, 4, 4])
        tm.assert_series_equal(result, expected, check_index_type=True)
        s = Series([0.1, 0.2, 0.3, 0.4],
                   index=[1, 2, 3, 4])
        expected = Series([np.nan, 0.3, 0.3], index=[5, 3, 3])
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = s.loc[[5, 3, 3]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        result = s.reindex([5, 3, 3])
        tm.assert_series_equal(result, expected, check_index_type=True)
        s = Series([0.1, 0.2, 0.3, 0.4],
                   index=[1, 2, 3, 4])
        expected = Series([np.nan, 0.4, 0.4], index=[5, 4, 4])
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = s.loc[[5, 4, 4]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        result = s.reindex([5, 4, 4])
        tm.assert_series_equal(result, expected, check_index_type=True)
        s = Series([0.1, 0.2, 0.3, 0.4],
                   index=[4, 5, 6, 7])
        expected = Series([0.4, np.nan, np.nan], index=[7, 2, 2])
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = s.loc[[7, 2, 2]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        result = s.reindex([7, 2, 2])
        tm.assert_series_equal(result, expected, check_index_type=True)
        s = Series([0.1, 0.2, 0.3, 0.4],
                   index=[1, 2, 3, 4])
        expected = Series([0.4, np.nan, np.nan], index=[4, 5, 5])
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = s.loc[[4, 5, 5]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        result = s.reindex([4, 5, 5])
        tm.assert_series_equal(result, expected, check_index_type=True)
        # iloc
        expected = Series([0.2, 0.2, 0.1, 0.1], index=[2, 2, 1, 1])
        result = ser.iloc[[1, 1, 0, 0]]
        tm.assert_series_equal(result, expected, check_index_type=True)
    def test_series_partial_set_with_name(self):
        """Same as test_series_partial_set, but index/Series names must
        be preserved through the partial .loc indexing."""
        # GH 11497
        idx = Index([1, 2], dtype='int64', name='idx')
        ser = Series([0.1, 0.2], index=idx, name='s')
        # loc
        exp_idx = Index([3, 2, 3], dtype='int64', name='idx')
        expected = Series([np.nan, 0.2, np.nan], index=exp_idx, name='s')
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = ser.loc[[3, 2, 3]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        exp_idx = Index([3, 2, 3, 'x'], dtype='object', name='idx')
        expected = Series([np.nan, 0.2, np.nan, np.nan], index=exp_idx,
                          name='s')
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = ser.loc[[3, 2, 3, 'x']]
        tm.assert_series_equal(result, expected, check_index_type=True)
        exp_idx = Index([2, 2, 1], dtype='int64', name='idx')
        expected = Series([0.2, 0.2, 0.1], index=exp_idx, name='s')
        result = ser.loc[[2, 2, 1]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        exp_idx = Index([2, 2, 'x', 1], dtype='object', name='idx')
        expected = Series([0.2, 0.2, np.nan, 0.1], index=exp_idx, name='s')
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = ser.loc[[2, 2, 'x', 1]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        # raises as nothing in in the index
        pytest.raises(KeyError, lambda: ser.loc[[3, 3, 3]])
        exp_idx = Index([2, 2, 3], dtype='int64', name='idx')
        expected = Series([0.2, 0.2, np.nan], index=exp_idx, name='s')
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = ser.loc[[2, 2, 3]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        exp_idx = Index([3, 4, 4], dtype='int64', name='idx')
        expected = Series([0.3, np.nan, np.nan], index=exp_idx, name='s')
        idx = Index([1, 2, 3], dtype='int64', name='idx')
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = Series([0.1, 0.2, 0.3],
                            index=idx,
                            name='s').loc[[3, 4, 4]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        exp_idx = Index([5, 3, 3], dtype='int64', name='idx')
        expected = Series([np.nan, 0.3, 0.3], index=exp_idx, name='s')
        idx = Index([1, 2, 3, 4], dtype='int64', name='idx')
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = Series([0.1, 0.2, 0.3, 0.4], index=idx,
                            name='s').loc[[5, 3, 3]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        exp_idx = Index([5, 4, 4], dtype='int64', name='idx')
        expected = Series([np.nan, 0.4, 0.4], index=exp_idx, name='s')
        idx = Index([1, 2, 3, 4], dtype='int64', name='idx')
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = Series([0.1, 0.2, 0.3, 0.4], index=idx,
                            name='s').loc[[5, 4, 4]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        exp_idx = Index([7, 2, 2], dtype='int64', name='idx')
        expected = Series([0.4, np.nan, np.nan], index=exp_idx, name='s')
        idx = Index([4, 5, 6, 7], dtype='int64', name='idx')
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = Series([0.1, 0.2, 0.3, 0.4], index=idx,
                            name='s').loc[[7, 2, 2]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        exp_idx = Index([4, 5, 5], dtype='int64', name='idx')
        expected = Series([0.4, np.nan, np.nan], index=exp_idx, name='s')
        idx = Index([1, 2, 3, 4], dtype='int64', name='idx')
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = Series([0.1, 0.2, 0.3, 0.4], index=idx,
                            name='s').loc[[4, 5, 5]]
        tm.assert_series_equal(result, expected, check_index_type=True)
        # iloc
        exp_idx = Index([2, 2, 1, 1], dtype='int64', name='idx')
        expected = Series([0.2, 0.2, 0.1, 0.1], index=exp_idx, name='s')
        result = ser.iloc[[1, 1, 0, 0]]
        tm.assert_series_equal(result, expected, check_index_type=True)
    def test_partial_set_invalid(self):
        """Enlarging a datetime-indexed frame: non-datetime scalar labels
        raise, while string labels convert the index to object dtype."""
        # GH 4940
        # allow only setting of 'valid' values
        orig = tm.makeTimeDataFrame()
        df = orig.copy()
        # don't allow not string inserts
        def f():
            with catch_warnings(record=True):
                df.loc[100.0, :] = df.ix[0]
        pytest.raises(TypeError, f)
        def f():
            with catch_warnings(record=True):
                df.loc[100, :] = df.ix[0]
        pytest.raises(TypeError, f)
        def f():
            with catch_warnings(record=True):
                df.ix[100.0, :] = df.ix[0]
        pytest.raises(TypeError, f)
        def f():
            with catch_warnings(record=True):
                df.ix[100, :] = df.ix[0]
        pytest.raises(ValueError, f)
        # allow object conversion here
        df = orig.copy()
        with catch_warnings(record=True):
            df.loc['a', :] = df.ix[0]
            exp = orig.append(pd.Series(df.ix[0], name='a'))
        tm.assert_frame_equal(df, exp)
        tm.assert_index_equal(df.index,
                              pd.Index(orig.index.tolist() + ['a']))
        assert df.index.dtype == 'object'
    def test_partial_set_empty_series(self):
        """Growing an empty Series one label at a time via .loc."""
        # GH5226
        # partially set with an empty object series
        s = Series()
        s.loc[1] = 1
        tm.assert_series_equal(s, Series([1], index=[1]))
        s.loc[3] = 3
        tm.assert_series_equal(s, Series([1, 3], index=[1, 3]))
        s = Series()
        s.loc[1] = 1.
        tm.assert_series_equal(s, Series([1.], index=[1]))
        s.loc[3] = 3.
        tm.assert_series_equal(s, Series([1., 3.], index=[1, 3]))
        s = Series()
        s.loc['foo'] = 1
        tm.assert_series_equal(s, Series([1], index=['foo']))
        s.loc['bar'] = 3
        tm.assert_series_equal(s, Series([1, 3], index=['foo', 'bar']))
        s.loc[3] = 4
        tm.assert_series_equal(s, Series([1, 3, 4], index=['foo', 'bar', 3]))
def test_partial_set_empty_frame(self):
    """Partial setting on a completely empty DataFrame.

    Row/scalar enlargement on an empty frame raises; column assignment of
    empty data only changes the index/columns (GH5632).
    """
    df = DataFrame()

    # setting rows or scalars on an empty frame is ambiguous -> ValueError
    def f():
        df.loc[1] = 1

    pytest.raises(ValueError, f)

    def f():
        df.loc[1] = Series([1], index=['foo'])

    pytest.raises(ValueError, f)

    def f():
        df.loc[:, 1] = 1

    pytest.raises(ValueError, f)

    # these work as they don't really change
    # anything but the index
    # GH5632
    expected = DataFrame(columns=['foo'], index=pd.Index(
        [], dtype='int64'))

    def f():
        df = DataFrame()
        df['foo'] = Series([], dtype='object')
        return df

    tm.assert_frame_equal(f(), expected)

    def f():
        df = DataFrame()
        df['foo'] = Series(df.index)
        return df

    tm.assert_frame_equal(f(), expected)

    def f():
        df = DataFrame()
        df['foo'] = df.index
        return df

    tm.assert_frame_equal(f(), expected)

    # assigning empty list-likes yields a float64 column
    expected = DataFrame(columns=['foo'],
                         index=pd.Index([], dtype='int64'))
    expected['foo'] = expected['foo'].astype('float64')

    def f():
        df = DataFrame()
        df['foo'] = []
        return df

    tm.assert_frame_equal(f(), expected)

    def f():
        df = DataFrame()
        df['foo'] = Series(np.arange(len(df)), dtype='float64')
        return df

    tm.assert_frame_equal(f(), expected)

    def f():
        df = DataFrame()
        tm.assert_index_equal(df.index, pd.Index([], dtype='object'))
        df['foo'] = range(len(df))
        return df

    expected = DataFrame(columns=['foo'],
                         index=pd.Index([], dtype='int64'))
    expected['foo'] = expected['foo'].astype('float64')
    tm.assert_frame_equal(f(), expected)

    # .loc column assignment on an empty frame matches plain __setitem__
    df = DataFrame()
    tm.assert_index_equal(df.columns, pd.Index([], dtype=object))
    df2 = DataFrame()
    df2[1] = Series([1], index=['foo'])
    df.loc[:, 1] = Series([1], index=['foo'])
    tm.assert_frame_equal(df, DataFrame([[1]], index=['foo'], columns=[1]))
    tm.assert_frame_equal(df, df2)

    # no index to start: the assigned Series supplies the index
    expected = DataFrame({0: Series(1, index=range(4))},
                         columns=['A', 'B', 0])

    df = DataFrame(columns=['A', 'B'])
    df[0] = Series(1, index=range(4))
    df.dtypes  # smoke checks: repr/dtypes must not raise
    str(df)
    tm.assert_frame_equal(df, expected)

    df = DataFrame(columns=['A', 'B'])
    df.loc[:, 0] = Series(1, index=range(4))
    df.dtypes
    str(df)
    tm.assert_frame_equal(df, expected)
def test_partial_set_empty_frame_row(self):
    """Adding a column to an empty selection must not create rows.

    GH5720, GH5744.
    """
    expected = DataFrame(columns=['A', 'B', 'New'],
                         index=pd.Index([], dtype='int64'))
    expected['A'] = expected['A'].astype('int64')
    expected['B'] = expected['B'].astype('float64')
    expected['New'] = expected['New'].astype('float64')

    # empty boolean selection + new column stays empty
    df = DataFrame({"A": [1, 2, 3], "B": [1.2, 4.2, 5.2]})
    y = df[df.A > 5]
    y['New'] = np.nan
    tm.assert_frame_equal(y, expected)
    # tm.assert_frame_equal(y,expected)

    # scalar assignment to an empty frame keeps it empty (dtype int64)
    expected = DataFrame(columns=['a', 'b', 'c c', 'd'])
    expected['d'] = expected['d'].astype('int64')
    df = DataFrame(columns=['a', 'b', 'c c'])
    df['d'] = 3
    tm.assert_frame_equal(df, expected)
    tm.assert_series_equal(df['c c'], Series(name='c c', dtype=object))

    # reindex columns is ok
    df = DataFrame({"A": [1, 2, 3], "B": [1.2, 4.2, 5.2]})
    y = df[df.A > 5]
    result = y.reindex(columns=['A', 'B', 'C'])
    expected = DataFrame(columns=['A', 'B', 'C'],
                         index=pd.Index([], dtype='int64'))
    expected['A'] = expected['A'].astype('int64')
    expected['B'] = expected['B'].astype('float64')
    expected['C'] = expected['C'].astype('float64')
    tm.assert_frame_equal(result, expected)
def test_partial_set_empty_frame_set_series(self):
    """Constructing a DataFrame from an empty Series (GH 5756).

    An unnamed empty Series becomes column 0; a named one keeps its name.
    """
    df = DataFrame(Series())
    tm.assert_frame_equal(df, DataFrame({0: Series()}))

    df = DataFrame(Series(name='foo'))
    tm.assert_frame_equal(df, DataFrame({'foo': Series()}))
def test_partial_set_empty_frame_empty_copy_assignment(self):
    """Column assignment after copying a row-only frame works (GH 5932).

    Regression test: copy-on-empty followed by assignment used to fail.
    """
    df = DataFrame(index=[0])
    df = df.copy()
    df['a'] = 0
    expected = DataFrame(0, index=[0], columns=['a'])
    tm.assert_frame_equal(df, expected)
def test_partial_set_empty_frame_empty_consistencies(self):
    """Consistency of column/cell assignment on empty frames (GH 6171)."""
    # list assignment fills the other column with NaN
    df = DataFrame(columns=['x', 'y'])
    df['x'] = [1, 2]
    expected = DataFrame(dict(x=[1, 2], y=[np.nan, np.nan]))
    tm.assert_frame_equal(df, expected, check_dtype=False)

    # string values keep the frame object-dtyped
    df = DataFrame(columns=['x', 'y'])
    df['x'] = ['1', '2']
    expected = DataFrame(
        dict(x=['1', '2'], y=[np.nan, np.nan]), dtype=object)
    tm.assert_frame_equal(df, expected)

    # scalar cell enlargement behaves like the list case
    df = DataFrame(columns=['x', 'y'])
    df.loc[0, 'x'] = 1
    expected = DataFrame(dict(x=[1], y=[np.nan]))
    tm.assert_frame_equal(df, expected, check_dtype=False)
| NixaSoftware/CVis | venv/lib/python2.7/site-packages/pandas/tests/indexing/test_partial.py | Python | apache-2.0 | 23,301 |
from django.conf import settings
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.contrib import admin
from test_app import views
from simple_review.views import post_review
# URL routing for the test project: Django admin, the demo detail view,
# and the review-posting endpoint; static-file serving is appended below.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^detail-test/(?P<pk>\w+)$', views.InfoDetail.as_view(), name='info_detail'),
    url(r'^post_review/$', post_review, name='post_review'),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) | RamezIssac/simple-review | src/tests/tests/urls.py | Python | mit | 493 |
#!/usr/bin/env python
import argparse
import os
from pathlib import Path
import yaml
import ray
from ray.tune.config_parser import make_parser
from ray.tune.progress_reporter import CLIReporter, JupyterNotebookReporter
from ray.tune.result import DEFAULT_RESULTS_DIR
from ray.tune.resources import resources_to_json
from ray.tune.tune import run_experiments
from ray.tune.schedulers import create_scheduler
from ray.rllib.utils.deprecation import deprecation_warning
from ray.rllib.utils.framework import try_import_tf, try_import_torch
# Detect whether we are running inside a Jupyter notebook: ``get_ipython``
# only exists under IPython (NameError otherwise), and a terminal IPython
# shell reports a "Terminal..." class name, which does not count.
try:
    class_name = get_ipython().__class__.__name__
    IS_NOTEBOOK = True if "Terminal" not in class_name else False
except NameError:
    IS_NOTEBOOK = False

# Try to import both backends for flag checking/warnings.
tf1, tf, tfv = try_import_tf()
torch, _ = try_import_torch()

# Shown as the epilog of the --help output (see create_parser below).
EXAMPLE_USAGE = """
Training example via RLlib CLI:
rllib train --run DQN --env CartPole-v0
Grid search example via RLlib CLI:
rllib train -f tuned_examples/cartpole-grid-search-example.yaml
Grid search example via executable:
./train.py -f tuned_examples/cartpole-grid-search-example.yaml
Note that -f overrides all other trial-specific command-line options.
"""
def create_parser(parser_creator=None):
    """Build the argument parser for the ``rllib train`` command.

    :param parser_creator: optional parser factory forwarded to
        ``ray.tune.config_parser.make_parser``.
    :return: the configured ``argparse.ArgumentParser``.
    """
    parser = make_parser(
        parser_creator=parser_creator,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="Train a reinforcement learning agent.",
        epilog=EXAMPLE_USAGE,
    )

    # See also the base parser definition in ray/tune/config_parser.py
    parser.add_argument(
        "--ray-address",
        default=None,
        type=str,
        help="Connect to an existing Ray cluster at this address instead "
        "of starting a new one.",
    )
    parser.add_argument(
        "--ray-ui", action="store_true", help="Whether to enable the Ray web UI."
    )
    # Deprecated: Use --ray-ui, instead.
    parser.add_argument(
        "--no-ray-ui",
        action="store_true",
        help="Deprecated! Ray UI is disabled by default now. "
        "Use `--ray-ui` to enable.",
    )
    parser.add_argument(
        "--local-mode",
        action="store_true",
        help="Run ray in local mode for easier debugging.",
    )
    parser.add_argument(
        "--ray-num-cpus",
        default=None,
        type=int,
        help="--num-cpus to use if starting a new cluster.",
    )
    parser.add_argument(
        "--ray-num-gpus",
        default=None,
        type=int,
        help="--num-gpus to use if starting a new cluster.",
    )
    parser.add_argument(
        "--ray-num-nodes",
        default=None,
        type=int,
        help="Emulate multiple cluster nodes for debugging.",
    )
    parser.add_argument(
        "--ray-object-store-memory",
        default=None,
        type=int,
        help="--object-store-memory to use if starting a new cluster.",
    )
    parser.add_argument(
        "--experiment-name",
        default="default",
        type=str,
        help="Name of the subdirectory under `local_dir` to put results in.",
    )
    parser.add_argument(
        "--local-dir",
        default=DEFAULT_RESULTS_DIR,
        type=str,
        help="Local dir to save training results to. Defaults to '{}'.".format(
            DEFAULT_RESULTS_DIR
        ),
    )
    parser.add_argument(
        "--upload-dir",
        default="",
        type=str,
        help="Optional URI to sync training results to (e.g. s3://bucket).",
    )
    # This will override any framework setting found in a yaml file.
    parser.add_argument(
        "--framework",
        choices=["tf", "tf2", "tfe", "torch"],
        default=None,
        help="The DL framework specifier.",
    )
    parser.add_argument(
        "-v", action="store_true", help="Whether to use INFO level logging."
    )
    parser.add_argument(
        "-vv", action="store_true", help="Whether to use DEBUG level logging."
    )
    parser.add_argument(
        "--resume",
        action="store_true",
        help="Whether to attempt to resume previous Tune experiments.",
    )
    parser.add_argument(
        "--trace",
        action="store_true",
        help="Whether to attempt to enable tracing for eager mode.",
    )
    parser.add_argument(
        "--env", default=None, type=str, help="The gym environment to use."
    )
    parser.add_argument(
        "-f",
        "--config-file",
        default=None,
        type=str,
        help="If specified, use config options from this file. Note that this "
        "overrides any trial-specific options set via flags above.",
    )
    # Obsolete: Use --framework=torch|tf2|tfe instead!
    parser.add_argument(
        "--torch",
        action="store_true",
        help="Whether to use PyTorch (instead of tf) as the DL framework.",
    )
    parser.add_argument(
        "--eager",
        action="store_true",
        help="Whether to attempt to enable TF eager execution.",
    )
    return parser
def run(args, parser):
    """Assemble experiment specs, start Ray, and run the experiments.

    :param args: parsed CLI namespace produced by ``create_parser``.
    :param parser: the parser itself, used for ``parser.error`` reporting.
    """
    if args.config_file:
        # A yaml file overrides all trial-specific CLI flags.
        with open(args.config_file) as f:
            experiments = yaml.safe_load(f)
    else:
        # Note: keep this in sync with tune/config_parser.py
        experiments = {
            args.experiment_name: {  # i.e. log to ~/ray_results/default
                "run": args.run,
                "checkpoint_freq": args.checkpoint_freq,
                "checkpoint_at_end": args.checkpoint_at_end,
                "keep_checkpoints_num": args.keep_checkpoints_num,
                "checkpoint_score_attr": args.checkpoint_score_attr,
                "local_dir": args.local_dir,
                "resources_per_trial": (
                    args.resources_per_trial
                    and resources_to_json(args.resources_per_trial)
                ),
                "stop": args.stop,
                "config": dict(args.config, env=args.env),
                "restore": args.restore,
                "num_samples": args.num_samples,
                "sync_config": {
                    "upload_dir": args.upload_dir,
                },
            }
        }

    # Ray UI.
    if args.no_ray_ui:
        deprecation_warning(old="--no-ray-ui", new="--ray-ui", error=False)
        args.ray_ui = False

    verbose = 1
    for exp in experiments.values():
        # Bazel makes it hard to find files specified in `args` (and `data`).
        # Look for them here.
        # NOTE: Some of our yaml files don't have a `config` section.
        input_ = exp.get("config", {}).get("input")
        if input_ and input_ != "sampler":
            # This script runs in the ray/rllib dir.
            rllib_dir = Path(__file__).parent

            def patch_path(path):
                # Recursively resolve relative input paths against rllib_dir,
                # keeping any path that already exists untouched.
                if isinstance(path, list):
                    return [patch_path(i) for i in path]
                elif isinstance(path, dict):
                    return {patch_path(k): patch_path(v) for k, v in path.items()}
                elif isinstance(path, str):
                    if os.path.exists(path):
                        return path
                    else:
                        abs_path = str(rllib_dir.absolute().joinpath(path))
                        return abs_path if os.path.exists(abs_path) else path
                else:
                    return path

            exp["config"]["input"] = patch_path(input_)

        if not exp.get("run"):
            parser.error("the following arguments are required: --run")
        if not exp.get("env") and not exp.get("config", {}).get("env"):
            parser.error("the following arguments are required: --env")

        # Obsolete framework flags take precedence over --framework.
        if args.torch:
            deprecation_warning("--torch", "--framework=torch")
            exp["config"]["framework"] = "torch"
        elif args.eager:
            deprecation_warning("--eager", "--framework=[tf2|tfe]")
            exp["config"]["framework"] = "tfe"
        elif args.framework is not None:
            exp["config"]["framework"] = args.framework

        if args.trace:
            # Tracing only makes sense with an eager TF framework.
            if exp["config"]["framework"] not in ["tf2", "tfe"]:
                raise ValueError("Must enable --eager to enable tracing.")
            exp["config"]["eager_tracing"] = True

        if args.v:
            exp["config"]["log_level"] = "INFO"
            verbose = 3  # Print details on trial result
        if args.vv:
            exp["config"]["log_level"] = "DEBUG"
            verbose = 3  # Print details on trial result

    if args.ray_num_nodes:
        # Import this only here so that train.py also works with
        # older versions (and user doesn't use `--ray-num-nodes`).
        from ray.cluster_utils import Cluster

        cluster = Cluster()
        for _ in range(args.ray_num_nodes):
            cluster.add_node(
                num_cpus=args.ray_num_cpus or 1,
                num_gpus=args.ray_num_gpus or 0,
                object_store_memory=args.ray_object_store_memory,
            )
        ray.init(address=cluster.address)
    else:
        ray.init(
            include_dashboard=args.ray_ui,
            address=args.ray_address,
            object_store_memory=args.ray_object_store_memory,
            num_cpus=args.ray_num_cpus,
            num_gpus=args.ray_num_gpus,
            local_mode=args.local_mode,
        )

    if IS_NOTEBOOK:
        progress_reporter = JupyterNotebookReporter(
            overwrite=verbose >= 3, print_intermediate_tables=verbose >= 1
        )
    else:
        progress_reporter = CLIReporter(print_intermediate_tables=verbose >= 1)

    run_experiments(
        experiments,
        scheduler=create_scheduler(args.scheduler, **args.scheduler_config),
        resume=args.resume,
        verbose=verbose,
        progress_reporter=progress_reporter,
        concurrent=True,
    )

    ray.shutdown()
def main():
    """CLI entry point: build the parser, parse argv, and launch training."""
    cli_parser = create_parser()
    parsed_args = cli_parser.parse_args()
    run(parsed_args, cli_parser)
# Allow running this module directly: ``python train.py ...``.
if __name__ == "__main__":
    main()
| ray-project/ray | rllib/train.py | Python | apache-2.0 | 9,901 |
import unittest
from emoji_data_python import EmojiChar
class EmojiCharTestCase(unittest.TestCase):
    """Unit tests for ``EmojiChar`` built from one raw emoji-data record."""

    def setUp(self):
        # Raw record for U+261D (WHITE UP POINTING INDEX) including one
        # skin-tone variation, mirroring the emoji-data JSON format.
        self.emoji = EmojiChar({
            "name": "WHITE UP POINTING INDEX",
            "unified": "261D",
            "variations": [
                "261D-FE0F"
            ],
            "docomo": None,
            "au": "E4F6",
            "softbank": "E00F",
            "google": "FEB98",
            "image": "261d.png",
            "sheet_x": 1,
            "sheet_y": 2,
            "short_name": "point_up",
            "short_names": [
                "point_up"
            ],
            "text": None,
            "texts": None,
            "category": "People",
            "sort_order": 116,
            "added_in": "1.4",
            "has_img_apple": True,
            "has_img_google": True,
            "has_img_twitter": True,
            "has_img_emojione": False,
            "has_img_facebook": False,
            "has_img_messenger": False,
            "skin_variations": {
                "1F3FB": {
                    "unified": "261D-1F3FB",
                    "image": "261d-1f3fb.png",
                    "sheet_x": 1,
                    "sheet_y": 3,
                    "added_in": "6.0",
                    "has_img_apple": True,
                    "has_img_google": False,
                    "has_img_twitter": False,
                    "has_img_emojione": False,
                    "has_img_facebook": False,
                    "has_img_messenger": False
                },
            },
            "obsoletes": "ABCD-1234",
            "obsoleted_by": "5678-90EF"
        })

    def test_init(self):
        """Top-level record fields are exposed as plain attributes."""
        self.assertEqual("261D", self.emoji.unified)
        self.assertEqual("point_up", self.emoji.short_names[0])
        self.assertEqual("People", self.emoji.category)
        self.assertEqual("1.4", self.emoji.added_in)
        self.assertEqual("261d.png", self.emoji.image)
        self.assertEqual(1, self.emoji.sheet_x)
        self.assertEqual(True, self.emoji.has_img_apple)
        self.assertEqual(False, self.emoji.has_img_emojione)

    def test_skin_variations(self):
        """Skin variations are parsed into nested EmojiChar-like objects."""
        self.assertEqual("261D-1F3FB", self.emoji.skin_variations['1F3FB'].unified)
        self.assertEqual(True, self.emoji.skin_variations['1F3FB'].has_img_apple)

    def test_char(self):
        """``char`` renders the base codepoint."""
        self.assertEqual('☝', self.emoji.char)

    def test_all_variations(self):
        """All unified codes: base, FE0F variation, and skin variation."""
        self.assertEqual(['261D', '261D-FE0F', '261D-1F3FB'], self.emoji.all_variations)

    def test_chars(self):
        """``chars`` renders each variation as an actual string."""
        self.assertEqual('☝', self.emoji.chars[0])
        self.assertEqual('☝️', self.emoji.chars[1])  # Not the same char, this is \u261D\uFE0F

    def test_doublebyte(self):
        """A single-codepoint emoji is not double-byte."""
        self.assertEqual(False, self.emoji.is_doublebyte)

    def test_str(self):
        self.assertEqual('WHITE UP POINTING INDEX', self.emoji.__str__())

    def test_repr(self):
        self.assertEqual('EmojiChar("WHITE UP POINTING INDEX")', self.emoji.__repr__())
| alexmick/emoji-data-python | tests/test_emoji_char.py | Python | mit | 3,008 |
"""
media types
"""
from __future__ import print_function
from xnb_parse.file_formats.wav import write_wav
from xnb_parse.file_formats.xml_utils import ET
from xnb_parse.xna_types.xna_primitive import Enum
class SoundEffect(object):
    """A decoded sound effect: raw format bytes, sample data, and loop info."""

    def __init__(self, sound_format, sound_data, loop_start, loop_length, duration, needs_swap=False):
        self.sound_format = sound_format
        self.sound_data = sound_data
        self.loop_start = loop_start
        self.loop_length = loop_length
        self.duration = duration
        self.needs_swap = needs_swap

    def __str__(self):
        summary = (len(self.sound_format), len(self.sound_data), self.duration,
                   self.loop_start, self.loop_length)
        return "SoundEffect fs:{} ds:{} d:{}ms ls:{} ll:{}".format(*summary)

    def export(self, filename):
        # Serialization (including optional byte swapping) is delegated
        # to the WAV writer.
        write_wav(filename, self.sound_format, self.sound_data, self.needs_swap)

    def xml(self, parent=None):
        """Return a ``SoundEffect`` XML element carrying the loop metadata."""
        root = ET.Element('SoundEffect') if parent is None else ET.SubElement(parent, 'SoundEffect')
        root.set('loopStart', str(self.loop_start))
        root.set('loopLength', str(self.loop_length))
        root.set('duration', str(self.duration))
        return root
class Song(object):
    """Reference to a song asset: source filename plus duration in ms."""

    def __init__(self, filename, duration):
        self.filename = filename
        self.duration = duration

    def __str__(self):
        return "Song f:'{}' d:{}ms".format(self.filename, self.duration)

    def xml(self, parent=None):
        """Return a ``Song`` XML element (appended to *parent* if given)."""
        root = ET.Element('Song') if parent is None else ET.SubElement(parent, 'Song')
        root.set('filename', self.filename)
        root.set('duration', str(self.duration))
        return root
class Video(object):
    """Reference to a video asset with optional display/soundtrack metadata."""

    def __init__(self, filename, duration, width, height, fps, video_soundtrack_type):
        self.filename = filename
        self.duration = duration
        self.width = width
        self.height = height
        self.fps = fps
        self.video_soundtrack_type = video_soundtrack_type

    def __str__(self):
        return "Video f:'{}' d:{}ms s:{}x{} f:{:.2f} t:{}".format(self.filename, self.duration, self.width, self.height,
                                                                  self.fps, self.video_soundtrack_type)

    def xml(self, parent=None):
        """Return a ``Video`` XML element; only non-None fields are emitted."""
        root = ET.Element('Video') if parent is None else ET.SubElement(parent, 'Video')
        if self.filename is not None:
            root.set('filename', self.filename)
        # Remaining attributes are stringified uniformly.
        for attr_name, value in (('duration', self.duration),
                                 ('width', self.width),
                                 ('height', self.height),
                                 ('fps', self.fps),
                                 ('soundtrackType', self.video_soundtrack_type)):
            if value is not None:
                root.set(attr_name, str(value))
        return root
class VideoSoundtrackType(Enum):
    """Soundtrack content of a video: music, dialog, or both."""
    __slots__ = ()
    # Same mapping as dict(enumerate([...])), spelled out explicitly.
    enum_values = {0: 'Music', 1: 'Dialog', 2: 'MusicAndDialog'}
| fesh0r/xnb_parse | xnb_parse/xna_types/xna_media.py | Python | mit | 3,201 |
try:
import simplejson as json
except ImportError:
import json
import datetime
from flask import request
from werkzeug.exceptions import NotFound
from flask_peewee.utils import check_password
from flask_peewee.utils import get_object_or_404
from flask_peewee.utils import make_password
from flask_peewee.tests.base import FlaskPeeweeTestCase
from flask_peewee.tests.test_app import Message
from flask_peewee.tests.test_app import Note
from flask_peewee.tests.test_app import User
from flask_peewee.tests.test_app import app as flask_app
class UtilsTestCase(FlaskPeeweeTestCase):
    """Unit tests for flask_peewee helper utilities."""

    def setUp(self):
        super(UtilsTestCase, self).setUp()

    def test_get_object_or_404(self):
        """``get_object_or_404`` accepts either a model class or a query."""
        user = self.create_user('test', 'test')

        # test with model as first arg
        self.assertRaises(NotFound, get_object_or_404, User, User.username=='not-here')
        self.assertEqual(user, get_object_or_404(User, User.username=='test'))

        # test with query as first arg: the extra filter is applied on top
        # of the query, so a user outside the query also 404s.
        active = User.select().where(User.active==True)
        inactive = User.select().where(User.active==False)
        self.assertRaises(NotFound, get_object_or_404, active, User.username=='not-here')
        self.assertRaises(NotFound, get_object_or_404, inactive, User.username=='test')
        self.assertEqual(user, get_object_or_404(active, User.username=='test'))

    def test_passwords(self):
        """``check_password`` must match exactly (case, whitespace, empty)."""
        p = make_password('testing')
        self.assertTrue(check_password('testing', p))
        self.assertFalse(check_password('testing ', p))
        self.assertFalse(check_password('Testing', p))
        self.assertFalse(check_password('', p))

        # A different password must not produce the same stored hash.
        p2 = make_password('Testing')
        self.assertFalse(p == p2)
| ariakerstein/twitterFlaskClone | project/lib/python2.7/site-packages/flask_peewee/tests/utils.py | Python | mit | 1,750 |
from math import copysign
from Util.geometry import wrap_to_pi
import numpy as np
from time import time
from collections import deque
class PID:
    """Discrete PID regulator with optional anti-windup and angular wrapping.

    Keyword-only options:
        deadzone: error magnitude below which the derivative term is
            disabled (reduces jitter around the setpoint).
        signed_error: treat the error as an angle; the derivative
            difference's sign is corrected through ``wrap_to_pi`` when the
            error crosses +/-pi.
        anti_windup: bound the integral to the errors accumulated over the
            last ``anti_windup_max_time`` seconds.

    Fixes over the previous revision:
      * ``execute`` no longer divides by zero when two calls land within
        the clock resolution (dt == 0).
      * ``reset`` now also clears the anti-windup sample deque; previously
        stale samples were later subtracted from a fresh integral.
    """

    def __init__(self, kp: float, ki: float, kd: float, *,
                 deadzone: float=0.0,
                 signed_error: bool=False,
                 anti_windup: bool=True, anti_windup_max_time: float=0.5):
        self.kp = kp
        self.ki = ki
        self.kd = kd
        self.err_sum = 0          # running integral of the error
        self.last_err = 0
        self.last_time = 0        # 0 means "no previous sample yet"
        self.deadzone = deadzone
        self.signed_error = signed_error
        self.anti_windup = anti_windup
        self.error_deque = deque()  # (error, dt) samples in the windup window
        self.anti_windup_time = 0   # total dt currently covered by the deque
        self.anti_windup_max_time = anti_windup_max_time

    def execute(self, err: float) -> float:
        """Return the regulator output for the current error sample."""
        current_time = time()
        if not self.last_time:
            # First sample: no dt available yet, fall back to pure P control.
            self.last_time = current_time
            return err * self.kp
        dt, self.last_time = current_time - self.last_time, current_time
        # Two calls within the clock resolution would give dt == 0 and a
        # ZeroDivisionError in the derivative term below; clamp to a tiny
        # positive step instead.
        if dt <= 0.0:
            dt = 1e-9
        if self.signed_error and (abs(err) > np.pi):
            # Angular error: correct the sign of the difference so a jump
            # across +/-pi does not flip the derivative direction.
            # NOTE: only the sign is adjusted; the magnitude is kept.
            d_err = err - self.last_err
            d_err = copysign(d_err, wrap_to_pi(d_err))
        else:
            d_err = err - self.last_err
        self.last_err = err
        self.err_sum += err
        if self.anti_windup:
            # Keep only the samples inside the windup window; older errors
            # are removed from the integral as they fall out.
            self.error_deque.append((err, dt))
            self.anti_windup_time += dt
            while self.anti_windup_time > self.anti_windup_max_time:
                old_err, old_dt = self.error_deque.popleft()
                self.anti_windup_time -= old_dt
                self.err_sum -= old_err
        kd = 0 if abs(err) < self.deadzone else self.kd
        return (err * self.kp) + (self.err_sum * self.ki * dt) + (d_err * kd / dt)

    def reset(self):
        """Forget all accumulated state (integral, history, timing)."""
        self.last_time = 0
        self.err_sum = 0
        self.last_err = 0
        self.anti_windup_time = 0
        # Also drop the buffered samples; otherwise their errors would be
        # subtracted from the fresh integral after the reset.
        self.error_deque.clear()
| RoboCupULaval/StrategyIA | Engine/Controller/Regulators/PID.py | Python | mit | 1,874 |
# -*- coding: utf-8 -*-
from django.core import urlresolvers
from django.conf import settings
from django.utils import translation
def reverse(viewname, urlconf=None, args=[], kwargs={}, prefix=None):
    """Locale-aware replacement for Django's ``reverse``.

    Pops an optional ``locale`` entry from ``kwargs`` (defaulting to the
    currently active translation language) and, when ``USE_I18N`` is on,
    inserts ``<locale>/`` right after the script prefix in the resolved path.
    """
    # NOTE(review): ``pop`` mutates a caller-supplied kwargs dict, and
    # args=[]/kwargs={} are shared mutable defaults -- safe as written
    # (the default dict never contains 'locale'), but fragile if extended.
    locale = kwargs.pop('locale', translation.get_language())
    path = django_reverse(viewname, urlconf, args, kwargs, prefix)
    script_prefix = urlresolvers.get_script_prefix()
    if settings.USE_I18N:
        path = script_prefix + locale + '/' + path.partition(script_prefix)[2]
    return path


# Monkey-patch: keep a reference to Django's original implementation and
# install the locale-aware wrapper globally, so every reverse()/{% url %}
# lookup goes through it.
django_reverse = urlresolvers.reverse
urlresolvers.reverse = reverse
| kron4eg/django-localize | localize/models.py | Python | bsd-3-clause | 576 |
#!/usr/bin/env python
# -*- coding : Utf-8 -*-
# Les listes
#-----------------------------------------------------------------------------
# Slicing and in-place edits on a sample list.
L = ['Valérie', 43, 'Christian', 41, 'Adèle', 12,
     'Lise', 11, 'Garance', 9]
print(L[1:-1])
L[3:3] = ['Boudin blanc']  # inserts one/several element(s)
L[3] = 'Boudin blanc'  # replaces one element with another
copieL = L[:]  # otherwise only the reference (pointer) is copied
# For nested lists, a deep copy is needed:
import copy
L2 = copy.deepcopy(L)
# Methods on lists
# They REPLACE the list with the modified list (in-place mutation)
#L.append(x)
#L.extend(L)
#L.insert(i, x)
#L.remove(x)
#L.pop([i])
#L.index(i)
#L.count(x)
#L.sort()
#L.reverse()
def cherche(L, x):
    """Return True if ``x`` occurs in the list ``L``, else False.

    The previous hand-rolled while-loop re-implemented Python's membership
    operator; ``x in L`` performs the same linear scan idiomatically.
    """
    return x in L
# List comprehensions
[3*x for x in range(4)]
from math import *
# NOTE(review): `test` is never defined in this script, so the next two
# lines raise NameError when executed -- presumably a sample list was
# meant to be defined above; confirm against the original lesson text.
[sin(x) for x in test if x**2 > 100]
[x*sin(y) for x in test for y in range(4)]
# Flattening a nested list with a double comprehension:
l = [[1, 2, 3], [40, 50, 60], [700, 800, 900]]
[x for xl in l for x in xl]
| ValerieMauduit/InitBook | Chapter1/listes_comprehension.py | Python | gpl-3.0 | 1,030 |
from list_tests import * | opendoor/django-comlink | comlink/tests/__init__.py | Python | agpl-3.0 | 25 |
"""
High-level QEMU test utility functions.
This module is meant to reduce code size by performing common test procedures.
Generally, code here should look like test code.
More specifically:
- Functions in this module should raise exceptions if things go wrong
- Functions in this module typically use functions and classes from
lower-level modules (e.g. utils_misc, qemu_vm, aexpect).
- Functions in this module should not be used by lower-level modules.
- Functions in this module should be used in the right context.
For example, a function should not be used where it may display
misleading or inaccurate info or debug messages.
:copyright: 2008-2013 Red Hat Inc.
"""
import cPickle
import errno
import fcntl
import logging
import os
import socket
import threading
import time
from autotest.client import utils
from autotest.client.shared import error
from autotest.client.shared.syncdata import SyncData, SyncListenServer
from virttest import env_process, remote, storage, utils_misc
try:
from virttest.staging import utils_memory
except ImportError:
from autotest.client.shared import utils_memory
def guest_active(vm):
    """Return True when the guest reported by ``vm``'s monitor is running.

    Handles both human-monitor output (a string) and QMP output (a dict
    that exposes either a ``status`` or a ``running`` field).
    """
    status = vm.monitor.info("status")
    if isinstance(status, str):
        return "status: running" in status
    if "status" in status:
        return status.get("status") == "running"
    return status.get("running")
def get_numa_status(numa_node_info, qemu_pid, debug=True):
    """
    Get the qemu process memory use status and the cpu list in each node.

    :param numa_node_info: Host numa node information
    :type numa_node_info: NumaInfo object
    :param qemu_pid: process id of qemu
    :type qemu_pid: string
    :param debug: Print the debug info or not
    :type debug: bool
    :return: memory and cpu list in each node
    :rtype: tuple
    """
    node_list = numa_node_info.online_nodes
    qemu_memory = []
    qemu_cpu = []
    cpus = utils_misc.get_pid_cpu(qemu_pid)
    for node_id in node_list:
        # Pages the qemu process has on this node, summed over all the
        # "N<node>" entries from /proc/<pid>/numa_maps.
        qemu_memory_status = utils_memory.read_from_numa_maps(qemu_pid,
                                                              "N%d" % node_id)
        memory = sum([int(_) for _ in qemu_memory_status.values()])
        qemu_memory.append(memory)
        # CPUs of the process that belong to this node.
        cpu = [_ for _ in cpus if _ in numa_node_info.nodes[node_id].cpus]
        qemu_cpu.append(cpu)
        if debug:
            logging.debug("qemu-kvm process using %s pages and cpu %s in "
                          "node %s" % (memory, " ".join(cpu), node_id))
    return (qemu_memory, qemu_cpu)
def pin_vm_threads(vm, node):
    """
    Pin VM threads to single cpu of a numa node

    :param vm: VM object
    :param node: NumaNode object
    """
    if len(vm.vcpu_threads) + len(vm.vhost_threads) < len(node.cpus):
        # The node has enough cpus for every vcpu AND vhost thread.
        for i in vm.vcpu_threads:
            logging.info("pin vcpu thread(%s) to cpu(%s)" % (i, node.pin_cpu(i)))
        for i in vm.vhost_threads:
            logging.info("pin vhost thread(%s) to cpu(%s)" % (i, node.pin_cpu(i)))
    elif (len(vm.vcpu_threads) <= len(node.cpus) and
          len(vm.vhost_threads) <= len(node.cpus)):
        # Each group fits on its own; vhost threads are pinned with
        # pin_cpu(..., extra=True).
        # NOTE(review): `extra=True` semantics depend on NumaNode.pin_cpu --
        # presumably it allows sharing already-pinned cpus; confirm there.
        for i in vm.vcpu_threads:
            logging.info("pin vcpu thread(%s) to cpu(%s)" %
                         (i, node.pin_cpu(i)))
        for i in vm.vhost_threads:
            logging.info("pin vhost thread(%s) to extra cpu(%s)" %
                         (i, node.pin_cpu(i, extra=True)))
    else:
        logging.info("Skip pinning, no enough nodes")
def migrate(vm, env=None, mig_timeout=3600, mig_protocol="tcp",
            mig_cancel=False, offline=False, stable_check=False,
            clean=False, save_path=None, dest_host='localhost', mig_port=None):
    """
    Migrate a VM locally and re-register it in the environment.

    :param vm: The VM to migrate.
    :param env: The environment dictionary.  If omitted, the migrated VM will
            not be registered.
    :param mig_timeout: timeout value for migration.
    :param mig_protocol: migration protocol
    :param mig_cancel: Test migrate_cancel or not when protocol is tcp.
    :param dest_host: Destination host (defaults to 'localhost').
    :param mig_port: Port that will be used for migration.
    :return: The post-migration VM, in case of same host migration, True in
            case of multi-host migration.
    """
    def mig_finished():
        # Abort early if either side died; otherwise poll the monitor.
        # NOTE(review): returns None (falsy) when the monitor query raises,
        # so transient monitor errors are treated as "not finished yet".
        if dest_vm.is_dead():
            raise error.TestFail("Dest VM died during migration.")
        if not offline and vm.is_dead():
            raise error.TestFail("Source VM died during migration")
        try:
            o = vm.monitor.info("migrate")
            if isinstance(o, str):
                return "status: active" not in o
            else:
                return o.get("status") != "active"
        except Exception:
            pass

    def mig_succeeded():
        o = vm.monitor.info("migrate")
        if isinstance(o, str):
            return "status: completed" in o
        else:
            return o.get("status") == "completed"

    def mig_failed():
        o = vm.monitor.info("migrate")
        if isinstance(o, str):
            return "status: failed" in o
        else:
            return o.get("status") == "failed"

    def mig_cancelled():
        # Both spellings occur depending on the qemu version.
        o = vm.monitor.info("migrate")
        if isinstance(o, str):
            return ("Migration status: cancelled" in o or
                    "Migration status: canceled" in o)
        else:
            return (o.get("status") == "cancelled" or
                    o.get("status") == "canceled")

    def wait_for_migration():
        if not utils_misc.wait_for(mig_finished, mig_timeout, 2, 2,
                                   "Waiting for migration to finish"):
            raise error.TestFail("Timeout expired while waiting for migration "
                                 "to finish")

    # Local migration clones the VM and boots the clone in incoming mode.
    if dest_host == 'localhost':
        dest_vm = vm.clone()

    if (dest_host == 'localhost') and stable_check:
        # Pause the dest vm after creation
        _ = dest_vm.params.get('extra_params', '') + ' -S'
        dest_vm.params['extra_params'] = _

    if dest_host == 'localhost':
        dest_vm.create(migration_mode=mig_protocol, mac_source=vm)

    try:
        try:
            # Build the migration URI for the chosen protocol.
            if mig_protocol in ["tcp", "rdma", "x-rdma"]:
                if dest_host == 'localhost':
                    uri = mig_protocol + ":0:%d" % dest_vm.migration_port
                else:
                    uri = mig_protocol + ':%s:%d' % (dest_host, mig_port)
            elif mig_protocol == "unix":
                uri = "unix:%s" % dest_vm.migration_file
            elif mig_protocol == "exec":
                uri = '"exec:nc localhost %s"' % dest_vm.migration_port

            if offline:
                vm.pause()
            vm.monitor.migrate(uri)

            if mig_cancel:
                # Cancellation path: the source VM survives and is returned.
                time.sleep(2)
                vm.monitor.cmd("migrate_cancel")
                if not utils_misc.wait_for(mig_cancelled, 60, 2, 2,
                                           "Waiting for migration "
                                           "cancellation"):
                    raise error.TestFail("Failed to cancel migration")
                if offline:
                    vm.resume()
                if dest_host == 'localhost':
                    dest_vm.destroy(gracefully=False)
                return vm
            else:
                wait_for_migration()
                if (dest_host == 'localhost') and stable_check:
                    # NOTE(review): ``None or "/tmp"`` always evaluates to
                    # "/tmp", so the ``save_path`` parameter is ignored here.
                    save_path = None or "/tmp"
                    save1 = os.path.join(save_path, "src")
                    save2 = os.path.join(save_path, "dst")
                    vm.save_to_file(save1)
                    dest_vm.save_to_file(save2)

                    # Fail if we see deltas
                    md5_save1 = utils.hash_file(save1)
                    md5_save2 = utils.hash_file(save2)
                    if md5_save1 != md5_save2:
                        raise error.TestFail("Mismatch of VM state before "
                                             "and after migration")

                if (dest_host == 'localhost') and offline:
                    dest_vm.resume()
        except Exception:
            # On any failure, tear down the half-created destination VM.
            if dest_host == 'localhost':
                dest_vm.destroy()
            raise

    finally:
        if (dest_host == 'localhost') and stable_check and clean:
            logging.debug("Cleaning the state files")
            if os.path.isfile(save1):
                os.remove(save1)
            if os.path.isfile(save2):
                os.remove(save2)

    # Report migration status
    if mig_succeeded():
        logging.info("Migration finished successfully")
    elif mig_failed():
        raise error.TestFail("Migration failed")
    else:
        status = vm.monitor.info("migrate")
        raise error.TestFail("Migration ended with unknown status: %s" %
                             status)

    if dest_host == 'localhost':
        if dest_vm.monitor.verify_status("paused"):
            logging.debug("Destination VM is paused, resuming it")
            dest_vm.resume()

    # Kill the source VM
    vm.destroy(gracefully=False)

    # Replace the source VM with the new cloned VM
    if (dest_host == 'localhost') and (env is not None):
        env.register_vm(vm.name, dest_vm)

    # Return the new cloned VM
    if dest_host == 'localhost':
        return dest_vm
    else:
        return vm
class MigrationData(object):
    """Bundle of per-migration parameters used by the multi-host helpers."""

    def __init__(self, params, srchost, dsthost, vms_name, params_append):
        # Work on a private copy so the caller's params stay untouched.
        self.params = params.copy()
        self.params.update(params_append)
        hostid = params.get("hostid")
        self.source = hostid == srchost
        self.destination = hostid == dsthost
        self.src = srchost
        self.dst = dsthost
        self.hosts = [srchost, dsthost]
        # Identifier of this migration: endpoints plus the involved VMs.
        self.mig_id = {'src': srchost, 'dst': dsthost, "vms": vms_name}
        self.vms_name = vms_name
        self.vms = []
        self.vm_ports = None

    def is_src(self):
        """Return True when this host is the migration source."""
        return self.source

    def is_dst(self):
        """Return True when this host is the migration destination."""
        return self.destination
class MultihostMigration(object):
    """
    Class that provides a framework for multi-host migration.
    Migration can be run both synchronously and asynchronously.
    To specify what is going to happen during the multi-host
    migration, it is necessary to reimplement the method
    migration_scenario. It is possible to start multiple migrations
    in separate threads, since self.migrate is thread safe.
    Only one test using multihost migration framework should be
    started on one machine otherwise it is necessary to solve the
    problem with listen server port.
    Multihost migration starts SyncListenServer through which
    all messages are transferred, since the multiple hosts can
    be in different states.
    Class SyncData is used to transfer data over network or
    synchronize the migration process. Synchronization sessions
    are recognized by session_id.
    It is important to note that, in order to have multi-host
    migration, one needs shared guest image storage. The simplest
    case is when the guest images are on an NFS server.
    Example:
    ::
        class TestMultihostMigration(utils_misc.MultihostMigration):
            def __init__(self, test, params, env):
                super(TestMultihostMigration, self).__init__(test, params, env)
            def migration_scenario(self):
                srchost = self.params.get("hosts")[0]
                dsthost = self.params.get("hosts")[1]
                def worker(mig_data):
                    vm = env.get_vm("vm1")
                    session = vm.wait_for_login(timeout=self.login_timeout)
                    session.sendline("nohup dd if=/dev/zero of=/dev/null &")
                    session.cmd("killall -0 dd")
                def check_worker(mig_data):
                    vm = env.get_vm("vm1")
                    session = vm.wait_for_login(timeout=self.login_timeout)
                    session.cmd("killall -9 dd")
                # Almost synchronized migration, waiting to end it.
                # Work is started only on first VM.
                self.migrate_wait(["vm1", "vm2"], srchost, dsthost,
                                  worker, check_worker)
                # Migration started in different threads.
                # It allows to start multiple migrations simultaneously.
                mig1 = self.migrate(["vm1"], srchost, dsthost,
                                    worker, check_worker)
                mig2 = self.migrate(["vm2"], srchost, dsthost)
                mig2.join()
                mig1.join()
        mig = TestMultihostMigration(test, params, env)
        mig.run()
    """
    def __init__(self, test, params, env, preprocess_env=True):
        """
        Initialize shared multi-host migration state.

        :param test: Test object (bindir is used for image preprocessing).
        :param params: Test parameters.
        :param env: Test environment holding the registered vms.
        :param preprocess_env: When True, prepare guest images and then wait
                on the 'disk_prepared' barrier together with all hosts.
        """
        self.test = test
        self.params = params
        self.env = env
        self.hosts = params.get("hosts")
        self.hostid = params.get('hostid', "")
        self.comm_port = int(params.get("comm_port", 13234))
        vms_count = len(params["vms"].split())
        self.login_timeout = int(params.get("login_timeout", 360))
        # Default timeouts scale with the number of vms being migrated.
        self.disk_prepare_timeout = int(params.get("disk_prepare_timeout",
                                                   160 * vms_count))
        self.finish_timeout = int(params.get("finish_timeout",
                                             120 * vms_count))
        self.new_params = None
        if params.get("clone_master") == "yes":
            self.clone_master = True
        else:
            self.clone_master = False
        self.mig_protocol = params.get("mig_protocol")
        self.mig_timeout = int(params.get("mig_timeout"))
        # Port used to communicate info between source and destination
        self.regain_ip_cmd = params.get("regain_ip_cmd", None)
        self.not_login_after_mig = params.get("not_login_after_mig", None)
        # Serializes env_process calls issued from parallel migrations.
        self.vm_lock = threading.Lock()
        self.sync_server = None
        # Only the clone-master host runs the SyncListenServer; all other
        # hosts reach it through SyncData.
        if self.clone_master:
            self.sync_server = SyncListenServer()
        if preprocess_env:
            self.preprocess_env()
            self._hosts_barrier(self.hosts, self.hosts, 'disk_prepared',
                                self.disk_prepare_timeout)
    def migration_scenario(self):
        """
        Multi Host migration_scenario is started from method run where the
        exceptions are checked. It is not necessary to take care of
        cleaning up after test crash or finish.
        """
        raise NotImplementedError
    def post_migration(self, vm, cancel_delay, mig_offline, dsthost, vm_ports,
                       not_wait_for_migration, fd, mig_data):
        """
        Hook invoked for each vm right after vm.migrate() returns.
        Subclasses override it to add protocol-specific post steps.
        """
        pass
    def migrate_vms_src(self, mig_data):
        """
        Migrate vms source.
        :param mig_data: Data for migration.
        To change the way a machine migrates, reimplement this method.
        """
        def mig_wrapper(vm, cancel_delay, dsthost, vm_ports,
                        not_wait_for_migration, mig_offline, mig_data):
            # Per-vm worker: run the migration itself, then the
            # protocol-specific post_migration() hook.
            vm.migrate(protocol=self.mig_protocol, cancel_delay=cancel_delay,
                       offline=mig_offline, dest_host=dsthost,
                       remote_port=vm_ports[vm.name],
                       not_wait_for_migration=not_wait_for_migration)
            self.post_migration(vm, cancel_delay, mig_offline, dsthost,
                                vm_ports, not_wait_for_migration, None,
                                mig_data)
        logging.info("Start migrating now...")
        cancel_delay = mig_data.params.get("cancel_delay")
        if cancel_delay is not None:
            cancel_delay = int(cancel_delay)
        not_wait_for_migration = mig_data.params.get("not_wait_for_migration")
        if not_wait_for_migration == "yes":
            not_wait_for_migration = True
        mig_offline = mig_data.params.get("mig_offline")
        if mig_offline == "yes":
            mig_offline = True
        else:
            mig_offline = False
        # Migrate all vms in parallel, one worker per vm.
        multi_mig = []
        for vm in mig_data.vms:
            multi_mig.append((mig_wrapper, (vm, cancel_delay, mig_data.dst,
                                            mig_data.vm_ports,
                                            not_wait_for_migration,
                                            mig_offline, mig_data)))
        utils_misc.parallel(multi_mig)
    def migrate_vms_dest(self, mig_data):
        """
        Migrate vms destination. This function is started on dest host during
        migration.
        :param mig_data: Data for migration.
        """
        pass
    def __del__(self):
        # Best-effort shutdown of the sync server on garbage collection;
        # cleanup() is the explicit path.
        if self.sync_server:
            self.sync_server.close()
    def master_id(self):
        """Return the id of the master host (first entry of self.hosts)."""
        return self.hosts[0]
    def _hosts_barrier(self, hosts, session_id, tag, timeout):
        """
        Block until every host in `hosts` reaches the barrier identified by
        (session_id, tag), or raise when `timeout` expires.
        """
        logging.debug("Barrier timeout: %d tags: %s" % (timeout, tag))
        tags = SyncData(self.master_id(), self.hostid, hosts,
                        "%s,%s,barrier" % (str(session_id), tag),
                        self.sync_server).sync(tag, timeout)
        logging.debug("Barrier tag %s" % (tags))
    def preprocess_env(self):
        """
        Prepare env to start vms.
        """
        storage.preprocess_images(self.test.bindir, self.params, self.env)
    def _check_vms_source(self, mig_data):
        """
        Source-side startup check: wait for guests to boot, then receive the
        destination's listening migration ports over the sync channel.
        """
        start_mig_tout = mig_data.params.get("start_migration_timeout", None)
        if start_mig_tout is None:
            for vm in mig_data.vms:
                vm.wait_for_login(timeout=self.login_timeout)
        if mig_data.params.get("host_mig_offline") != "yes":
            sync = SyncData(self.master_id(), self.hostid, mig_data.hosts,
                            mig_data.mig_id, self.sync_server)
            mig_data.vm_ports = sync.sync(timeout=240)[mig_data.dst]
            logging.info("Received from destination the migration port %s",
                         str(mig_data.vm_ports))
    def _check_vms_dest(self, mig_data):
        """
        Destination-side startup check: publish the per-vm incoming
        migration ports to the source over the sync channel.
        """
        mig_data.vm_ports = {}
        for vm in mig_data.vms:
            logging.info("Communicating to source migration port %s",
                         vm.migration_port)
            mig_data.vm_ports[vm.name] = vm.migration_port
        if mig_data.params.get("host_mig_offline") != "yes":
            SyncData(self.master_id(), self.hostid,
                     mig_data.hosts, mig_data.mig_id,
                     self.sync_server).sync(mig_data.vm_ports, timeout=240)
    def _prepare_params(self, mig_data):
        """
        Prepare separate params for vm migration.
        :param mig_data: Migration data; its vms_name list is used.
        :return: Copy of self.params restricted to the migrated vms.
        """
        new_params = mig_data.params.copy()
        new_params["vms"] = " ".join(mig_data.vms_name)
        return new_params
    def _check_vms(self, mig_data):
        """
        Check if vms are started correctly.
        :param mig_data: Migration data; dispatches on source/destination
                         role of this host.
        """
        if mig_data.is_src():
            self._check_vms_source(mig_data)
        else:
            self._check_vms_dest(mig_data)
    def _quick_check_vms(self, mig_data):
        """
        Check if vms are started correctly.
        :param mig_data: Migration data; missing vm objects are fetched from
                         env and appended to mig_data.vms.
        """
        logging.info("Try check vms %s" % (mig_data.vms_name))
        for vm in mig_data.vms_name:
            if self.env.get_vm(vm) not in mig_data.vms:
                mig_data.vms.append(self.env.get_vm(vm))
        for vm in mig_data.vms:
            logging.info("Check vm %s on host %s" % (vm.name, self.hostid))
            vm.verify_alive()
    def prepare_for_migration(self, mig_data, migration_mode):
        """
        Prepare destination of migration for migration.
        :param mig_data: Class with data necessary for migration.
        :param migration_mode: Migration mode for prepare machine.
        """
        new_params = self._prepare_params(mig_data)
        new_params['migration_mode'] = migration_mode
        new_params['start_vm'] = 'yes'
        if self.params.get("migration_sync_vms", "no") == "yes":
            # Sync mode: the source serializes its vm objects and ships
            # them to the destination so both sides use one configuration.
            if mig_data.is_src():
                self.vm_lock.acquire()
                env_process.process(self.test, new_params, self.env,
                                    env_process.preprocess_image,
                                    env_process.preprocess_vm)
                self.vm_lock.release()
                self._quick_check_vms(mig_data)
                # Send vms configuration to dst host.
                # NOTE: cPickle over the intra-test sync channel; both ends
                # are trusted test hosts.
                vms = cPickle.dumps([self.env.get_vm(vm_name)
                                     for vm_name in mig_data.vms_name])
                self.env.get_vm(mig_data.vms_name[0]).monitor.info("qtree")
                SyncData(self.master_id(), self.hostid,
                         mig_data.hosts, mig_data.mig_id,
                         self.sync_server).sync(vms, timeout=240)
            elif mig_data.is_dst():
                # Load vms configuration from src host.
                vms = cPickle.loads(SyncData(self.master_id(), self.hostid,
                                             mig_data.hosts, mig_data.mig_id,
                                             self.sync_server).sync(timeout=240)[mig_data.src])
                for vm in vms:
                    # Save config to env. Used for create machine.
                    # When reuse_previous_config params is set don't check
                    # machine.
                    vm.address_cache = self.env.get("address_cache")
                    self.env.register_vm(vm.name, vm)
                self.vm_lock.acquire()
                env_process.process(self.test, new_params, self.env,
                                    env_process.preprocess_image,
                                    env_process.preprocess_vm)
                vms[0].monitor.info("qtree")
                self.vm_lock.release()
                self._quick_check_vms(mig_data)
        else:
            # Independent mode: each host preprocesses its own vms.
            self.vm_lock.acquire()
            env_process.process(self.test, new_params, self.env,
                                env_process.preprocess_image,
                                env_process.preprocess_vm)
            self.vm_lock.release()
            self._quick_check_vms(mig_data)
        # Role-specific final check (port exchange over the sync channel).
        self._check_vms(mig_data)
    def migrate_vms(self, mig_data):
        """
        Migrate vms: dispatch to the source or destination implementation
        according to this host's role.
        """
        if mig_data.is_src():
            self.migrate_vms_src(mig_data)
        else:
            self.migrate_vms_dest(mig_data)
    def check_vms_dst(self, mig_data):
        """
        Check vms after migrate.
        :param mig_data: object with migration data.
        """
        for vm in mig_data.vms:
            vm.resume()
            if not guest_active(vm):
                raise error.TestFail("Guest not active after migration")
        logging.info("Migrated guest appears to be running")
        logging.info("Logging into migrated guest after migration...")
        for vm in mig_data.vms:
            if self.regain_ip_cmd is not None:
                session_serial = vm.wait_for_serial_login(timeout=self.login_timeout)
                # There is sometime happen that system sends some message on
                # serial console and IP renew command block test. Because
                # there must be added "sleep" in IP renew command.
                session_serial.cmd(self.regain_ip_cmd)
            if not self.not_login_after_mig:
                vm.wait_for_login(timeout=self.login_timeout)
    def check_vms_src(self, mig_data):
        """
        Check vms after migrate.
        :param mig_data: object with migration data.
        """
        pass
    def postprocess_env(self):
        """
        Kill vms and delete cloned images.
        """
        pass
    def before_migration(self, mig_data):
        """
        Do something right before migration.
        :param mig_data: object with migration data.
        """
        pass
    def migrate(self, vms_name, srchost, dsthost, start_work=None,
                check_work=None, params_append=None):
        """
        Migrate machine from srchost to dsthost. It executes start_work on
        source machine before migration and executes check_work on dsthost
        after migration.
        Migration execution progress:
        ::
            source host                   |   dest host
            --------------------------------------------------------
               prepare guest on both sides of migration
                - start machine and check if machine works
                - synchronize transfer data needed for migration
            --------------------------------------------------------
            start work on source guests   |   wait for migration
            --------------------------------------------------------
                         migrate guest to dest host.
                  wait on finish migration synchronization
            --------------------------------------------------------
                                          |   check work on vms
            --------------------------------------------------------
                        wait for sync on finish migration
        :param vms_name: List of vms.
        :param srchost: src host id.
        :param dsthost: dst host id.
        :param start_work: Function started before migration.
        :param check_work: Function started after migration.
        :param params_append: Append params to self.params only for migration.
        """
        def migrate_wrap(vms_name, srchost, dsthost, start_work=None,
                         check_work=None, params_append=None):
            # Runs on the hosts that take part in this migration (src/dst).
            logging.info("Starting migrate vms %s from host %s to %s" %
                         (vms_name, srchost, dsthost))
            pause = self.params.get("paused_after_start_vm")
            mig_error = None
            mig_data = MigrationData(self.params, srchost, dsthost,
                                     vms_name, params_append)
            cancel_delay = self.params.get("cancel_delay", None)
            host_offline_migration = self.params.get("host_mig_offline")
            try:
                try:
                    if mig_data.is_src():
                        self.prepare_for_migration(mig_data, None)
                    elif self.hostid == dsthost:
                        # For offline host migration the destination guests
                        # are prepared only after the source side finished.
                        if host_offline_migration != "yes":
                            self.prepare_for_migration(mig_data, self.mig_protocol)
                    else:
                        # This host is neither src nor dst; nothing to do.
                        return
                    if mig_data.is_src():
                        if start_work:
                            if pause != "yes":
                                start_work(mig_data)
                            else:
                                raise error.TestNAError("Can't start work if "
                                                        "vm is paused.")
                    # Starts VM and waits timeout before migration.
                    if pause == "yes" and mig_data.is_src():
                        for vm in mig_data.vms:
                            vm.resume()
                        wait = self.params.get("start_migration_timeout", 0)
                        logging.debug("Wait for migration %s seconds." %
                                      (wait))
                        time.sleep(int(wait))
                    self.before_migration(mig_data)
                    self.migrate_vms(mig_data)
                    timeout = 60
                    if cancel_delay is None:
                        if host_offline_migration == "yes":
                            # Offline host migration: prepare the dst only
                            # now and re-synchronize both sides.
                            self._hosts_barrier(self.hosts,
                                                mig_data.mig_id,
                                                'wait_for_offline_mig',
                                                self.finish_timeout)
                            if mig_data.is_dst():
                                self.prepare_for_migration(mig_data, self.mig_protocol)
                            self._hosts_barrier(self.hosts,
                                                mig_data.mig_id,
                                                'wait2_for_offline_mig',
                                                self.finish_timeout)
                        if (not mig_data.is_src()):
                            timeout = self.mig_timeout
                        self._hosts_barrier(mig_data.hosts, mig_data.mig_id,
                                            'mig_finished', timeout)
                        if mig_data.is_dst():
                            self.check_vms_dst(mig_data)
                            if check_work:
                                check_work(mig_data)
                        else:
                            self.check_vms_src(mig_data)
                            if check_work:
                                check_work(mig_data)
                except:
                    mig_error = True
                    raise
            finally:
                # NOTE: the 'test_finihed' tag (typo and all) is part of the
                # sync protocol; wait_wrap below uses the same string.
                if mig_error and cancel_delay is not None:
                    self._hosts_barrier(self.hosts,
                                        mig_data.mig_id,
                                        'test_finihed',
                                        self.finish_timeout)
                elif mig_error:
                    raise
        def wait_wrap(vms_name, srchost, dsthost):
            # Runs on hosts NOT taking part in this migration; they only
            # wait on the final barrier so that all hosts stay in step.
            mig_data = MigrationData(self.params, srchost, dsthost, vms_name,
                                     None)
            timeout = (self.login_timeout + self.mig_timeout +
                       self.finish_timeout)
            self._hosts_barrier(self.hosts, mig_data.mig_id,
                                'test_finihed', timeout)
        if (self.hostid in [srchost, dsthost]):
            mig_thread = utils.InterruptedThread(migrate_wrap, (vms_name,
                                                                srchost,
                                                                dsthost,
                                                                start_work,
                                                                check_work,
                                                                params_append))
        else:
            mig_thread = utils.InterruptedThread(wait_wrap, (vms_name,
                                                             srchost,
                                                             dsthost))
        mig_thread.start()
        return mig_thread
    def migrate_wait(self, vms_name, srchost, dsthost, start_work=None,
                     check_work=None, params_append=None):
        """
        Migrate machine from srchost to dsthost and wait for finish.
        It executes start_work on source machine before migration and executes
        check_work on dsthost after migration.
        :param vms_name: List of vms.
        :param srchost: src host id.
        :param dsthost: dst host id.
        :param start_work: Function which is started before migration.
        :param check_work: Function which is started after
                           done of migration.
        """
        self.migrate(vms_name, srchost, dsthost, start_work, check_work,
                     params_append).join()
    def cleanup(self):
        """
        Cleanup env after test.
        """
        if self.clone_master:
            self.sync_server.close()
        self.postprocess_env()
    def run(self):
        """
        Start multihost migration scenario.
        After scenario is finished or if scenario crashed it calls postprocess
        machines and cleanup env.
        """
        try:
            self.migration_scenario()
            self._hosts_barrier(self.hosts, self.hosts, 'all_test_finished',
                                self.finish_timeout)
        finally:
            self.cleanup()
class MultihostMigrationFd(MultihostMigration):
    """
    Multi-host migration over a passed file descriptor (fd: protocol).
    The source host opens listening sockets; the destination connects to
    them and each side hands the socket's fd to qemu.
    """
    def __init__(self, test, params, env, preprocess_env=True):
        super(MultihostMigrationFd, self).__init__(test, params, env,
                                                   preprocess_env)
    def migrate_vms_src(self, mig_data):
        """
        Migrate vms source.
        :param mig_data: Data for migration.
        To change the way a machine migrates, reimplement this method.
        """
        def mig_wrapper(vm, cancel_delay, mig_offline, dsthost, vm_ports,
                        not_wait_for_migration, fd):
            # fd_src is the socket fd previously stored in vm params by
            # migrate_wait() below.
            vm.migrate(cancel_delay=cancel_delay, offline=mig_offline,
                       dest_host=dsthost,
                       not_wait_for_migration=not_wait_for_migration,
                       protocol=self.mig_protocol,
                       fd_src=fd)
            self.post_migration(vm, cancel_delay, mig_offline, dsthost,
                                vm_ports, not_wait_for_migration, fd, mig_data)
        logging.info("Start migrating now...")
        cancel_delay = mig_data.params.get("cancel_delay")
        if cancel_delay is not None:
            cancel_delay = int(cancel_delay)
        not_wait_for_migration = mig_data.params.get("not_wait_for_migration")
        if not_wait_for_migration == "yes":
            not_wait_for_migration = True
        mig_offline = mig_data.params.get("mig_offline")
        if mig_offline == "yes":
            mig_offline = True
        else:
            mig_offline = False
        # Migrate all vms in parallel, one worker per vm.
        multi_mig = []
        for vm in mig_data.vms:
            fd = vm.params.get("migration_fd")
            multi_mig.append((mig_wrapper, (vm, cancel_delay, mig_offline,
                                            mig_data.dst, mig_data.vm_ports,
                                            not_wait_for_migration,
                                            fd)))
        utils_misc.parallel(multi_mig)
    def _check_vms_source(self, mig_data):
        """
        Source-side startup check: wait for guests to boot, then meet the
        destination on the 'prepare_VMS' barrier. No port exchange is
        needed because the fds were passed beforehand.
        """
        start_mig_tout = mig_data.params.get("start_migration_timeout", None)
        if start_mig_tout is None:
            for vm in mig_data.vms:
                vm.wait_for_login(timeout=self.login_timeout)
        self._hosts_barrier(mig_data.hosts, mig_data.mig_id,
                            'prepare_VMS', 60)
    def _check_vms_dest(self, mig_data):
        """
        Destination-side startup check: meet the source on the
        'prepare_VMS' barrier, then close our copies of the migration fds
        (qemu already holds its own duplicates).
        """
        self._hosts_barrier(mig_data.hosts, mig_data.mig_id,
                            'prepare_VMS', 120)
        for vm in mig_data.vms:
            fd = vm.params.get("migration_fd")
            os.close(fd)
    def _connect_to_server(self, host, port, timeout=60):
        """
        Connect to network server.
        Retries once per second while the server side is not yet listening
        (ECONNREFUSED); any other socket error propagates.
        :return: Connected socket, or the last unconnected socket if the
                 timeout elapsed.
        """
        endtime = time.time() + timeout
        sock = None
        while endtime > time.time():
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                sock.connect((host, port))
                break
            except socket.error, err:
                (code, _) = err
                if (code != errno.ECONNREFUSED):
                    raise
                time.sleep(1)
        return sock
    def _create_server(self, port, timeout=60):
        """
        Create network server.
        :return: Listening TCP socket bound to all interfaces on `port`.
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.settimeout(timeout)
        sock.bind(('', port))
        sock.listen(1)
        return sock
    def migrate_wait(self, vms_name, srchost, dsthost, start_work=None,
                     check_work=None, params_append=None):
        """
        Like MultihostMigration.migrate_wait, but first establishes one TCP
        connection per vm between src and dst and passes the resulting
        socket fds to the vms via 'migration_fd_<vm>' params.
        """
        vms_count = len(vms_name)
        mig_ports = []
        # The source picks free ports and publishes them via the sync
        # channel; it also hosts the listening sockets below.
        if self.params.get("hostid") == srchost:
            last_port = 5199
            for _ in range(vms_count):
                last_port = utils_misc.find_free_port(last_port + 1, 6000)
                mig_ports.append(last_port)
        sync = SyncData(self.master_id(), self.hostid,
                        self.params.get("hosts"),
                        {'src': srchost, 'dst': dsthost,
                         'port': "ports"}, self.sync_server)
        mig_ports = sync.sync(mig_ports, timeout=120)
        mig_ports = mig_ports[srchost]
        logging.debug("Migration port %s" % (mig_ports))
        if self.params.get("hostid") != srchost:
            # Destination side: connect to the source's listening ports.
            sockets = []
            for mig_port in mig_ports:
                sockets.append(self._connect_to_server(srchost, mig_port))
            try:
                fds = {}
                for s, vm_name in zip(sockets, vms_name):
                    fds["migration_fd_%s" % vm_name] = s.fileno()
                logging.debug("File descriptors %s used for"
                              " migration." % (fds))
                super_cls = super(MultihostMigrationFd, self)
                super_cls.migrate_wait(vms_name, srchost, dsthost,
                                       start_work=start_work,
                                       params_append=fds)
            finally:
                for s in sockets:
                    s.close()
        else:
            # Source side: accept one connection per vm.
            sockets = []
            for mig_port in mig_ports:
                sockets.append(self._create_server(mig_port))
            try:
                conns = []
                for s in sockets:
                    conns.append(s.accept()[0])
                fds = {}
                for conn, vm_name in zip(conns, vms_name):
                    fds["migration_fd_%s" % vm_name] = conn.fileno()
                logging.debug("File descriptors %s used for"
                              " migration." % (fds))
                # Prohibits descriptor inheritance.
                for fd in fds.values():
                    flags = fcntl.fcntl(fd, fcntl.F_GETFD)
                    flags |= fcntl.FD_CLOEXEC
                    fcntl.fcntl(fd, fcntl.F_SETFD, flags)
                super_cls = super(MultihostMigrationFd, self)
                super_cls.migrate_wait(vms_name, srchost, dsthost,
                                       start_work=start_work,
                                       params_append=fds)
                for conn in conns:
                    conn.close()
            finally:
                for s in sockets:
                    s.close()
class MultihostMigrationExec(MultihostMigration):
    """
    Multi-host migration through an external command (exec: protocol):
    either live over 'nc', or offline via gzip'ed state files copied
    between hosts with scp.
    """
    def __init__(self, test, params, env, preprocess_env=True):
        super(MultihostMigrationExec, self).__init__(test, params, env,
                                                     preprocess_env)
    def post_migration(self, vm, cancel_delay, mig_offline, dsthost,
                       mig_exec_cmd, not_wait_for_migration, fd,
                       mig_data):
        """
        For offline host migration, copy the saved vm state file from the
        source path to the destination path on dsthost via scp.
        """
        if mig_data.params.get("host_mig_offline") == "yes":
            src_tmp = vm.params.get("migration_sfiles_path")
            dst_tmp = vm.params.get("migration_dfiles_path")
            username = vm.params.get("username")
            password = vm.params.get("password")
            remote.scp_to_remote(dsthost, "22", username, password,
                                 src_tmp, dst_tmp)
    def migrate_vms_src(self, mig_data):
        """
        Migrate vms source.
        :param mig_data: Data for migration.
        To change the way a machine migrates, reimplement this method.
        """
        def mig_wrapper(vm, cancel_delay, mig_offline, dsthost, mig_exec_cmd,
                        not_wait_for_migration, mig_data):
            vm.migrate(cancel_delay=cancel_delay,
                       offline=mig_offline,
                       dest_host=dsthost,
                       not_wait_for_migration=not_wait_for_migration,
                       protocol=self.mig_protocol,
                       migration_exec_cmd_src=mig_exec_cmd)
            self.post_migration(vm, cancel_delay, mig_offline,
                                dsthost, mig_exec_cmd,
                                not_wait_for_migration, None, mig_data)
        logging.info("Start migrating now...")
        cancel_delay = mig_data.params.get("cancel_delay")
        if cancel_delay is not None:
            cancel_delay = int(cancel_delay)
        not_wait_for_migration = mig_data.params.get("not_wait_for_migration")
        if not_wait_for_migration == "yes":
            not_wait_for_migration = True
        mig_offline = mig_data.params.get("mig_offline")
        if mig_offline == "yes":
            mig_offline = True
        else:
            mig_offline = False
        # Migrate all vms in parallel, one worker per vm.
        multi_mig = []
        for vm in mig_data.vms:
            mig_exec_cmd = vm.params.get("migration_exec_cmd_src")
            multi_mig.append((mig_wrapper, (vm, cancel_delay,
                                            mig_offline,
                                            mig_data.dst,
                                            mig_exec_cmd,
                                            not_wait_for_migration,
                                            mig_data)))
        utils_misc.parallel(multi_mig)
    def _check_vms_source(self, mig_data):
        """
        Source-side startup check: wait for guests to boot and (for live
        migration) meet the destination on the 'prepare_VMS' barrier.
        """
        start_mig_tout = mig_data.params.get("start_migration_timeout", None)
        if start_mig_tout is None:
            for vm in mig_data.vms:
                vm.wait_for_login(timeout=self.login_timeout)
        if mig_data.params.get("host_mig_offline") != "yes":
            self._hosts_barrier(mig_data.hosts, mig_data.mig_id,
                                'prepare_VMS', 60)
    def _check_vms_dest(self, mig_data):
        """
        Destination-side startup check: for live migration, meet the source
        on the 'prepare_VMS' barrier; offline migration needs no sync here.
        """
        if mig_data.params.get("host_mig_offline") != "yes":
            self._hosts_barrier(mig_data.hosts, mig_data.mig_id,
                                'prepare_VMS', 120)
    def migrate_wait(self, vms_name, srchost, dsthost, start_work=None,
                     check_work=None, params_append=None):
        """
        Like MultihostMigration.migrate_wait, but first builds the
        'migration_exec_cmd_{src,dst}_<vm>' params: nc pipes for live host
        migration, gzip'ed state files for offline host migration.
        """
        vms_count = len(vms_name)
        mig_ports = []
        host_offline_migration = self.params.get("host_mig_offline")
        sync = SyncData(self.master_id(), self.hostid,
                        self.params.get("hosts"),
                        {'src': srchost, 'dst': dsthost,
                         'port': "ports"}, self.sync_server)
        mig_params = {}
        if host_offline_migration != "yes":
            # Live migration: the destination picks free ports and shares
            # them; each vm gets a matching nc listener/sender pair.
            if self.params.get("hostid") == dsthost:
                last_port = 5199
                for _ in range(vms_count):
                    last_port = utils_misc.find_free_port(last_port + 1, 6000)
                    mig_ports.append(last_port)
            mig_ports = sync.sync(mig_ports, timeout=120)
            mig_ports = mig_ports[dsthost]
            logging.debug("Migration port %s" % (mig_ports))
            mig_cmds = {}
            for mig_port, vm_name in zip(mig_ports, vms_name):
                mig_dst_cmd = "nc -l %s %s" % (dsthost, mig_port)
                mig_src_cmd = "nc %s %s" % (dsthost, mig_port)
                mig_params["migration_exec_cmd_src_%s" %
                           (vm_name)] = mig_src_cmd
                mig_params["migration_exec_cmd_dst_%s" %
                           (vm_name)] = mig_dst_cmd
        else:
            # Generate filenames for migration.
            mig_fnam = {}
            for vm_name in vms_name:
                while True:
                    fnam = ("mig_" + utils.generate_random_string(6) +
                            "." + vm_name)
                    fpath = os.path.join(self.test.tmpdir, fnam)
                    # NOTE(review): existence is checked on the bare name
                    # 'fnam', not the full 'fpath' — looks like it should
                    # be os.path.exists(fpath); confirm before changing.
                    if (fnam not in mig_fnam.values() and
                            not os.path.exists(fnam)):
                        mig_fnam[vm_name] = fpath
                        break
            # Share the per-host file paths so src knows dst's paths.
            mig_fs = sync.sync(mig_fnam, timeout=120)
            mig_cmds = {}
            # Prepare cmd and files.
            if self.params.get("hostid") == srchost:
                mig_src_cmd = "gzip -c > %s"
                for vm_name in vms_name:
                    mig_params["migration_sfiles_path_%s" % (vm_name)] = (
                        mig_fs[srchost][vm_name])
                    mig_params["migration_dfiles_path_%s" % (vm_name)] = (
                        mig_fs[dsthost][vm_name])
                    mig_params["migration_exec_cmd_src_%s" % (vm_name)] = (
                        mig_src_cmd % mig_fs[srchost][vm_name])
            if self.params.get("hostid") == dsthost:
                mig_dst_cmd = "gzip -c -d %s"
                for vm_name in vms_name:
                    mig_params["migration_exec_cmd_dst_%s" % (vm_name)] = (
                        mig_dst_cmd % mig_fs[dsthost][vm_name])
        logging.debug("Exec commands %s", mig_cmds)
        super_cls = super(MultihostMigrationExec, self)
        super_cls.migrate_wait(vms_name, srchost, dsthost,
                               start_work=start_work,
                               params_append=mig_params)
class MultihostMigrationRdma(MultihostMigration):
    """
    Multi-host migration over RDMA. Before migrating, each vm enables the
    'rdma-pin-all' capability via the human monitor so guest memory is
    pinned and registered with the RDMA hardware.
    """
    def __init__(self, test, params, env, preprocess_env=True):
        super(MultihostMigrationRdma, self).__init__(test, params, env,
                                                     preprocess_env)
    def migrate_vms_src(self, mig_data):
        """
        Migrate vms source.
        :param mig_data: Data for migration.
        To change the way a machine migrates, reimplement this method.
        """
        def mig_wrapper(vm, cancel_delay, dsthost, vm_ports,
                        not_wait_for_migration, mig_offline, mig_data):
            vm.migrate(cancel_delay=cancel_delay, offline=mig_offline,
                       dest_host=dsthost, remote_port=vm_ports[vm.name],
                       not_wait_for_migration=not_wait_for_migration,
                       protocol=self.mig_protocol)
            self.post_migration(vm, cancel_delay, mig_offline, dsthost,
                                vm_ports, not_wait_for_migration, None,
                                mig_data)
        logging.info("Start migrating now...")
        # Use of RDMA during migration requires pinning and registering memory
        # with the hardware.
        enable_rdma_pin_all = mig_data.params.get("enable_rdma_pin_all",
                                                  "migrate_set_capability rdma-pin-all on")
        cancel_delay = mig_data.params.get("cancel_delay")
        if cancel_delay is not None:
            cancel_delay = int(cancel_delay)
        not_wait_for_migration = mig_data.params.get("not_wait_for_migration")
        if not_wait_for_migration == "yes":
            not_wait_for_migration = True
        mig_offline = mig_data.params.get("mig_offline")
        if mig_offline == "yes":
            mig_offline = True
        else:
            mig_offline = False
        # Enable the capability on every vm, then migrate all in parallel.
        multi_mig = []
        for vm in mig_data.vms:
            vm.monitor.human_monitor_cmd(enable_rdma_pin_all)
            multi_mig.append((mig_wrapper, (vm, cancel_delay, mig_data.dst,
                                            mig_data.vm_ports,
                                            not_wait_for_migration,
                                            mig_offline, mig_data)))
        utils_misc.parallel(multi_mig)
class GuestSuspend(object):
    """
    Suspend guest, supports both Linux and Windows.
    Drives the suspend/resume cycle: start a background program as a flag,
    suspend (to memory or disk), resume, then verify the flag program and
    the guest's suspend log.
    """
    SUSPEND_TYPE_MEM = "mem"
    SUSPEND_TYPE_DISK = "disk"
    def __init__(self, params, vm):
        """
        :param params: Test parameters (commands and timeouts).
        :param vm: VM object to suspend/resume.
        :raise error.TestError: If either argument is missing.
        """
        if not params or not vm:
            raise error.TestError("Missing 'params' or 'vm' parameters")
        # Sessions opened during the cycle; closed by _cleanup_open_session.
        self._open_session_list = []
        self.vm = vm
        self.params = params
        self.login_timeout = float(self.params.get("login_timeout", 360))
        self.services_up_timeout = float(self.params.get("services_up_timeout",
                                                         30))
        self.os_type = self.params.get("os_type")
    def _get_session(self):
        """Verify the vm is alive and return a fresh login session."""
        self.vm.verify_alive()
        session = self.vm.wait_for_login(timeout=self.login_timeout)
        return session
    def _session_cmd_close(self, session, cmd):
        """
        Run `cmd` in `session`, always closing the session afterwards.
        :return: (status, output) tuple from cmd_status_output.
        """
        try:
            return session.cmd_status_output(cmd)
        finally:
            try:
                session.close()
            except Exception:
                pass
    def _cleanup_open_session(self):
        """Best-effort close of every session tracked during the cycle."""
        try:
            for s in self._open_session_list:
                if s:
                    s.close()
        except Exception:
            pass
    @error.context_aware
    def setup_bg_program(self, **args):
        """
        Start up a program as a flag in guest.
        """
        suspend_bg_program_setup_cmd = args.get("suspend_bg_program_setup_cmd")
        error.context("Run a background program as a flag", logging.info)
        session = self._get_session()
        # Keep the session open; the background program runs inside it.
        self._open_session_list.append(session)
        logging.debug("Waiting all services in guest are fully started.")
        time.sleep(self.services_up_timeout)
        session.sendline(suspend_bg_program_setup_cmd)
    @error.context_aware
    def check_bg_program(self, **args):
        """
        Make sure the background program is running as expected
        """
        suspend_bg_program_chk_cmd = args.get("suspend_bg_program_chk_cmd")
        error.context("Verify background program is running", logging.info)
        session = self._get_session()
        s, _ = self._session_cmd_close(session, suspend_bg_program_chk_cmd)
        if s:
            raise error.TestFail("Background program is dead. Suspend failed.")
    @error.context_aware
    def kill_bg_program(self, **args):
        """Best-effort kill of the flag program after resume."""
        error.context("Kill background program after resume")
        suspend_bg_program_kill_cmd = args.get("suspend_bg_program_kill_cmd")
        try:
            session = self._get_session()
            self._session_cmd_close(session, suspend_bg_program_kill_cmd)
        except Exception, e:
            logging.warn("Could not stop background program: '%s'", e)
            pass
    @error.context_aware
    def _check_guest_suspend_log(self, **args):
        """
        Run the suspend-support check command in the guest.
        :return: (status, output) of the check command.
        """
        error.context("Check whether guest supports suspend",
                      logging.info)
        suspend_support_chk_cmd = args.get("suspend_support_chk_cmd")
        session = self._get_session()
        s, o = self._session_cmd_close(session, suspend_support_chk_cmd)
        return s, o
    def verify_guest_support_suspend(self, **args):
        """Raise error.TestError if the guest cannot suspend."""
        s, _ = self._check_guest_suspend_log(**args)
        if s:
            raise error.TestError("Guest doesn't support suspend.")
    @error.context_aware
    def start_suspend(self, **args):
        """Send the suspend command into the guest without waiting."""
        suspend_start_cmd = args.get("suspend_start_cmd")
        error.context("Start suspend [%s]" % (suspend_start_cmd), logging.info)
        session = self._get_session()
        self._open_session_list.append(session)
        # Suspend to disk
        session.sendline(suspend_start_cmd)
    @error.context_aware
    def verify_guest_down(self, **args):
        """Wait until the VM goes down after the suspend command."""
        # Make sure the VM goes down
        error.context("Wait for guest goes down after suspend")
        # Allow extra time per vcpu; large-SMP guests suspend more slowly.
        suspend_timeout = 240 + int(self.params.get("smp")) * 60
        if not utils_misc.wait_for(self.vm.is_dead, suspend_timeout, 2, 2):
            raise error.TestFail("VM refuses to go down. Suspend failed.")
    @error.context_aware
    def resume_guest_mem(self, **args):
        """Wake up a guest suspended to memory (S3) via the monitor."""
        error.context("Resume suspended VM from memory")
        self.vm.monitor.system_wakeup()
    @error.context_aware
    def resume_guest_disk(self, **args):
        """Boot the VM again to resume from a suspend-to-disk (S4) state."""
        error.context("Resume suspended VM from disk")
        self.vm.create()
    @error.context_aware
    def verify_guest_up(self, **args):
        """Check the guest's system log for the expected suspend entry."""
        error.context("Verify guest system log", logging.info)
        suspend_log_chk_cmd = args.get("suspend_log_chk_cmd")
        session = self._get_session()
        s, o = self._session_cmd_close(session, suspend_log_chk_cmd)
        if s:
            raise error.TestError("Could not find suspend log. [%s]" % (o))
    @error.context_aware
    def action_before_suspend(self, **args):
        """Hook for subclasses: actions before suspend."""
        error.context("Actions before suspend")
        pass
    @error.context_aware
    def action_during_suspend(self, **args):
        """Delay between suspend and resume; longer on old Windows guests."""
        error.context("Sleep a while before resuming guest", logging.info)
        time.sleep(10)
        if self.os_type == "windows":
            # Due to WinXP/2003 won't suspend immediately after issue S3 cmd,
            # delay 10~60 secs here, maybe there's a bug in windows os.
            logging.info("WinXP/2003 need more time to suspend, sleep 50s.")
            time.sleep(50)
    @error.context_aware
    def action_after_suspend(self, **args):
        """Hook for subclasses: actions after resume."""
        error.context("Actions after suspend")
        pass
## dataset metadata: repo igord-daynix/virt-test, path virttest/utils_test/qemu.py, Python, gpl-2.0, 52890 bytes
##
##
## Apache License
## Version 2.0, January 2004
## http://www.apache.org/licenses/
##
## TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
##
## 1. Definitions.
##
## "License" shall mean the terms and conditions for use, reproduction,
## and distribution as defined by Sections 1 through 9 of this document.
##
## "Licensor" shall mean the copyright owner or entity authorized by
## the copyright owner that is granting the License.
##
## "Legal Entity" shall mean the union of the acting entity and all
## other entities that control, are controlled by, or are under common
## control with that entity. For the purposes of this definition,
## "control" means (i) the power, direct or indirect, to cause the
## direction or management of such entity, whether by contract or
## otherwise, or (ii) ownership of fifty percent (50%) or more of the
## outstanding shares, or (iii) beneficial ownership of such entity.
##
## "You" (or "Your") shall mean an individual or Legal Entity
## exercising permissions granted by this License.
##
## "Source" form shall mean the preferred form for making modifications,
## including but not limited to software source code, documentation
## source, and configuration files.
##
## "Object" form shall mean any form resulting from mechanical
## transformation or translation of a Source form, including but
## not limited to compiled object code, generated documentation,
## and conversions to other media types.
##
## "Work" shall mean the work of authorship, whether in Source or
## Object form, made available under the License, as indicated by a
## copyright notice that is included in or attached to the work
## (an example is provided in the Appendix below).
##
## "Derivative Works" shall mean any work, whether in Source or Object
## form, that is based on (or derived from) the Work and for which the
## editorial revisions, annotations, elaborations, or other modifications
## represent, as a whole, an original work of authorship. For the purposes
## of this License, Derivative Works shall not include works that remain
## separable from, or merely link (or bind by name) to the interfaces of,
## the Work and Derivative Works thereof.
##
## "Contribution" shall mean any work of authorship, including
## the original version of the Work and any modifications or additions
## to that Work or Derivative Works thereof, that is intentionally
## submitted to Licensor for inclusion in the Work by the copyright owner
## or by an individual or Legal Entity authorized to submit on behalf of
## the copyright owner. For the purposes of this definition, "submitted"
## means any form of electronic, verbal, or written communication sent
## to the Licensor or its representatives, including but not limited to
## communication on electronic mailing lists, source code control systems,
## and issue tracking systems that are managed by, or on behalf of, the
## Licensor for the purpose of discussing and improving the Work, but
## excluding communication that is conspicuously marked or otherwise
## designated in writing by the copyright owner as "Not a Contribution."
##
## "Contributor" shall mean Licensor and any individual or Legal Entity
## on behalf of whom a Contribution has been received by Licensor and
## subsequently incorporated within the Work.
##
## 2. Grant of Copyright License. Subject to the terms and conditions of
## this License, each Contributor hereby grants to You a perpetual,
## worldwide, non-exclusive, no-charge, royalty-free, irrevocable
## copyright license to reproduce, prepare Derivative Works of,
## publicly display, publicly perform, sublicense, and distribute the
## Work and such Derivative Works in Source or Object form.
##
## 3. Grant of Patent License. Subject to the terms and conditions of
## this License, each Contributor hereby grants to You a perpetual,
## worldwide, non-exclusive, no-charge, royalty-free, irrevocable
## (except as stated in this section) patent license to make, have made,
## use, offer to sell, sell, import, and otherwise transfer the Work,
## where such license applies only to those patent claims licensable
## by such Contributor that are necessarily infringed by their
## Contribution(s) alone or by combination of their Contribution(s)
## with the Work to which such Contribution(s) was submitted. If You
## institute patent litigation against any entity (including a
## cross-claim or counterclaim in a lawsuit) alleging that the Work
## or a Contribution incorporated within the Work constitutes direct
## or contributory patent infringement, then any patent licenses
## granted to You under this License for that Work shall terminate
## as of the date such litigation is filed.
##
## 4. Redistribution. You may reproduce and distribute copies of the
## Work or Derivative Works thereof in any medium, with or without
## modifications, and in Source or Object form, provided that You
## meet the following conditions:
##
## (a) You must give any other recipients of the Work or
## Derivative Works a copy of this License; and
##
## (b) You must cause any modified files to carry prominent notices
## stating that You changed the files; and
##
## (c) You must retain, in the Source form of any Derivative Works
## that You distribute, all copyright, patent, trademark, and
## attribution notices from the Source form of the Work,
## excluding those notices that do not pertain to any part of
## the Derivative Works; and
##
## (d) If the Work includes a "NOTICE" text file as part of its
## distribution, then any Derivative Works that You distribute must
## include a readable copy of the attribution notices contained
## within such NOTICE file, excluding those notices that do not
## pertain to any part of the Derivative Works, in at least one
## of the following places: within a NOTICE text file distributed
## as part of the Derivative Works; within the Source form or
## documentation, if provided along with the Derivative Works; or,
## within a display generated by the Derivative Works, if and
## wherever such third-party notices normally appear. The contents
## of the NOTICE file are for informational purposes only and
## do not modify the License. You may add Your own attribution
## notices within Derivative Works that You distribute, alongside
## or as an addendum to the NOTICE text from the Work, provided
## that such additional attribution notices cannot be construed
## as modifying the License.
##
## You may add Your own copyright statement to Your modifications and
## may provide additional or different license terms and conditions
## for use, reproduction, or distribution of Your modifications, or
## for any such Derivative Works as a whole, provided Your use,
## reproduction, and distribution of the Work otherwise complies with
## the conditions stated in this License.
##
## 5. Submission of Contributions. Unless You explicitly state otherwise,
## any Contribution intentionally submitted for inclusion in the Work
## by You to the Licensor shall be under the terms and conditions of
## this License, without any additional terms or conditions.
## Notwithstanding the above, nothing herein shall supersede or modify
## the terms of any separate license agreement you may have executed
## with Licensor regarding such Contributions.
##
## 6. Trademarks. This License does not grant permission to use the trade
## names, trademarks, service marks, or product names of the Licensor,
## except as required for reasonable and customary use in describing the
## origin of the Work and reproducing the content of the NOTICE file.
##
## 7. Disclaimer of Warranty. Unless required by applicable law or
## agreed to in writing, Licensor provides the Work (and each
## Contributor provides its Contributions) on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
## implied, including, without limitation, any warranties or conditions
## of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
## PARTICULAR PURPOSE. You are solely responsible for determining the
## appropriateness of using or redistributing the Work and assume any
## risks associated with Your exercise of permissions under this License.
##
## 8. Limitation of Liability. In no event and under no legal theory,
## whether in tort (including negligence), contract, or otherwise,
## unless required by applicable law (such as deliberate and grossly
## negligent acts) or agreed to in writing, shall any Contributor be
## liable to You for damages, including any direct, indirect, special,
## incidental, or consequential damages of any character arising as a
## result of this License or out of the use or inability to use the
## Work (including but not limited to damages for loss of goodwill,
## work stoppage, computer failure or malfunction, or any and all
## other commercial damages or losses), even if such Contributor
## has been advised of the possibility of such damages.
##
## 9. Accepting Warranty or Additional Liability. While redistributing
## the Work or Derivative Works thereof, You may choose to offer,
## and charge a fee for, acceptance of support, warranty, indemnity,
## or other liability obligations and/or rights consistent with this
## License. However, in accepting such obligations, You may act only
## on Your own behalf and on Your sole responsibility, not on behalf
## of any other Contributor, and only if You agree to indemnify,
## defend, and hold each Contributor harmless for any liability
## incurred by, or claims asserted against, such Contributor by reason
## of your accepting any such warranty or additional liability.
##
## END OF TERMS AND CONDITIONS
##
## APPENDIX: How to apply the Apache License to your work.
##
## To apply the Apache License to your work, attach the following
## boilerplate notice, with the fields enclosed by brackets "[]"
## replaced with your own identifying information. (Don't include
## the brackets!) The text should be enclosed in the appropriate
## comment syntax for the file format. We also recommend that a
## file or class name and description of purpose be included on the
## same "printed page" as the copyright notice for easier
## identification within third-party archives.
##
## Copyright [yyyy] [name of copyright owner]
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
#-------------------------------------------------------------------------------
# Name: es.py
# Purpose: This file implements fuzzy expert system.
#
# Author: Roman Graf
#
# Created: 19.09.2014
# Copyright: (c) GrafR 2014
# Licence: Apache 2.0
#-------------------------------------------------------------------------------
#!/usr/bin/env python
import os
# Default FCL (Fuzzy Control Language) rule file.
# NOTE(review): hard-coded absolute Windows path - presumably a development
# default; confirm before wider use.
DEFAULT_FILE_NAME = 'D:\\fuzzy_gui\\matchbox.fcl'
# Companion definitions file, derived from the rule file name.
DEFAULT_FILE_NAME_DEF = DEFAULT_FILE_NAME.replace('.fcl','-def.fcl')
# Rule kinds (stored in Rule.type).
OUTPUT_RULE = 0
INPUT_RULE = 1
# Drawing defaults for the canvas.
LINE_WIDTH = 2
X_START = 20
Y_START = 20
# Button ids (also used as indices into DrawFrame.buttons).
ADD_RULE_ID = 0
REMOVE_RULE_ID = 1
RUN_ID = 2
EXPORT_ID = 3
# Default widget geometry and layout steps.
WIDTH = 100
HEIGHT = 20
DX = 20
DY = 40
FONT_SIZE = 10
# Id of the Plot button in RuleEditDialog.
ID_PLOT = 4
# dialog
ADD_TERM = 1
# the number of terms
TERM_NUMBER = 4
# the number of range input fields
RANGE_NUMBER = TERM_NUMBER*2
# Optional-dependency probe: the tool needs numpy; remember whether it is
# available and prepare a user-facing message if it is not.
try:
    import numpy as N
    import numpy.random as RandomArray
    haveNumpy = True
    #print "Using numpy, version:", N.__version__
except ImportError:
    # numpy isn't there
    haveNumpy = False
    # Message shown to the user when numpy is missing.
    errorText = (
        "The Expert System Tool requires the numpy module, version 1.* \n\n"
        "You can get info about it at:\n"
        "http://numpy.scipy.org/\n\n"
        )
#---------------------------------------------------------------------------
def BuildDrawFrame(): # this gets called when needed, rather than on import
try:
from floatcanvas import NavCanvas, FloatCanvas, Resources
except ImportError: # if it's not there locally, try the wxPython lib.
from wx.lib.floatcanvas import NavCanvas, FloatCanvas, Resources
import wx.lib.colourdb
import time, random
import wx.lib.dialogs
class RuleEditDialog(wx.Dialog):
    # Modal dialog for editing the terms (name + membership ranges) of one
    # fuzzy rule. Reads its rows from the module-level Model.terms list.
    def __init__(self, parent, id, title):
        # *title* is the rule name with spaces; Model stores it with '_'.
        wx.Dialog.__init__(self, parent, id, 'Edit rule: ' + title, size=(935,140))
        self.ruleName = title
        # One grid row per term: label, name field, label, RANGE_NUMBER
        # range fields, remove-checkbox.
        self.sizer = wx.FlexGridSizer(cols=4 + RANGE_NUMBER, hgap=6, vgap=6)
        # Pre-sized slots for the term/range widgets (0 = empty slot).
        self.terms = [0 for x in xrange(TERM_NUMBER)]
        self.ranges = [[0 for x in xrange(RANGE_NUMBER)] for x in xrange(TERM_NUMBER)]
        self.removeFlags = [0 for x in xrange(TERM_NUMBER)]
        print 'dialog rectangle terms', len(Model.terms)
        termIdx = 0
        for term in Model.terms:
            print 'term name: ', term.rule, 'title: ', title.replace(' ', '_')
            if term.rule == title.replace(' ', '_'):
                print 'term.range: ', len(term.range_dict)
                termLabel = wx.StaticText(self, -1, "Term:")
                self.terms[termIdx] = wx.TextCtrl(self, -1, term.name, size=(175, -1))
                self.terms[termIdx].SetInsertionPoint(0)
                rangeLabel = wx.StaticText(self, -1, "Range:")
                print 'range str: ', term.range_dict
                range_size = len(term.range_dict)
                # First pass: fill every slot with a placeholder control.
                # NOTE(review): the "kkk" text and size=(6000, -1000) look like
                # leftover debug values - confirm.
                for i, value in enumerate(term.range_dict):
                    #if value is not None:
                    initRange = wx.TextCtrl(self, -1, "kkk" + str(i), size=(6000, -1000))
                    self.ranges[termIdx][i] = initRange
                    print 'added init range: ', self.ranges[termIdx][i].GetValue(), 'termIdx: ', termIdx, 'i: ', i
                print 'init self.ranges len: ', len(self.ranges[termIdx])
                # Second pass: replace placeholders with the real values.
                for i, value in enumerate(term.range_dict):
                    rangeValue = ""
                    if value is not None:
                        rangeValue = value
                    curRange = wx.TextCtrl(self, -1, rangeValue, size=(60, -1))
                    self.ranges[termIdx][i] = curRange
                    print 'current ranges: ', self.ranges[termIdx][i].GetValue(), 'termIdx: ', termIdx, 'i: ', i
                print 'range size: ', range_size
                # Pad the row with empty fields up to RANGE_NUMBER.
                if range_size < RANGE_NUMBER:
                    count = range_size
                    while (count < RANGE_NUMBER):
                        print 'The count is:', count
                        curRange = wx.TextCtrl(self, -1, "", size=(60, -1))
                        self.ranges[termIdx][count] = curRange
                        print 'place holder ranges: ', self.ranges[termIdx][count].GetValue(), 'termIdx: ', termIdx, 'count: ', count
                        count = count + 1
                self.removeFlags[termIdx] = wx.CheckBox(self, -1, 'Remove Term', (10, 10))
                self.removeFlags[termIdx].SetValue(False)
                # NOTE(review): this calls RemoveTerm(termIdx) immediately and
                # passes its return value (None) as the handler; a lambda that
                # defers the call is probably intended - confirm.
                wx.EVT_CHECKBOX(self, self.removeFlags[termIdx].GetId(), self.RemoveTerm(termIdx))
                print 'self.ranges len: ', len(self.ranges[termIdx])
                self.sizer.AddMany([termLabel, self.terms[termIdx], rangeLabel,
                                    self.ranges[termIdx][0], self.ranges[termIdx][1],
                                    self.ranges[termIdx][2], self.ranges[termIdx][3],
                                    self.ranges[termIdx][4], self.ranges[termIdx][5],
                                    self.ranges[termIdx][6], self.ranges[termIdx][7],
                                    self.removeFlags[termIdx]])
                termIdx = termIdx + 1
        okButton = wx.Button(self, wx.ID_OK, "OK")
        self.sizer.Add(okButton)
        cancelButton = wx.Button(self, wx.ID_CANCEL, "Cancel", (50, 50))
        self.sizer.Add(cancelButton)
        plotButton = wx.Button(self, ID_PLOT, "Plot")
        self.sizer.Add(plotButton, 1, wx.EXPAND)
        self.Bind(wx.EVT_BUTTON, self.OnCmdButton, plotButton)
        self.SetSizer(self.sizer)

    def RemoveTerm(self, termIdx):
        # NOTE(review): only toggles the dialog title as a visible side
        # effect; no term is actually removed here - confirm intent.
        if self.removeFlags[termIdx].GetValue():
            self.SetTitle('checkbox.py')
        else: self.SetTitle('')

    def OnCmdButton(self, event):
        # Dispatch button events; only the Plot button is handled.
        if event.GetId() == ID_PLOT:
            self.showPlot(self.ruleName)

    def showPlot(self, rule):
        # Plot the membership functions of every term of *rule* with pylab.
        import os
        import sys
        import string
        from PIL import Image
        from pylab import plot
        from pylab import title
        from pylab import show
        from pylab import ylabel
        from pylab import xlabel
        from pylab import text
        from pylab import legend
        # NOTE(review): most of the locals below are unused - confirm removal.
        x = 100
        col_averages = 200
        width = 100
        mean = 50
        left_x = 0
        min_left_rand = 20
        max_left_rand = 80
        right_x = 0
        min_right_rand = 20
        max_right_rand = 80
        FLAG_SIZE = 100
        GAP_RATIO = 2
        idx = 0
        # Line colours cycle red / blue / green per term.
        colours = []
        colours.extend('r')
        colours.extend('b')
        colours.extend('g')
        termNames = []
        for term in Model.terms:
            #print 'term name: ', term.rule, 'title: ', rule.replace(' ', '_')
            if term.rule == rule.replace(' ', '_'):
                print 'plot term size: ', len(term.range_dict)
                print 'range str: ', term.range_dict
                # range_dict alternates x and y coordinates.
                x_values = term.range_dict[::2]
                y_values = term.range_dict[1::2]
                print 'x_values', x_values, 'y_values', y_values
                print 'x_max', max(x_values), 'y_max', max(y_values)
                range_size = len(term.range_dict)
                # define max on X and on Y
                max_x = 0
                max_y = 0
                termNames.append(term.name)
                # First term is blue, second term is red, third term is orange
                # if multiple lines in one term
                isAddZero = False
                if len(term.range_dict) > 2 and term.range_dict[1] is not None:
                    # Piecewise-linear membership: connect consecutive points.
                    for i in range(range_size/2):
                        print 'i plot', i
                        if i < len(term.range_dict)/2 - 1:
                            print 'plot term name', term.name, 'i', i, 'idx', idx, 'x_value1', x_values[i], 'x_value2', x_values[i+1], 'y_value1', y_values[i], 'y_value2', y_values[i+1], 'color', colours[idx]
                            plot([x_values[i],x_values[i+1]],[y_values[i],y_values[i+1]],colours[idx]+'-', label=term.name, linewidth=2)
                # if flag term
                else:
                    if term.range_dict[1] is None:
                        ## plot([0,FLAG_SIZE*idx],[0,FLAG_SIZE],colours[idx], label=term.name, linewidth=2)
                        print 'plot term name', term.name
                        #if idx == 0:
                        isAddZero = True
                        # Constant (flag) term: draw a horizontal line.
                        plot([0,FLAG_SIZE],[term.range_dict[0],term.range_dict[0]],colours[idx]+'-', label=term.name, linewidth=2)
                ## plot([0,FLAG_SIZE*idx],[0,FLAG_SIZE],colours[idx], label=term.name, linewidth=2)
                idx = idx + 1
                ## plot([0, int(max_x)+1],[0, int(max_y)+1])
                print 'isAddZero', isAddZero
                if isAddZero:
                    # Baseline under flag terms.
                    plot([0,FLAG_SIZE],[0,0],'b-', linewidth=2)
        title('Rule: ' + rule)
        ylabel('Y axis')
        xlabel('X axis ')
        print termNames
        legend( termNames )
        show()
class Rule(object):
    """A fuzzy variable shown on the canvas.

    name -- variable name from the FCL file.
    type -- INPUT_RULE or OUTPUT_RULE.
    """

    def __init__(self, name, type):
        super(Rule, self).__init__()
        self.initUI(name, type)

    def initUI(self, name, type):
        # Presentation state for this rule.
        self.name, self.type = name, type
class Dependency(object):
    """A named mapping of dependencies read from the definitions file."""

    def __init__(self, name, dependency_dict):
        super(Dependency, self).__init__()
        self.initUI(name, dependency_dict)

    def initUI(self, name, dependency_dict):
        # Keep the raw dependency data as given.
        self.name, self.dependency_dict = name, dependency_dict
class Decision(object):
    """A named decision with its list of parsed conditions."""

    def __init__(self, name, condition):
        super(Decision, self).__init__()
        self.initUI(name, condition)

    def initUI(self, name, condition):
        # Conditions are the [lhs, rhs] pairs parsed from ':=' lines.
        self.name, self.condition = name, condition
class Term(object):
    """One membership term of a fuzzy rule.

    rule       -- name of the owning rule (spaces replaced by '_').
    name       -- term name from the FCL TERM line.
    range_dict -- flat list of alternating x/y values, or [value, None]
                  for constant (flag) terms.
    """

    def __init__(self, rule, name, range_dict):
        super(Term, self).__init__()
        self.initUI(rule, name, range_dict)

    def initUI(self, rule, name, range_dict):
        self.rule = rule
        self.name = name
        self.range_dict = range_dict
class Input(object):
    """An input variable with its value range and optional weight."""

    def __init__(self, name, min_range, max_range, weight):
        super(Input, self).__init__()
        self.initUI(name, min_range, max_range, weight)

    def initUI(self, name, min_range, max_range, weight):
        # Ranges come from the FCL RANGE(a..b) declaration as strings.
        self.name = name
        self.min_range, self.max_range = min_range, max_range
        self.weight = weight
class Output(Input):
    # An output variable: same shape as Input (name, min/max range, weight).
    pass
class Model(object):
    """Shared container for the parsed rule-engine state.

    All state lives in class attributes so the canvas, dialogs and the
    inference run see the same data without passing a model object around.

    Fixes: the original ``class Model():`` was an old-style class under
    Python 2, making ``super(Model, self)`` raise TypeError, and
    ``__init__`` called ``self.initUI()`` which was never defined, so
    instantiating Model raised AttributeError.
    """

    rules = []          # Rule objects (inputs and outputs)
    inputs = []         # Input variable definitions
    outputs = []        # Output variable definitions
    exp_values = []     # experiment rows from the definitions file
    dependencies = []
    terms = []          # Term objects from FUZZIFY/DEFUZZIFY blocks
    headers = []        # experiment column headers
    decisions = []      # Decision objects
    res_str = ''
    rule_counter = 0
    isRun = False

    def __init__(self):
        super(Model, self).__init__()
        self.initUI()

    def initUI(self):
        # Initializer hook, kept for symmetry with the other model classes.
        pass
class DrawFrame(wx.Frame):
"""
A frame used for the FloatCanvas Demo
"""
def __init__(self,parent, id,title,position,size):
    # Build the main frame: menu bar, status bar, command buttons, the
    # FloatCanvas drawing area and a read-only message/log window.
    wx.Frame.__init__(self,parent, id,title,position, size)
    # Currently loaded rule file and its companion definitions file.
    self.data = None
    self.filename = DEFAULT_FILE_NAME
    self.dirname = None
    self.data_def = None
    self.filename_def = DEFAULT_FILE_NAME_DEF
    self.dirname_def = None
    ## Set up the MenuBar
    MenuBar = wx.MenuBar()
    file_menu = wx.Menu()
    item = file_menu.Append(-1, "&Close","Close this frame")
    self.Bind(wx.EVT_MENU, self.OnQuit, item)
    menuOpen = file_menu.Append(-1, "&Open"," Open a file to edit")
    self.Bind(wx.EVT_MENU, self.OnOpen, menuOpen)
    showFlcFile = file_menu.Append(wx.ID_OPEN, "&OpenFLC"," Open an FLC file to read")
    self.Bind(wx.EVT_MENU, self.ShowFlcFile, showFlcFile)
    item = file_menu.Append(-1, "&SavePNG","Save the current image as a PNG")
    self.Bind(wx.EVT_MENU, self.OnSavePNG, item)
    MenuBar.Append(file_menu, "&File")
    draw_menu = wx.Menu()
    item = draw_menu.Append(-1, "&Clear","Clear the Canvas")
    self.Bind(wx.EVT_MENU, self.Clear, item)
    MenuBar.Append(draw_menu, "&Tests")
    view_menu = wx.Menu()
    item = view_menu.Append(-1, "Zoom to &Fit","Zoom to fit the window")
    self.Bind(wx.EVT_MENU, self.ZoomToFit, item)
    MenuBar.Append(view_menu, "&View")
    help_menu = wx.Menu()
    item = help_menu.Append(-1, "&About",
                            "More information About this program")
    self.Bind(wx.EVT_MENU, self.OnAbout, item)
    MenuBar.Append(help_menu, "&Help")
    self.SetMenuBar(MenuBar)
    self.CreateStatusBar()
    # Row of command buttons below the canvas; the *_ID constants double
    # as indices into self.buttons.
    self.sizer2 = wx.BoxSizer(wx.HORIZONTAL)
    self.buttons = []
    self.buttons.append(wx.Button(self, ADD_RULE_ID, "Add Rule"))
    self.sizer2.Add(self.buttons[ADD_RULE_ID], 1, wx.EXPAND)
    self.Bind(wx.EVT_BUTTON, self.OnCmdButton, self.buttons[ADD_RULE_ID])
    self.buttons.append(wx.Button(self, REMOVE_RULE_ID, "Remove Rule"))
    self.sizer2.Add(self.buttons[REMOVE_RULE_ID], 1, wx.EXPAND)
    self.Bind(wx.EVT_BUTTON, self.OnCmdButton, self.buttons[REMOVE_RULE_ID])
    self.buttons.append(wx.Button(self, RUN_ID, "Run"))
    self.sizer2.Add(self.buttons[RUN_ID], 1, wx.EXPAND)
    self.Bind(wx.EVT_BUTTON, self.OnCmdButton, self.buttons[RUN_ID])
    self.buttons.append(wx.Button(self, EXPORT_ID, "Export Rule Engine"))
    self.sizer2.Add(self.buttons[EXPORT_ID], 1, wx.EXPAND)
    self.Bind(wx.EVT_BUTTON, self.OnCmdButton, self.buttons[EXPORT_ID])
    # Add the Canvas
    NC = NavCanvas.NavCanvas(self,
                             Debug = 0,
                             BackgroundColor = "DARK SLATE BLUE")
    self.Canvas = NC.Canvas # reference the contained FloatCanvas
    self.MsgWindow = wx.TextCtrl(self, wx.ID_ANY,
                                 "Look Here for output from events\n",
                                 style = (wx.TE_MULTILINE |
                                          wx.TE_READONLY |
                                          wx.SUNKEN_BORDER)
                                 )
    ##Create a sizer to manage the Canvas and message window
    MainSizer = wx.BoxSizer(wx.VERTICAL)
    MainSizer.Add(NC, 4, wx.EXPAND)
    MainSizer.Add(self.sizer2, 0, wx.EXPAND)
    MainSizer.Add(self.MsgWindow, 1, wx.EXPAND | wx.ALL, 5)
    self.SetSizer(MainSizer)
    self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
    self.Canvas.Bind(FloatCanvas.EVT_MOTION, self.OnMove)
    self.Canvas.Bind(FloatCanvas.EVT_MOUSEWHEEL, self.OnWheel)
    self.EventsAreBound = False
    ## getting all the colors for random objects
    wx.lib.colourdb.updateColourDB()
    self.colors = wx.lib.colourdb.getColourList()
    # NOTE(review): explicit return from __init__ is a no-op.
    return None
def Log(self, text):
    """Append *text* to the message window, ensuring a trailing newline."""
    self.MsgWindow.AppendText(text)
    # Bug fix: the original tested text[-1], which raises IndexError on an
    # empty string; endswith() is safe for any input and otherwise behaves
    # identically.
    if not text.endswith("\n"):
        self.MsgWindow.AppendText("\n")
def BindAllMouseEvents(self):
    """Attach every FloatCanvas mouse handler (no-op if already bound)."""
    if self.EventsAreBound:
        return
    ## Here is how you catch FloatCanvas mouse events
    handlers = (
        (FloatCanvas.EVT_LEFT_DOWN, self.OnLeftDown),
        (FloatCanvas.EVT_LEFT_UP, self.OnLeftUp),
        (FloatCanvas.EVT_LEFT_DCLICK, self.OnLeftDouble),
        (FloatCanvas.EVT_MIDDLE_DOWN, self.OnMiddleDown),
        (FloatCanvas.EVT_MIDDLE_UP, self.OnMiddleUp),
        (FloatCanvas.EVT_MIDDLE_DCLICK, self.OnMiddleDouble),
        (FloatCanvas.EVT_RIGHT_DOWN, self.OnRightDown),
        (FloatCanvas.EVT_RIGHT_UP, self.OnRightUp),
        (FloatCanvas.EVT_RIGHT_DCLICK, self.OnRightDouble),
    )
    for evt, handler in handlers:
        self.Canvas.Bind(evt, handler)
    self.EventsAreBound = True
def UnBindAllMouseEvents(self):
    """Detach every FloatCanvas mouse handler bound by BindAllMouseEvents."""
    ## Here is how you unbind FloatCanvas mouse events
    events = (
        FloatCanvas.EVT_LEFT_DOWN,
        FloatCanvas.EVT_LEFT_UP,
        FloatCanvas.EVT_LEFT_DCLICK,
        FloatCanvas.EVT_MIDDLE_DOWN,
        FloatCanvas.EVT_MIDDLE_UP,
        FloatCanvas.EVT_MIDDLE_DCLICK,
        FloatCanvas.EVT_RIGHT_DOWN,
        FloatCanvas.EVT_RIGHT_UP,
        FloatCanvas.EVT_RIGHT_DCLICK,
    )
    for evt in events:
        self.Canvas.Unbind(evt)
    self.EventsAreBound = False
def PrintCoords(self,event):
    # Log both world and pixel coordinates of a canvas mouse event.
    self.Log("coords are: %s"%(event.Coords,))
    self.Log("pixel coords are: %s\n"%(event.GetPosition(),))
def OnOpen(self,e):
    """ Open a file"""
    dlg = wx.FileDialog(self, "Choose a file", "", "", "*.fcl", wx.OPEN)
    if dlg.ShowModal() == wx.ID_OK:
        self.filename = dlg.GetFilename()
        self.dirname = dlg.GetDirectory()
        # NOTE(review): GetPath() overwrites the bare GetFilename() above;
        # ReadRuleEngine() needs the full path, so the first assignment
        # appears redundant - confirm.
        self.filename = dlg.GetPath()
        print 'open a file', self.filename
        self.Clear()
        # Reset the shared model before parsing the new file.
        Model.rules = []
        Model.terms = []
        Model.decisions = []
        Model.inputs = []
        Model.outputs = []
        self.ReadRuleEngine()
    dlg.Destroy()
def ShowFlcFile(self, evt):
    """Let the user pick an FCL file and display its contents read-only.

    Fixes: the path was assembled with a hard-coded '\\\\' separator
    (Windows-only) and the file handle leaked if read() raised.
    """
    dlg = wx.FileDialog(self, "Choose an FLC file", "", "", "*.*", wx.OPEN)
    if dlg.ShowModal() == wx.ID_OK:
        self.filename = dlg.GetFilename()
        self.dirname = dlg.GetDirectory()
        self.Log("selected dir: " + dlg.GetDirectory() + ", file: " + dlg.GetFilename())
        import os
        # Portable path join; closes the file even on errors.
        path = os.path.join(dlg.GetDirectory(), dlg.GetFilename())
        with open(path, "r") as f:
            msg = f.read()
        dlg = wx.lib.dialogs.ScrolledMessageDialog(self, msg, "message test")
        dlg.ShowModal()
def OnSavePNG(self, event=None):
    # Ask for a target path and save the current canvas as a PNG image.
    import os
    dlg = wx.FileDialog(
        self, message="Save file as ...", defaultDir=os.getcwd(),
        defaultFile="", wildcard="*.png", style=wx.SAVE
        )
    if dlg.ShowModal() == wx.ID_OK:
        path = dlg.GetPath()
        # Ensure the .png extension is present.
        if not(path[-4:].lower() == ".png"):
            path = path+".png"
        self.Canvas.SaveAsImage(path)
def OnLeftDown(self, event):
    # Demo handler: log the event name and its coordinates.
    self.Log("LeftDown")
    self.PrintCoords(event)
def OnLeftUp(self, event):
    # Demo handler: log the event name and its coordinates.
    self.Log("LeftUp")
    self.PrintCoords(event)
def OnLeftDouble(self, event):
    # Demo handler: log the event name and its coordinates.
    self.Log("LeftDouble")
    self.PrintCoords(event)
def OnMiddleDown(self, event):
    # Demo handler: log the event name and its coordinates.
    self.Log("MiddleDown")
    self.PrintCoords(event)
def OnMiddleUp(self, event):
    # Demo handler: log the event name and its coordinates.
    self.Log("MiddleUp")
    self.PrintCoords(event)
def OnMiddleDouble(self, event):
    # Demo handler: log the event name and its coordinates.
    self.Log("MiddleDouble")
    self.PrintCoords(event)
def OnRightDown(self, event):
    # Demo handler: log the event name and its coordinates.
    self.Log("RightDown")
    self.PrintCoords(event)
def OnRightUp(self, event):
    # Demo handler: log the event name and its coordinates.
    self.Log("RightUp")
    self.PrintCoords(event)
def OnRightDouble(self, event):
    # Demo handler: log the event name and its coordinates.
    self.Log("RightDouble")
    self.PrintCoords(event)
def OnWheel(self, event):
    # Pan the image by 10% of the view per wheel notch; Ctrl pans
    # horizontally, otherwise vertically.
    self.Log("Mouse Wheel")
    self.PrintCoords(event)
    Rot = event.GetWheelRotation()
    # Normalise to +/-0.1 regardless of wheel delta.
    # NOTE(review): raises ZeroDivisionError if GetWheelRotation() is 0,
    # which can happen with high-resolution wheels - confirm.
    Rot = Rot / abs(Rot) * 0.1
    if event.ControlDown(): # move left-right
        self.Canvas.MoveImage( (Rot, 0), "Panel" )
    else: # move up-down
        self.Canvas.MoveImage( (0, Rot), "Panel" )
def OnMove(self, event):
    """
    Updates the status bar with the world coordinates
    """
    self.SetStatusText("%.2f, %.2f"%tuple(event.Coords))
    # Let other handlers see the motion event too.
    event.Skip()
def OnAbout(self, event):
    # Show a modal "about" box.
    dlg = wx.MessageDialog(self,
                           "This is a program to create and handle rules \n"
                           "using the fuzzy logic\n",
                           "AIT",
                           wx.OK | wx.ICON_INFORMATION)
    dlg.ShowModal()
    dlg.Destroy()
def ZoomToFit(self,event):
    # Zoom the canvas so the whole drawing's bounding box is visible.
    self.Canvas.ZoomToBB()
def Clear(self,event = None):
    # Remove all objects and handlers, then redraw the empty canvas.
    self.UnBindAllMouseEvents()
    self.Canvas.InitAll()
    self.Canvas.Draw()
def OnQuit(self,event):
    # Close the frame (triggers OnCloseWindow via EVT_CLOSE).
    self.Close(True)
def OnCloseWindow(self, event):
    # Destroy the frame when the close event arrives.
    self.Destroy()
def ShowFrame(self):
    # Timer callback: advance the moving object by one animation step,
    # bouncing off the edges of self.Range; stops after NumTimeSteps.
    Object = self.MovingObject
    Range = self.Range
    if self.TimeStep < self.NumTimeSteps:
        x,y = Object.XY
        # Reverse the velocity component when the object leaves the range.
        if x > Range[1] or x < Range[0]:
            self.dx = -self.dx
        if y > Range[1] or y < Range[0]:
            self.dy = -self.dy
        Object.Move( (self.dx,self.dy) )
        Object.Text.Move( (self.dx,self.dy))
        self.Canvas.Draw()
        self.TimeStep += 1
        # Let pending UI events run between frames.
        wx.GetApp().Yield(True)
    else:
        self.Timer.Stop()
def MoveMe(self, Object):
    # Start animating *Object*: pick a random velocity and run the timer
    # for 200 frames (ShowFrame performs each step).
    self.MovingObject = Object
    Range = self.Range
    self.dx = random.uniform(Range[0]/4,Range[1]/4)
    self.dy = random.uniform(Range[0]/4,Range[1]/4)
    #import time
    #start = time.time()
    self.NumTimeSteps = 200
    self.TimeStep = 1
    self.Timer.Start(self.FrameDelay)
    #print "Did %i frames in %f seconds"%(N, (time.time() - start) )
def AddRule(self,event):
    # NOTE(review): despite the name this only zooms the canvas; the
    # add-rule behaviour appears unimplemented - confirm.
    self.Canvas.ZoomToBB()
def ReadRuleEngine(self, event=None):
    # Parse the FCL rule file (self.filename) and its companion
    # definitions file (self.filename_def) into the shared Model, then
    # redraw the canvas. Parsing is purely positional/textual: blocks are
    # located with find()/split() on FCL keywords (FUZZIFY, DEFUZZIFY,
    # VAR_INPUT, VAR_OUTPUT, VAR_POSITIVE_DECISION, EXPERIMENTBLOCK).
    if self.filename != None:
        self.Log('Opened rule engine: ' + self.filename + "\n")
        f = open(self.filename, 'r')
        with f:
            self.data = f.read()
            # Normalise tabs so position-based parsing works.
            self.data = self.data.replace('\t',' ')
        self.Log('Opened definitions: ' + self.filename_def + "\n")
        fdef = open(self.filename_def, 'r')
        with fdef:
            self.datadef = fdef.read()
            self.datadef = self.datadef.replace('\t',' ')
        # read input settings and values
        if 'EXPERIMENTBLOCK' in self.datadef:
            experiment_block_begin_pos = self.datadef.find('EXPERIMENTBLOCK')
            experiment_block_end_pos = self.datadef.find('END_EXPERIMENTBLOCK')
            # Strip spaces and blank lines; drop the keyword row and the
            # (empty) trailing row.
            experiment_block = self.datadef[experiment_block_begin_pos:experiment_block_end_pos].replace(' ', '').replace('\n\n', '\n').split('\n')[1:-1]
            print 'experiment block', experiment_block
            # First row is the semicolon-separated headers, rest are values.
            Model.headers = experiment_block[0].split(';')
            Model.exp_values = experiment_block[1::]
        # read positive decisions
        decision_pos = self.datadef.find('VAR_POSITIVE_DECISION')
        ruleblock_pos = self.datadef.find('EXPERIMENTBLOCK')
        decision_block = self.datadef[ decision_pos : ruleblock_pos : ]
        var_decisions_part = decision_block.replace(' ', '').replace('\n\n','\n').split('VAR_POSITIVE_DECISION')[1::]
        #print 'decision part: ', var_decisions_part
        for var_decisions in var_decisions_part[::2]:
            # v[0] is the decision name; following lines hold conditions.
            v = var_decisions.replace('END_DECISION_VAR\n', '').replace('\n\n','\n').split('\n')
            ##print 'decision: ', v, ', decision name: ', v[0]
            allconditions = []
            for condition in v[1::]:
                if ':=' in condition:
                    eq_pos = condition.find(':=')
                    end_pos = condition.find(';')
                    d = []
                    print 'condition: ', condition
                    condition = condition.replace(' ', '')
                    # Split into [lhs, rhs] around ':='.
                    condition_row = condition.split(':=')
                    print 'condition_row', condition_row
                    #print 'condition_row0', condition_row[0], 'condition_row1', condition_row[1]
                    if len(condition_row[0]) > 0:
                        print 'condition_row: ', condition_row
                        allconditions.append(condition_row)
            print 'decision', v[0], 'allconditions', allconditions
            Model.decisions.append(Decision(v[0], allconditions))
        # read defuzzify rules
        defuzzify_pos = self.data.find('DEFUZZIFY')
        end_defuzzify_pos = self.data.find('END_DEFUZZIFY')
        rules_block = self.data[ defuzzify_pos : end_defuzzify_pos : ]
        #print 'rules_block', rules_block
        first_break_pos = rules_block.find('\n')
        #print 'defuzzify_pos', defuzzify_pos, 'first_break_pos', first_break_pos
        # The rule name follows the DEFUZZIFY keyword on the same line.
        defuzzifyRuleName = rules_block[len('DEFUZZIFY') : first_break_pos].replace(' ', '')
        var_rules_part = rules_block.replace(' ', '').replace('\n\n','\n').split('\n')[1::]
        print 'defuzzify rules part: ', var_rules_part
        for term in var_rules_part:
            if 'TERM' in term:
                term_name_pos = term.find('TERM')
                eq_pos = term.find(':=')
                end_pos = term.find(';')
                term_name = term[term_name_pos + 4 : eq_pos]
                d = []
                # Defuzzify terms are single constants: (value, None).
                d.extend([term[eq_pos + 2 : end_pos], None])
                if (len(term_name) > 0):
                    print 'defuzzifyRuleName', defuzzifyRuleName, 'term_name', term_name, 'd: ', d
                    Model.terms.append(Term(defuzzifyRuleName, term_name, d))
        # read rules
        fuzzify_pos = self.data.find('FUZZIFY')
        defuzzify_pos = self.data.find('DEFUZZIFY')
        rules_block = self.data[ fuzzify_pos : defuzzify_pos : ]
        var_rules_part = rules_block.replace(' ', '').replace('\n\n','\n').split('FUZZIFY')[1::]
        #print 'rules part: ', var_rules_part
        for var_rules in var_rules_part[::2]:
            v = var_rules.replace('END_FUZZIFY\n', '').replace('\n\n','\n').split('\n')
            ##print 'rules: ', v, ', rule name: ', v[0]
            for term in v[1::]:
                #print 'term: ', term
                term_name_pos = term.find('TERM')
                eq_pos = term.find(':=')
                end_pos = term.find(';')
                range_pos = term.find('(')
                term_name = term[term_name_pos + 4 : eq_pos]
                d = []
                if (range_pos > 0):
                    # Piecewise terms: collect every (x, y) pair.
                    for row in term.split('(')[1:]:
                        #print 'row: ', row
                        range_pair = row.replace(' ', '')
                        range_end_pos = range_pair.find(')')
                        # NOTE(review): shadows the builtin range() inside
                        # this loop - consider renaming.
                        range = range_pair[:range_end_pos].split(',')
                        #print 'range: ', range
                        d.extend([range[0], range[1]])
                else:
                    # Constant term: (value, None).
                    d.extend([term[eq_pos + 2 : end_pos], None])
                if (len(term_name) > 0):
                    #print 'd: ', d
                    Model.terms.append(Term(v[0], term_name, d))
        var_inputs_part = self.data.split('VAR_INPUT')
        var_inputs = var_inputs_part[1].replace(':','').split('END_VAR')
        ##print 'var_inputs: ', var_inputs[0]
        var_outputs_part = self.data.split('VAR_OUTPUT')
        var_outputs = var_outputs_part[1].replace(':','').split('END_VAR')
        ##print 'var_outputs: ', var_outputs[0]
        # read inputs
        input_names = var_inputs[0].split('\n')
        for n in input_names:
            x = n.replace(' ', '')
            real_pos = x.find('REAL')
            range_pos = x.find('RANGE(')
            sep_pos = x.find('..')
            end_pos = x.find(')')
            if (real_pos > 0): # TODO save inputs and outputs only once
                add = True
                # Skip variables that were already collected.
                for inputObj in Model.inputs:
                    if inputObj.name == x[:real_pos]:
                        add = False
                        break
                if add == True:
                    Model.inputs.append(Input(x[:real_pos], x[range_pos + 6:sep_pos], x[sep_pos + 2: end_pos], None)) #weight))
        output_names = var_outputs[0].split('\n')
        for n in output_names:
            x = n.replace(' ', '')
            real_pos = x.find('REAL')
            range_pos = x.find('RANGE(')
            sep_pos = x.find('..')
            end_pos = x.find(')')
            if (real_pos > 0):
                add = True
                for outputObj in Model.outputs:
                    if outputObj.name == x[:real_pos]:
                        add = False
                        break
                if add == True:
                    # Outputs get a default weight of 0.5.
                    Model.outputs.append(Output(x[:real_pos], x[range_pos + 6:sep_pos], x[sep_pos + 2: end_pos], 0.5))
        # Register one canvas Rule per input/output variable.
        for n in input_names:
            if len(n) > 0:
                rule = n.replace(' ', '')
                real_pos = rule.find('REAL')
                rule_str = rule[:real_pos]
                Model.rules.append(Rule(rule_str,INPUT_RULE));
        for n in output_names:
            if len(n) > 0:
                rule = n.replace(' ', '')
                real_pos = rule.find('REAL')
                rule_str = rule[:real_pos]
                Model.rules.append(Rule(rule_str,OUTPUT_RULE));
        self.ShowRuleEngine()
        self.Canvas.ZoomToBB()
    def ShowRuleEngine(self, event=None):#, isRun=False):
        """Redraw the rule-engine diagram on the FloatCanvas.

        Draws one red rectangle per INPUT_RULE (stacked vertically on the
        left), wires them via green lines into a single cyan OUTPUT_RULE
        rectangle, and — when Model.isRun is set — annotates the wires with
        the last experiment's input values and the first computed output.
        Resets Model.isRun to False when done.
        """
        print 'ShowRuleEngine() isRun', Model.isRun
        wx.GetApp().Yield(True)
        self.UnBindAllMouseEvents()
        Canvas = self.Canvas
        Canvas.InitAll()
        # Layout constants: box size (w, h), wire lengths (dx, dy), origin.
        w, h = WIDTH, HEIGHT
        dx = DX
        dy = DY
        x, y = X_START, Y_START
        FontSize = FONT_SIZE
        if self.filename != None:
            self.Log('Opened rule engine: ' + self.filename + "\n")
        #print Model.rules
        input_rules_count = 0;
        # First pass: draw one rectangle per input rule.
        for n in Model.rules:
            str = n.name  # NOTE(review): shadows the builtin str inside this loop
            ##print 'str:',str,';'
            if len(str) > 0 and n.type == INPUT_RULE:
                #print 'n: ', n
                rule_str = n.name.replace('_',' ')
                x = X_START
                y += dy
                color = "RED"
                R = Canvas.AddRectangle((x, y), (w, h), LineWidth = 2, FillColor = color)
                L1 = Canvas.AddLine(( (x-dx, y+h/2), (x, y+h/2) ), LineWidth = LINE_WIDTH, LineColor = "Green") # line before rule
                L2 = Canvas.AddLine(( (x+w, y+h/2), (x+w+dx*2, y+h/2) ), LineWidth = LINE_WIDTH, LineColor = "Green") # line after rule
                R.Name = rule_str
                R.Bind(FloatCanvas.EVT_FC_RIGHT_DOWN, self.RectGotHitRight)
                R.Bind(FloatCanvas.EVT_FC_LEFT_DOWN, self.RectGotHitLeft)
                #Canvas.AddText("L and R Click", (x, y), Size = FontSize, Position = "bl")
                Canvas.AddText(rule_str, (x, y+h), Size = FontSize, Position = "tl")
                if Model.isRun == True:
                    #print 'isRun', Model.isRun
                    # Experiment inputs come as one ';'-separated row; the
                    # i-th value belongs to the i-th input rule drawn.
                    inputValues = Model.exp_values[0].split(';')
                    inputValue = inputValues[input_rules_count]
                    Canvas.AddText(inputValue, (x-dx, y+h/2+10), Size = FontSize, Position = "tl", Color = "White")
                input_rules_count = input_rules_count+1;
        L3 = Canvas.AddLine(( (x+w+dx*2, y+h/2), (x+w+dx*2, Y_START+dy+h/2) ), LineWidth = LINE_WIDTH, LineColor = "Green") # vertical line
        # arrow before output rule
        L4 = Canvas.AddLine(( (x+w+dx*2,(y+h+dy+Y_START)/2), (x+2*w, (y+h+dy+Y_START)/2) ), LineWidth = LINE_WIDTH, LineColor = "Green") # line after rule
        #print 'y: ', y
        # Second pass: draw the output-rule rectangle centred on the column.
        for n in Model.rules:
            str = n.name
            if len(str) > 0 and n.type == OUTPUT_RULE:
                #print 'n: ', n
                rule_str = n.name.replace('_',' ')
                x = 2*w
                y = HEIGHT+dy*(input_rules_count+1)/2
                color = "CYAN"
                #print 'y2: ', y
                R = Canvas.AddRectangle((x, y), (w, h), LineWidth = 2, FillColor = color)
                R.Name = rule_str
                R.Bind(FloatCanvas.EVT_FC_RIGHT_DOWN, self.RectGotHitRight)
                R.Bind(FloatCanvas.EVT_FC_LEFT_DOWN, self.RectGotHitLeft)
                Canvas.AddText(rule_str, (x, y+h), Size = FontSize, Position = "tl")
                if Model.isRun == True:
                    # Only the first computed output value is displayed.
                    outputValue = Model.outputs[0]
                    print 'outputValue', outputValue
                    res = repr(outputValue)
                    Canvas.AddText(res, (x+w+2*dx-10, y+h/2+10), Size = FontSize, Position = "tl", Color = "White")
                # arrow after output rule
                Canvas.AddArrow((x+w,y+h/2),Length = 2*dx, Direction = 90,LineWidth = LINE_WIDTH, LineColor = "Green", ArrowHeadAngle = 50)
        self.Canvas.ZoomToBB()
        Model.isRun = False
    def RectMoveLeft(self,Object):
        # Hit callback: shift all movable rectangles one step to the left.
        self.MoveRects("left")
    def RectMoveRight(self,Object):
        # Hit callback: shift all movable rectangles one step to the right.
        self.MoveRects("right")
    def RectMoveUp(self,Object):
        # Hit callback: shift all movable rectangles one step up.
        self.MoveRects("up")
    def RectMoveDown(self,Object):
        # Hit callback: shift all movable rectangles one step down.
        self.MoveRects("down")
def MoveRects(self, Dir):
for Object in self.MovingRects:
X,Y = Object.XY
if Dir == "left": X -= 10
elif Dir == "right": X += 10
elif Dir == "up": Y += 10
elif Dir == "down": Y -= 10
Object.SetPoint((X,Y))
self.Canvas.Draw()
    def PointSetGotHit(self, Object):
        # Hit callback for point sets: only logs which object was hit.
        self.Log(Object.Name + " Got Hit\n")
    def RectGotHit(self, Object):
        # Generic rectangle hit callback: only logs the hit.
        self.Log(Object.Name + " Got Hit\n")
    def RectGotHitRight(self, Object):
        # Right-click on a rule rectangle: only logs the hit.
        self.Log(Object.Name + " Got Hit With Right\n")
    def RectGotHitLeft(self, Object):
        """Left-click on a rule rectangle: open the rule-edit dialog and,
        on OK, push renamed terms, removed terms and edited ranges back
        into Model.terms for this rule.
        """
        self.Log(Object.Name + " Got Hit with Left\n")
        dlg = RuleEditDialog(self, -1, Object.Name)
        dlg.SetTitle(Object.Name)
        if dlg.ShowModal() == wx.ID_OK:
            rowsCount = len(dlg.ranges)
            print 'ok: ', rowsCount
            #origTerms = range(rowsCount)
            # Pass 1: apply term renames in dialog order.
            termIdx = 0
            for term in Model.terms:
                if term.rule == Object.Name.replace(' ', '_'):
                    #origTerms[termIdx] = term.name
                    curTerm = dlg.terms[termIdx].GetValue()
                    if term.name != curTerm:
                        term.name = curTerm
                    termIdx = termIdx + 1
            # Pass 2: per dialog row, either remove the term or update its range.
            for row in range(rowsCount):
                termText = dlg.terms[row].GetValue()
                removeFlag = dlg.removeFlags[row].GetValue()
                positiveFlag = dlg.positiveFlags[row].GetValue()
                print 'removeFlag: ', removeFlag
                print 'positiveFlag: ', positiveFlag
                if removeFlag == True:
                    resTerm = None
                    for term in Model.terms:
                        if term.rule == Object.Name.replace(' ', '_') and termText == term.name:
                            resTerm = term
                            break
                    if resTerm is not None:
                        Model.terms.remove(resTerm)
                else:
                    range1 = dlg.ranges[0][0].GetValue()
                    self.Log('range00: ' + range1)
                    d = []
                    if (len(dlg.ranges[row]) > 1):
                        for field in dlg.ranges[row]:
                            print 'field: ', field.GetValue()
                            d.extend([field.GetValue()])
                    else:
                        # NOTE(review): this stores the widget object itself,
                        # not .GetValue() as the branch above does -- verify
                        # whether dlg.ranges[row][0].GetValue() was intended.
                        d.extend([dlg.ranges[row][0], None])
                    print 'd: ', d
                    for term in Model.terms:
                        if term.rule == Object.Name.replace(' ', '_') and termText == term.name:
                            term.range_dict = d
        dlg.Destroy()
    def RectMouseOver(self, Object):
        # Hover-enter callback: only logs.
        self.Log("Mouse entered: " + Object.Name)
    def RectMouseLeave(self, Object):
        # Hover-leave callback: only logs.
        self.Log("Mouse left " + Object.Name)
    def binding2(self, event):
        # Demo binding for the text-box widget: only logs.
        self.Log("I'm the TextBox")
    def SelectPointHit(self, Point):
        # Remember the last canvas point the user clicked.
        self.Log("Point Num: %i Hit"%Point.VerticeNum)
        self.SelectedPoint = Point
    def OnCmdButton(self,event):
        """Toolbar/button dispatcher.

        Handles ADD_RULE_ID (add an input rule plus empty terms),
        REMOVE_RULE_ID (multi-choice delete of input rules), EXPORT_ID
        (write the model out as an FCL file) and RUN_ID (evaluate the FCL
        file with pyfuzzy against the first experiment row).
        """
        self.Log("Click on button with Id %d\n" %event.GetId())
        if event.GetId() == ADD_RULE_ID:
            # --- Add a new input rule with TERM_NUMBER empty terms ---
            dlg = wx.TextEntryDialog(
                    self, 'Rule name',
                    'Add Rule', 'Python')
            dlg.SetValue("New rule")
            if dlg.ShowModal() == wx.ID_OK:
                self.Log('You entered: %s\n' % dlg.GetValue())
                Model.rules.append(Rule(dlg.GetValue(),INPUT_RULE));
                for i in range(TERM_NUMBER):
                    d = ['0'] * RANGE_NUMBER
                    ruleName = dlg.GetValue().replace(" ", "_")
                    print 'empty terms: ', d, ' for rule: ', ruleName
                    Model.terms.append(Term(ruleName, 'Term' + str(i), d))
            dlg.Destroy()
            self.ShowRuleEngine()
        elif event.GetId() == REMOVE_RULE_ID:
            # --- Remove selected input rules ---
            lst = []
            for n in Model.rules:
                if len(n.name) > 0 and n.type == INPUT_RULE:
                    lst.append(n.name)
            dlg = wx.MultiChoiceDialog( self,
                                       "Select rules to delete from\nthis list",
                                       "wx.MultiChoiceDialog", lst)
            if (dlg.ShowModal() == wx.ID_OK):
                selections = dlg.GetSelections()
                strings = [lst[x] for x in selections]
                self.Log("Selections: %s -> %s\n" % (selections, strings))
                # NOTE(review): removing from Model.rules while iterating it
                # can skip adjacent entries -- verify with multi-selection.
                for n in Model.rules:
                    if n.name in strings:
                        Model.rules.remove(n)
            dlg.Destroy()
            self.ShowRuleEngine()
        elif event.GetId() == EXPORT_ID:
            # --- Export the whole model as an FCL function block ---
            dlg = wx.FileDialog(self, "Choose an FCL import file for rule engine", "", "import.fcl", "*.fcl", wx.OPEN)
            if dlg.ShowModal() == wx.ID_OK:
                self.path = dlg.GetPath()
                self.Log("selected import path: " + dlg.GetPath())
                f = open(dlg.GetPath(),'w')
                f.write('FUNCTION_BLOCK fb\n') # name is important for standard and pyfuzzy
                f.write('\n')
                f.write('VAR_INPUT\n')
                for i in Model.inputs:
                    f.write('\t' + i.name + ': REAL; (* RANGE(' + i.min_range + ' .. ' + i.max_range + ') *)\n')
                f.write('END_VAR\n\n')
                f.write('VAR_OUTPUT\n')
                for i in Model.outputs:
                    f.write('\t' + i.name + ': REAL; (* RANGE(' + i.min_range + ' .. ' + i.max_range + ') *)\n')
                f.write('END_VAR\n\n')
                # The single output rule names the DEFUZZIFY block and the
                # consequent of every generated RULE line.
                outputFileName = ''
                for n in Model.rules:
                    if len(n.name) > 0 and n.type == OUTPUT_RULE:
                        outputFileName = n.name
                        break
                positiveDecisionTerm = ''
                negativeDecisionTerm = ''
                # FUZZIFY block per input rule.
                for n in Model.rules:
                    if len(n.name) > 0 and n.type == INPUT_RULE:
                        f.write('\n')
                        f.write('FUZZIFY\t' + n.name + '\n')
                        print 'fuzzify', n.name
                        for term in Model.terms:
                            if term.rule == n.name.replace(' ', '_'):
                                f.write('\tTERM\t' + term.name + '\t:=\t')
##                                print 'output file name', outputFileName, 'term rule', term.rule, 'term.name', term.name
                                if len(term.range_dict) > 2:
                                    # Pairs of values become "(x, y)" points.
                                    res = ''
                                    for number, val in enumerate(term.range_dict):
                                        if number % 2 != 0:
                                            res = res + ', ' + val + ') '
                                        else:
                                            res = res + '(' + val
                                    f.write(res + ' ;\n')
                                else:
                                    f.write(term.range_dict[0] + ' ;\n')
                        f.write('END_FUZZIFY\n')
                # DEFUZZIFY block for the output rule; also learns which term
                # is the positive/negative decision ('Not' marks negative).
                for n in Model.rules:
                    if len(n.name) > 0 and n.type == OUTPUT_RULE:
                        f.write('\n')
                        f.write('DEFUZZIFY\t' + n.name + '\n')
                        print 'defuzzify', n.name
                        for term in Model.terms:
                            if term.rule == n.name.replace(' ', '_'):
                                f.write('\tTERM\t' + term.name + '\t:=\t')
                                print 'output file name', outputFileName, 'term rule', term.rule, 'term.name', term.name
                                if outputFileName == term.rule and 'Not' not in term.name:
                                    positiveDecisionTerm = term.name
                                if outputFileName == term.rule and 'Not' in term.name:
                                    negativeDecisionTerm = term.name
                                if len(term.range_dict) > 2:
                                    res = ''
                                    for number, val in enumerate(term.range_dict):
                                        if number % 2 != 0:
                                            res = res + ', ' + val + ') '
                                        else:
                                            res = res + '(' + val
                                    f.write(res + ' ;\n')
                                else:
                                    f.write(term.range_dict[0] + ' ;\n')
                        f.write('\tACCU : MAX;\n')
                        f.write('\tMETHOD : COGS;\n')
                        f.write('\tDEFAULT := 0;\n')
                        f.write('END_DEFUZZIFY\n')
                f.write('\n')
                print 'export decisions size: ', len(Model.decisions)
                decisions = []
                # Flatten all decision conditions into one "A IS B AND ..." string.
                curDecisionStr = ''
                for decision in Model.decisions:
                    print len(decision.condition)
                    for curCondition in decision.condition:
                        print 'condition: ', curCondition
                        curDecisionStr = curDecisionStr + curCondition[0] + ' IS ' + curCondition[1] + ' AND '
                f.write('RULEBLOCK first\n')
                f.write('\n')
                f.write('AND : MIN;\n')
                termList = range(len(Model.terms))
                for i, term in enumerate(Model.terms):
                    termList[i] = term.name
                import itertools
                # One RULE per combination of terms across the input rules;
                # combinations matching the decision string get the positive
                # decision term, all others the negative one.
                allterms = []
                for n in Model.rules:
                    if len(n.name) > 0 and n.type == INPUT_RULE:
                        ruleterms = []
                        for term in Model.terms:
                            if term.rule == n.name.replace(' ', '_'):
                                ruleterms.append(term.rule + ' IS ' + term.name)
                        allterms.append(ruleterms)
                print 'dec', positiveDecisionTerm, negativeDecisionTerm
                for idx, element in enumerate(itertools.product(*allterms)):
                    ruleStr = 'RULE ' + str(idx) + ': IF ' + str(element)[1:-1].replace(',', ' AND').replace('\'','') + ' THEN ' + outputFileName
###                    print 'ruleStr', ruleStr, 'curDecisionStr', curDecisionStr[:-4]
                    if curDecisionStr[:-4] in ruleStr:
###                        print 'positiveDecisionTerm'
                        ruleStr = ruleStr + ' IS ' + positiveDecisionTerm + ';\n'
                    else:
                        ruleStr = ruleStr + ' IS ' + negativeDecisionTerm + ';\n'
                    f.write(ruleStr)
                f.write('END_RULEBLOCK\n')
                f.write('END_FUNCTION_BLOCK\n')
                f.close()
            dlg.Destroy()
            self.ShowRuleEngine()
        elif event.GetId() == RUN_ID:
            # --- Evaluate the loaded FCL file with pyfuzzy ---
            print 'headers', Model.headers
            print 'experiment values', Model.exp_values
            import fuzzy.storage.fcl.Reader
            system = fuzzy.storage.fcl.Reader.Reader().load_from_file(self.filename)
            # preallocate input and output values
            values = Model.exp_values[0].split(';')
            my_input = {}
            my_output = {}
            for idx, header in enumerate(Model.headers):
                if len(header) > 0:
                    my_input[header] = int(values[idx])
            print 'my_input', my_input
            for n in Model.rules:
                if len(n.name) > 0 and n.type == OUTPUT_RULE:
                    print 'rule name', n.name, 'n.type', n.type
                    my_output[n.name] = 0
            print 'my_output', my_output
            # calculate
            system.calculate(my_input, my_output)
            # now use outputs
            print my_output
            Model.outputs = []
            Model.outputs.extend(my_output.values())
            Model.isRun = True
            self.ShowRuleEngine()#True)
        if event.GetId() == ID_PLOT:
            self.showPlot("Plot")
        # NOTE(review): 'dlg' is only bound in the ADD/REMOVE/EXPORT branches
        # (which already destroyed their dialog); reaching these two lines via
        # RUN_ID or ID_PLOT raises NameError. Verify whether they belong
        # inside the ID_PLOT branch.
        dlg.Destroy()
        self.ShowRuleEngine()
return DrawFrame
#---------------------------------------------------------------------------
if __name__ == "__main__":
    # Stand-alone mode: parse the demo selector from the command line and
    # run the tool in its own wx application loop.
    import wx
    # check options:
    import sys, getopt
    optlist, args = getopt.getopt(sys.argv[1:],'l',["all",
                                                    "hit",
                                                    "hitf",
                                                    "hide"])
    if not haveNumpy:
        raise ImportError(errorText)
    StartUpDemo = "hit" # the default
    if optlist:
        # Long options arrive as ('--name', ''); strip the leading dashes.
        StartUpDemo = optlist[0][0][2:]
    class DemoApp(wx.App):
        """
        How the tool works:

        Under the Draw menu, there are three options:

        *Clear: Clears the Canvas.
        """
        def __init__(self, *args, **kwargs):
            wx.App.__init__(self, *args, **kwargs)
        def OnInit(self):
            # Build the frame class lazily (it needs wx fully imported) and
            # dispatch to the start-up demo chosen on the command line.
            wx.InitAllImageHandlers()
            DrawFrame = BuildDrawFrame()
            frame = DrawFrame(None, -1, "Expert System Tool",wx.DefaultPosition,(900,700))
            self.SetTopWindow(frame)
            frame.Show()
            if StartUpDemo == "text":
                frame.TestText()
            elif StartUpDemo == "bitmap":
                frame.TestBitmap()
            elif StartUpDemo == "hit":
                frame.ReadRuleEngine()
            elif StartUpDemo == "hide":
                frame.HideTest()
            return True
    app = DemoApp(False)# put in True if you want output to go to it's own window.
    app.MainLoop()
else:
    # It's not running stand-alone, set up for wxPython demo.
    # don't need wxversion here.
    import wx
    if not haveNumpy:
        # Without numpy the tool cannot run: show a screenshot instead.
        ## TestPanel and runTest used for integration into wxPython Demo
        class TestPanel(wx.Panel):
            def __init__(self, parent, log):
                self.log = log
                wx.Panel.__init__(self, parent, -1)
                from wx.lib.floatcanvas.ScreenShot import getScreenShotBitmap
                note1 = wx.StaticText(self, -1, errorText)
                note2 = wx.StaticText(self, -1, "This is what the Expert System Tool can look like:")
                S = wx.BoxSizer(wx.VERTICAL)
                S.Add((10, 10), 1)
                S.Add(note1, 0, wx.ALIGN_CENTER)
                S.Add(note2, 0, wx.ALIGN_CENTER | wx.BOTTOM, 4)
                S.Add(wx.StaticBitmap(self,-1,getScreenShotBitmap()),0,wx.ALIGN_CENTER)
                S.Add((10, 10), 1)
                self.SetSizer(S)
                self.Layout()
    else:
        ## TestPanel and runTest used for integration into wxPython Demo
        class TestPanel(wx.Panel):
            def __init__(self, parent, log):
                self.log = log
                wx.Panel.__init__(self, parent, -1)
                note1 = wx.StaticText(self, -1, "The Expert System Tool needs")
                note2 = wx.StaticText(self, -1, "a separate frame")
                b = wx.Button(self, -1, "Open Demo Frame Now")
                b.Bind(wx.EVT_BUTTON, self.OnButton)
                S = wx.BoxSizer(wx.VERTICAL)
                S.Add((10, 10), 1)
                S.Add(note1, 0, wx.ALIGN_CENTER)
                S.Add(note2, 0, wx.ALIGN_CENTER | wx.BOTTOM, 5)
                S.Add(b, 0, wx.ALIGN_CENTER | wx.ALL, 5)
                S.Add((10, 10), 1)
                self.SetSizer(S)
                self.Layout()
            def OnButton(self, evt):
                # Open the tool in its own frame from the demo notebook.
                DrawFrame = BuildDrawFrame()
                frame = DrawFrame(None, -1, "Expert System Tool",wx.DefaultPosition,(500,500))
                frame.Show()
                frame.ReadRuleEngine()
    def runTest(frame, nb, log):
        # Entry point used by the wxPython demo framework.
        win = TestPanel(nb, log)
        return win
    # import to get the doc
    from wx.lib import floatcanvas
    overview = floatcanvas.__doc__
| openpreserve/fuzzy-expert-system | es.py | Python | apache-2.0 | 64,869 |
# !/usr/bin/env python
#
# Hornet - SSH Honeypot
#
# Copyright (C) 2015 Aniket Panse <aniketpanse@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gevent.monkey
gevent.monkey.patch_all()
import paramiko
from hornet.main import Hornet
from hornet.tests.commands.base import BaseTestClass
class HornetTests(BaseTestClass):
    """Functional tests for the honeypot's emulated ``ssh`` command.

    The four tests previously duplicated the whole start/connect/login
    sequence verbatim; the shared flow now lives in private helpers so each
    test only states the command and credentials it exercises.
    """

    def _start_honeypot(self):
        """Start a Hornet instance and block until its server port is bound."""
        honeypot = Hornet(self.working_dir)
        honeypot.start()
        while honeypot.server.server_port == 0:  # wait until the server is ready
            gevent.sleep(0)
        return honeypot

    def _open_shell(self, port):
        """Connect with paramiko, open a shell, drain the banner and assert
        that an interactive ``$ `` prompt is shown.  Returns the channel.
        """
        client = paramiko.SSHClient()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        # If we log in properly, this should raise no errors
        client.connect('127.0.0.1', port=port, username='testuser',
                       password='testpassword')
        channel = client.invoke_shell()
        while not channel.recv_ready():
            gevent.sleep(0)  # :-(
        welcome = ''
        while channel.recv_ready():
            welcome += channel.recv(1)
        prompt = welcome.split('\r\n')[-1]
        self.assertTrue(prompt.endswith('$ '))
        return channel

    def _recv_until(self, channel, suffix):
        """Read one byte at a time until the accumulated output ends with
        *suffix*; returns everything read.
        """
        output = ''
        while not output.endswith(suffix):
            output += channel.recv(1)
        return output

    def _check_ssh_login(self, command, password):
        """Run *command* at the emulated shell, answer the password prompt
        with *password*, and assert the test01 welcome banner plus a fresh
        prompt appear.
        """
        honeypot = self._start_honeypot()
        channel = self._open_shell(honeypot.server.server_port)
        # Now send the ssh command
        channel.send(command + '\r\n')
        while not channel.recv_ready():
            gevent.sleep(0)  # :-(
        self._recv_until(channel, 'Password:')
        # Now send the password
        channel.send(password + '\r\n')
        output = self._recv_until(channel, '$ ')
        self.assertTrue('Welcome to test01 server' in output)
        self.assertTrue(output.endswith('$ '))
        honeypot.stop()

    def test_ssh_no_username(self):
        """ Tests if ssh command works when no username is provided in host string

            eg: $ ssh test01
        """
        self._check_ssh_login('ssh test01', 'passtest')

    def test_ssh_with_username(self):
        """ Tests if ssh command works when username is provided in host string

            eg: $ ssh root@test01
        """
        self._check_ssh_login('ssh root@test01', 'toor')

    def test_ssh_with_username_param(self):
        """ Tests if ssh command works when username is provided as a parameter

            eg: $ ssh test01 -l root
        """
        self._check_ssh_login('ssh test01 -l root', 'toor')

    def test_ssh_bad_hostname(self):
        """ Tests if ssh command returns correct string if host doesn't exist

            eg: $ ssh blahblah
        """
        honeypot = self._start_honeypot()
        channel = self._open_shell(honeypot.server.server_port)
        # Now send the ssh command to a host that does not exist
        channel.send('ssh blahblah\r\n')
        while not channel.recv_ready():
            gevent.sleep(0)  # :-(
        output = self._recv_until(channel, '$ ')
        self.assertTrue('Name or service not known' in output)
        self.assertTrue(output.endswith('$ '))
        honeypot.stop()
| czardoz/hornet | hornet/tests/commands/test_ssh.py | Python | gpl-3.0 | 6,905 |
import h5py    # HDF5 support
import os
import glob
import numpy as n
from scipy.interpolate import interp1d
import sys
from astropy.cosmology import FlatLambdaCDM
import astropy.units as u
# Multidark Planck cosmology used for the luminosity distances below.
cosmoMD = FlatLambdaCDM(H0=67.77*u.km/u.s/u.Mpc, Om0=0.307115, Ob0=0.048206)
# 'create' writes a fresh dataset into the light-cone file,
# 'update' overwrites an existing one in place.
#status = 'create'
status = 'update'
# Path of the light-cone HDF5 file to amend (opened read-write).
path_to_lc = sys.argv[1]
#path_to_lc = '/data17s/darksim/MD/MD_1.0Gpc/h5_lc/lc_cluster_remaped_position_L3.hdf5'
f = h5py.File(path_to_lc, 'r+')
# Selected objects: flagged in the selection mask and below redshift 3.
# NOTE(review): .value is the legacy h5py dataset accessor (pre-3.0 API).
is_gal = (f['/sky_position/selection'].value)&(f['/sky_position/redshift_R'].value<3.)
z = f['/sky_position/redshift_S'].value[is_gal]
lx = f['/cluster_data/log_LceX_05_24'].value[is_gal]
percent_observed = 1.
lx_absorbed_05_24 = n.log10(10**lx * percent_observed)
d_L = cosmoMD.luminosity_distance(z)
dl_cm = (d_L.to(u.cm)).value
# Convert X-ray luminosity (0.5-2.4 keV) to observed flux at each object.
adjusting_factor = 0.35 # accounts for absorption for now !
fx_05_24 = 10**(lx_absorbed_05_24-adjusting_factor) / (4 * n.pi * dl_cm**2.)
# Unselected entries keep the sentinel value -9999.
fx_05_24_out = n.ones_like(f['/sky_position/redshift_S'].value)*-9999.
fx_05_24_out[is_gal] = fx_05_24
# NOTE(review): 'rxay_flux_05_24' looks like a typo for 'xray_flux_05_24',
# but it is the on-disk dataset name -- renaming would break readers.
if status == 'create':
	f['/cluster_data'].create_dataset('rxay_flux_05_24', data = fx_05_24_out )
if status == 'update':
	f['/cluster_data/rxay_flux_05_24'][:] = fx_05_24_out
f.close()
| JohanComparat/pyEmerge | bin_cluster/lc_add_clusters.py | Python | unlicense | 1,223 |
from eve import Eve

# Eve builds the REST application from settings.py in the working directory.
app = Eve()

if __name__ == '__main__':
    # Development entry point: serve with Flask's built-in server.
    app.run()
| newmediadenver/evegenie | run.py | Python | mit | 75 |
#============================================================================================================
# CWE-120: Buffer Copy without Checking Size of Input
#
# Vuln Info: A trivial way to cause this vulnerability is using the gets() function which is not secure.
# Ex:
# bytes_received = gets(input); <--Bad
# bytes_received = receive_until(input, sizeof(input), '\n'); <--Good
#
# Methodology:
# 1. Find gets instruction
# 2. There's a vulnerability
#
# Try it on: REMATCH_1--Hat_Trick--Morris_Worm
#
#============================================================================================================
import sys
import grakn
def main(keyspace):
    """Scan a Grakn keyspace for CWE-120: any call-site of gets()/cgc_gets().

    Looks up the addresses of the unsafe functions, then reports every
    MLIL_CALL_SSA instruction whose constant-pointer operand targets one of
    those addresses.
    """
    client = grakn.Grakn(uri='localhost:48555')
    with client.session(keyspace=keyspace).transaction(grakn.TxType.READ) as graph:
        # Check for gets() function
        # Get address of function to use for next query
        func_names = ['gets', 'cgc_gets']
        func_addrs = []
        for function_name in func_names:
            query1 = 'match $func isa function, has func-name "{}", has asm-address $a; get $a;'.format(function_name)
            # Addresses are stored as hex strings; parse with base 16.
            func_addrs += [int(result.value(), 16) for result in graph.query(query1).collect_concepts()]
        # If the function is found continue query
        for func_addr in func_addrs:
            # Get all instructions that have function name
            # NOTE(review): 'isa"MLIL_CONST_PTR"' lacks a space after 'isa'
            # (and the first pattern chains two 'has' without a comma) --
            # confirm against the Graql grammar this was tested with.
            query2 = 'match $x has operation-type "MLIL_CALL_SSA" has asm-address $a; $y isa"MLIL_CONST_PTR"; ($x,$y); $z isa constant, has constant-value {}; ($y,$z); get $a;'.format(func_addr)
            result2 = graph.query(query2).collect_concepts()
            # If there are instructions that use the function check the instructions
            for instr in result2:
                ins_addr = instr.value()
                print("CWE-120: Buffer Copy Without Checking Size of Input at {}".format(ins_addr))
if __name__ == "__main__":
    # Optional first CLI argument selects the keyspace; default is "grakn".
    if len(sys.argv) > 1:
        keyspace = sys.argv[1]
    else:
        keyspace = "grakn"
    main(keyspace)
| cetfor/PaperMachete | queries/cwe_120_v1.py | Python | mit | 2,076 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Slack
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from celery import shared_task
@shared_task
def get_result(scraper, additional_data):
    """Celery task: run the scraper and return (result, additional_data).

    *additional_data* is passed through untouched so callers can correlate
    the asynchronous result with their own context.
    """
    return scraper.get_result(), additional_data
import urllib
try:
import json as simplejson
except ImportError:
try:
import simplejson
except ImportError:
from django.utils import simplejson
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.test.client import Client, RequestFactory
from django.utils.importlib import import_module
from mock import patch
from social_auth.views import complete
class DumbResponse(object):
    """
    Stand-in for the file-like object returned by urllib2.urlopen(),
    exposing only the pieces the tests need: read() and the source url.
    """
    def __init__(self, data_str, url=None):
        # Canned payload handed back verbatim by read().
        self.data_str = data_str
        # Optional URL this fake response claims to originate from.
        self.url = url

    def read(self):
        """Return the entire canned payload, like a real response body."""
        return self.data_str
class NoBackendError(Exception):
    """
    Raised when a client attempts to login with an unsupported backend.
    """
    pass
class SocialClient(Client):
    """
    Test client to login/register a user.

    Does so by mocking the OAuth provider's HTTP responses, so no network
    traffic happens.  Supports the 'facebook', 'google' and 'linkedin'
    backends (see the ``backends`` table in :meth:`login`).
    """

    @patch('social_auth.backends.facebook.FacebookAuth.enabled')
    @patch('social_auth.utils.urlopen')
    def login(self, user, mock_urlopen, mock_facebook_enabled, backend='facebook'):
        """
        Login or Register a facebook user.

        If the user has never logged in then they get registered and logged in.

        If the user has already registered, then they are logged in.

        user: dict

        backend: 'facebook'

        example user:

            {
                'first_name': 'Django',
                'last_name': 'Reinhardt',
                'verified': True,
                'name': 'Django Reinhardt',
                'locale': 'en_US',
                'hometown': {
                    'id': '12345678',
                    'name': 'Any Town, Any State'
                },
                'expires': '4812',
                'updated_time': '2012-01-29T19:27:32+0000',
                'access_token': 'dummyToken',
                'link': 'http://www.facebook.com/profile.php?id=1234',
                'location': {
                    'id': '108659242498155',
                    'name': 'Chicago, Illinois'
                },
                'gender': 'male',
                'timezone': -6,
                'id': '1234',
                'email': 'user@domain.com'
            }
        """
        token = 'dummyToken'

        # Per-backend sequence of canned HTTP bodies, in the order the
        # backend fetches them (token exchange first, then profile data;
        # linkedin adds an extra request-token step and an XML profile).
        backends = {
            'facebook': (
                urllib.urlencode({
                    'access_token': token,
                    'expires': 3600,
                }),
                simplejson.dumps(user),
            ),
            'google': (
                simplejson.dumps({
                    "access_token": token,
                    "token_type": "Bearer",
                    "expires_in": 3600,
                }),
                simplejson.dumps(user),
            ),
            'linkedin': (
                urllib.urlencode({
                    'oauth_token': token,
                    'oauth_token_secret': token,
                    'oauth_callback_confirmed': 'true',
                    'xoauth_request_auth_url': (
                        'https://api.linkedin.com/uas/oauth/authorize'),
                    'oauth_expires_in': 3600,
                }),
                urllib.urlencode({
                    'oauth_token': token,
                    'oauth_token_secret': token,
                    'oauth_expires_in': 3600,
                    'oauth_authorization_expires_in': 3600,
                }),
                (('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n'
                  '<person>\n'
                  '  <id>{id}</id>\n'
                  '  <email-address>{email}</email-address>\n'
                  '  <first-name>{first_name}</first-name>\n'
                  '  <last-name>{last_name}</last-name>\n'
                  '</person>\n').format(**user)),
            ),
        }

        if backend not in backends:
            raise NoBackendError("%s is not supported" % backend)

        """
        mock out urlopen
        """
        # Each urlopen() call pops the next canned response off this list.
        mock_urlopen.side_effect = [
            DumbResponse(r) for r in backends[backend]
        ]
        # make it work when no FACEBOOK_APP_ID declared
        mock_facebook_enabled.return_value = True

        # Simulate the provider redirecting back to the completion view.
        factory = RequestFactory()
        request = factory.post('', {'code': 'dummy',
                                    'redirect_state': 'dummy'})
        engine = import_module(settings.SESSION_ENGINE)
        if self.session:
            request.session = self.session
        else:
            request.session = engine.SessionStore()
        request.user = AnonymousUser()
        request.session['facebook_state'] = 'dummy'

        # make it happen.
        redirect = complete(request, backend)
        request.session.save()

        # Set the cookie for this session.
        session_cookie = settings.SESSION_COOKIE_NAME
        self.cookies[session_cookie] = request.session.session_key
        cookie_data = {
            'max-age': None,
            'path': '/',
            'domain': settings.SESSION_COOKIE_DOMAIN,
            'secure': settings.SESSION_COOKIE_SECURE or None,
            'expires': None,
        }
        self.cookies[session_cookie].update(cookie_data)

        return True
| jdavidagudelo/django-social-auth-corrected | social_auth/tests/client.py | Python | bsd-3-clause | 5,129 |
#version 1:
from re import search
import rospy
from std_msgs.msg import String
from sys import exit
pub = rospy.Publisher('control_action', String);#,queue_size=10);
state = ""
def callback(data):
    """Subscriber callback: cache the latest plant state message globally."""
    global state
    state = data.data
def initialize_handshake(HOST, PORT): # setup socket and start the connection to the model
    """Initialise the ROS node and subscribe to the plant state topic.

    HOST and PORT are unused here; they are kept for interface parity with
    the socket-based client variant.
    """
    rospy.init_node('controller', anonymous=True)
    rospy.Subscriber('plant_state', String, callback)
def process(HOST, PORT, GET,client_socketport=None):
    """Publish a control action and return the latest cached plant state.

    The `[...]` portion of the cached response, if present, is extracted
    and returned. On any failure the time value embedded in *GET*
    ("...time=<value>&...") is returned as a fallback.
    HOST, PORT and client_socketport are unused (interface parity with the
    socket-based client).
    """
    if rospy.is_shutdown():
        exit(0);
    try:
        pub.publish(GET+"&]")
        # print "send : "+GET
        response=state
        # response = rospy.wait_for_message('plant_state', String, timeout=.150).data
        # print "response : "+response
        # Keep only the last bracketed payload, e.g. "[a b c]" -> "a b c".
        m = search('\[(.+?)\]', response);
        if m:
            response = m.groups()[-1];
        data = response.split()
    except Exception, err:
        rospy.logwarn("[Warning] in ROS client send and receive :%s\n " % err)
        # Fall back to echoing the request's time= parameter.
        response = (GET.split("time=")[1]).split("&")[0]
    return response
if __name__ == "__main__":
    # Smoke test: bring up the node and print the response to "/init".
    initialize_handshake(None, None);
    print process(None, None, "/init");
| slremy/testingpubsub | myBallPlate/rosclient.py | Python | mit | 1,166 |
# Skulpt stub: the real htmlentitydefs module has not been ported yet, so
# importing it fails loudly rather than silently misbehaving.
raise NotImplementedError("htmlentitydefs is not yet implemented in Skulpt")
| ArcherSys/ArcherSys | skulpt/src/lib/htmlentitydefs.py | Python | mit | 77 |
# -*- coding: utf-8 -*-
import unittest
class FunctionalTestCase(unittest.TestCase):
    """Placeholder end-to-end test: sign a CloudFront URL backed by S3.

    The scenario is not implemented yet, so the test is skipped explicitly.
    """

    # ``unittest.skip`` is a decorator *factory* and must be called with a
    # reason string.  The previous bare ``@unittest.skip`` passed the test
    # function itself as the "reason" and replaced the method with the inner
    # decorator, so the test was never registered as skipped.
    @unittest.skip("functional AWS scenario not implemented yet")
    def test(self):
        # TODO: create S3 bucket
        # TODO: create CloudFront distribution
        # TODO: assign S3 bucket to CloudFront distribution
        #
        # TODO: put_object to created S3 bucket
        # TODO: run Signer
        # TODO: check downloadable
        #
        # TODO: delete CloudFront distribution
        # TODO: delete S3 bucket
        pass
| gjo/cloudfrontsigner | cloudfrontsigner/tests/test_functional.py | Python | bsd-3-clause | 492 |
from lcapy import Circuit

# Series RLC circuit driven by a 10 V step source; the ';'-suffixes are
# lcapy schematic layout hints and do not affect the electrical model.
cct = Circuit("""
V 1 0 step 10; down
L 1 2 1e-3; right, size=1.2
C 2 3 1e-4; right, size=1.2
R 3 0_1 1; down
W 0 0_1; right
""")

import numpy as np
# Evaluate the resistor's transient voltage over the first 10 ms.
t = np.linspace(0, 0.01, 1000)
vr = cct.R.v.evaluate(t)

from matplotlib.pyplot import subplots, savefig
fig, ax = subplots(1)
ax.plot(t, vr, linewidth=2)
ax.set_xlabel('Time (s)')
ax.set_ylabel('Resistor voltage (V)')
ax.grid(True)

# Write the plot used in the documentation.
savefig('circuit-VRLC2-vr.png')
| mph-/lcapy | doc/examples/netlists/circuit-VRLC2-vr.py | Python | lgpl-2.1 | 442 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import copy
import uuid
import mox
from neutronclient.common import exceptions
from neutronclient.v2_0 import client
from oslo.config import cfg
import six
from nova.compute import flavors
from nova.conductor import api as conductor_api
from nova import context
from nova import exception
from nova.network import model
from nova.network import neutronv2
from nova.network.neutronv2 import api as neutronapi
from nova.network.neutronv2 import constants
from nova.openstack.common import jsonutils
from nova.openstack.common import local
from nova import test
from nova import utils
# Module-level handle on the global oslo.config options used by these tests.
CONF = cfg.CONF
#NOTE: Neutron client raises Exception which is discouraged by HACKING.
# We set this variable here and use it for assertions below to avoid
# the hacking checks until we can make neutron client throw a custom
# exception class instead.
NEUTRON_CLIENT_EXCEPTION = Exception
class MyComparator(mox.Comparator):
    """mox comparator performing a deep comparison against an expected value.

    Dicts are compared key-by-key (recursively), lists and tuples are
    compared as equal-length unordered collections, and every other type
    falls back to plain ``==``.
    """

    def __init__(self, lhs):
        self.lhs = lhs

    def _com_dict(self, lhs, rhs):
        # Same size, and every expected key present with a deep-equal value.
        if len(lhs) != len(rhs):
            return False
        return all(key in rhs and self._com(value, rhs[key])
                   for key, value in lhs.iteritems())

    def _com_list(self, lhs, rhs):
        # Unordered membership check; sufficient for these test fixtures.
        if len(lhs) != len(rhs):
            return False
        return all(item in rhs for item in lhs)

    def _com(self, lhs, rhs):
        # Dispatch on the expected value's type, guarding that the actual
        # value has a compatible type before recursing.
        if lhs is None:
            return rhs is None
        if isinstance(lhs, dict):
            return isinstance(rhs, dict) and self._com_dict(lhs, rhs)
        if isinstance(lhs, list):
            return isinstance(rhs, list) and self._com_list(lhs, rhs)
        if isinstance(lhs, tuple):
            return isinstance(rhs, tuple) and self._com_list(lhs, rhs)
        return lhs == rhs

    def equals(self, rhs):
        # Entry point mox calls when matching a recorded argument.
        return self._com(self.lhs, rhs)

    def __repr__(self):
        return str(self.lhs)
class TestNeutronClient(test.TestCase):
    """Tests for neutronv2.get_client() authentication behaviour."""

    def test_withtoken(self):
        """A context carrying an auth token yields a token-auth client."""
        self.flags(neutron_url='http://anyhost/')
        self.flags(neutron_url_timeout=30)
        my_context = context.RequestContext('userid',
                                            'my_tenantid',
                                            auth_token='token')
        self.mox.StubOutWithMock(client.Client, "__init__")
        client.Client.__init__(
            auth_strategy=None,
            endpoint_url=CONF.neutron_url,
            token=my_context.auth_token,
            timeout=CONF.neutron_url_timeout,
            insecure=False,
            ca_cert=None).AndReturn(None)
        self.mox.ReplayAll()
        neutronv2.get_client(my_context)

    def test_withouttoken(self):
        """A context with no token and no admin rights is rejected."""
        my_context = context.RequestContext('userid', 'my_tenantid')
        self.assertRaises(exceptions.Unauthorized,
                          neutronv2.get_client,
                          my_context)

    def test_withtoken_context_is_admin(self):
        """An admin-flagged context still authenticates with its own token."""
        self.flags(neutron_url='http://anyhost/')
        self.flags(neutron_url_timeout=30)
        my_context = context.RequestContext('userid',
                                            'my_tenantid',
                                            auth_token='token',
                                            is_admin=True)
        self.mox.StubOutWithMock(client.Client, "__init__")
        client.Client.__init__(
            auth_strategy=None,
            endpoint_url=CONF.neutron_url,
            token=my_context.auth_token,
            timeout=CONF.neutron_url_timeout,
            insecure=False,
            ca_cert=None).AndReturn(None)
        self.mox.ReplayAll()
        # Note that although we have admin set in the context we
        # are not asking for an admin client, and so we auth with
        # our own token
        neutronv2.get_client(my_context)

    def test_withouttoken_keystone_connection_error(self):
        """Keystone auth without a token surfaces the client exception."""
        self.flags(neutron_auth_strategy='keystone')
        self.flags(neutron_url='http://anyhost/')
        my_context = context.RequestContext('userid', 'my_tenantid')
        self.assertRaises(NEUTRON_CLIENT_EXCEPTION,
                          neutronv2.get_client,
                          my_context)
class TestNeutronv2Base(test.TestCase):
    """Shared fixtures and mox scaffolding for the neutronv2 API tests.

    setUp() builds a family of fake networks (``nets1``..``nets5``), ports,
    subnets and floating IPs; the ``_stub_*`` helpers record the exact
    sequence of neutron client calls the API is expected to make.
    """

    def setUp(self):
        super(TestNeutronv2Base, self).setUp()
        self.context = context.RequestContext('userid', 'my_tenantid')
        setattr(self.context,
                'auth_token',
                'bff4a5a6b9eb4ea2a6efec6eefb77936')
        self.instance = {'project_id': '9d049e4b60b64716978ab415e6fbd5c0',
                         'uuid': str(uuid.uuid4()),
                         'display_name': 'test_instance',
                         'availability_zone': 'nova',
                         'host': 'some_host',
                         'security_groups': []}
        self.instance2 = {'project_id': '9d049e4b60b64716978ab415e6fbd5c0',
                          'uuid': str(uuid.uuid4()),
                          'display_name': 'test_instance2',
                          'availability_zone': 'nova',
                          'security_groups': []}
        # nets1..nets3: 1..3 tenant-owned networks; nets4: another tenant's.
        self.nets1 = [{'id': 'my_netid1',
                       'name': 'my_netname1',
                       'tenant_id': 'my_tenantid'}]
        self.nets2 = []
        self.nets2.append(self.nets1[0])
        self.nets2.append({'id': 'my_netid2',
                           'name': 'my_netname2',
                           'tenant_id': 'my_tenantid'})
        self.nets3 = self.nets2 + [{'id': 'my_netid3',
                                    'name': 'my_netname3',
                                    'tenant_id': 'my_tenantid'}]
        self.nets4 = [{'id': 'his_netid4',
                       'name': 'his_netname4',
                       'tenant_id': 'his_tenantid'}]
        # A network request with external networks
        self.nets5 = self.nets1 + [{'id': 'the-external-one',
                                    'name': 'out-of-this-world',
                                    'router:external': True,
                                    'tenant_id': 'should-be-an-admin'}]
        # Indexed by (net_idx - 1) in _stub_allocate_for_instance().
        self.nets = [self.nets1, self.nets2, self.nets3,
                     self.nets4, self.nets5]
        self.port_address = '10.0.1.2'
        self.port_data1 = [{'network_id': 'my_netid1',
                           'device_id': self.instance2['uuid'],
                           'device_owner': 'compute:nova',
                           'id': 'my_portid1',
                           'fixed_ips': [{'ip_address': self.port_address,
                                          'subnet_id': 'my_subid1'}],
                           'mac_address': 'my_mac1', }]
        self.float_data1 = [{'port_id': 'my_portid1',
                             'fixed_ip_address': self.port_address,
                             'floating_ip_address': '172.0.1.2'}]
        self.dhcp_port_data1 = [{'fixed_ips': [{'ip_address': '10.0.1.9',
                                                'subnet_id': 'my_subid1'}]}]
        self.port_address2 = '10.0.2.2'
        self.port_data2 = []
        self.port_data2.append(self.port_data1[0])
        self.port_data2.append({'network_id': 'my_netid2',
                                'device_id': self.instance['uuid'],
                                'device_owner': 'compute:nova',
                                'id': 'my_portid2',
                                'fixed_ips':
                                    [{'ip_address': self.port_address2,
                                      'subnet_id': 'my_subid2'}],
                                'mac_address': 'my_mac2', })
        self.float_data2 = []
        self.float_data2.append(self.float_data1[0])
        self.float_data2.append({'port_id': 'my_portid2',
                                 'fixed_ip_address': '10.0.2.2',
                                 'floating_ip_address': '172.0.2.2'})
        self.port_data3 = [{'network_id': 'my_netid1',
                           'device_id': 'device_id3',
                           'device_owner': 'compute:nova',
                           'id': 'my_portid3',
                           'fixed_ips': [],  # no fixed ip
                           'mac_address': 'my_mac3', }]
        self.subnet_data1 = [{'id': 'my_subid1',
                             'cidr': '10.0.1.0/24',
                             'network_id': 'my_netid1',
                             'gateway_ip': '10.0.1.1',
                             'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
        self.subnet_data2 = []
        self.subnet_data_n = [{'id': 'my_subid1',
                               'cidr': '10.0.1.0/24',
                               'network_id': 'my_netid1',
                               'gateway_ip': '10.0.1.1',
                               'dns_nameservers': ['8.8.1.1', '8.8.1.2']},
                              {'id': 'my_subid2',
                               'cidr': '20.0.1.0/24',
                               'network_id': 'my_netid2',
                               'gateway_ip': '20.0.1.1',
                               'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
        self.subnet_data2.append({'id': 'my_subid2',
                                  'cidr': '10.0.2.0/24',
                                  'network_id': 'my_netid2',
                                  'gateway_ip': '10.0.2.1',
                                  'dns_nameservers': ['8.8.2.1', '8.8.2.2']})
        self.fip_pool = {'id': '4fdbfd74-eaf8-4884-90d9-00bd6f10c2d3',
                         'name': 'ext_net',
                         'router:external': True,
                         'tenant_id': 'admin_tenantid'}
        self.fip_pool_nova = {'id': '435e20c3-d9f1-4f1b-bee5-4611a1dd07db',
                              'name': 'nova',
                              'router:external': True,
                              'tenant_id': 'admin_tenantid'}
        self.fip_unassociated = {'tenant_id': 'my_tenantid',
                                 'id': 'fip_id1',
                                 'floating_ip_address': '172.24.4.227',
                                 'floating_network_id': self.fip_pool['id'],
                                 'port_id': None,
                                 'fixed_ip_address': None,
                                 'router_id': None}
        fixed_ip_address = self.port_data2[1]['fixed_ips'][0]['ip_address']
        self.fip_associated = {'tenant_id': 'my_tenantid',
                               'id': 'fip_id2',
                               'floating_ip_address': '172.24.4.228',
                               'floating_network_id': self.fip_pool['id'],
                               'port_id': self.port_data2[1]['id'],
                               'fixed_ip_address': fixed_ip_address,
                               'router_id': 'router_id1'}
        # What the stubbed get_instance_nw_info() will hand back.
        self._returned_nw_info = []
        self.mox.StubOutWithMock(neutronv2, 'get_client')
        self.moxed_client = self.mox.CreateMock(client.Client)
        self.addCleanup(CONF.reset)
        self.addCleanup(self.mox.VerifyAll)
        self.addCleanup(self.mox.UnsetStubs)
        self.addCleanup(self.stubs.UnsetAll)

    def _stub_allocate_for_instance(self, net_idx=1, **kwargs):
        """Record the mox expectations for one allocate_for_instance() run.

        ``net_idx`` selects self.nets[net_idx - 1]; recognised kwargs include
        ``requested_networks``, ``macs``, ``portbinding``, ``dhcp_options``,
        ``_device`` (mark the pre-created port as in use) and ``_break``
        (stop recording early at a named point).  Returns the API object
        with ReplayAll() already called.
        """
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, 'get_instance_nw_info')
        has_portbinding = False
        has_extra_dhcp_opts = False
        # Note: (dkehn) this option check should be removed as soon as support
        # in neutron released, see https://bugs.launchpad.net/nova/+bug/1214162
        # NOTE(review): '== True' / '!= None' violate PEP 8 (E711/E712);
        # should be 'is True' / 'is not None' — left untouched here.
        if (cfg.CONF.dhcp_options_enabled == True and kwargs.get(
                'dhcp_options', None) != None):
            has_extra_dhcp_opts = True
            dhcp_options = kwargs.get('dhcp_options')
        if kwargs.get('portbinding'):
            has_portbinding = True
            api.extensions[constants.PORTBINDING_EXT] = 1
            self.mox.StubOutWithMock(api, '_refresh_neutron_extensions_cache')
            neutronv2.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
                self.moxed_client)
            neutronv2.get_client(
                mox.IgnoreArg(), admin=True).MultipleTimes().AndReturn(
                    self.moxed_client)
            api._refresh_neutron_extensions_cache()
        else:
            self.mox.StubOutWithMock(api, '_populate_neutron_extension_values')
            self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        # Net idx is 1-based for compatibility with existing unit tests
        nets = self.nets[net_idx - 1]
        ports = {}
        fixed_ips = {}
        macs = kwargs.get('macs')
        if macs:
            macs = set(macs)
        req_net_ids = []
        if 'requested_networks' in kwargs:
            for id, fixed_ip, port_id in kwargs['requested_networks']:
                if port_id:
                    # Pre-created port: expect a show_port lookup; the port
                    # always resolves to my_portid1/my_netid1 in the fixture.
                    self.moxed_client.show_port(port_id).AndReturn(
                        {'port': {'id': 'my_portid1',
                                  'network_id': 'my_netid1',
                                  'mac_address': 'my_mac1',
                                  'device_id': kwargs.get('_device') and
                                  self.instance2['uuid'] or ''}})
                    ports['my_netid1'] = self.port_data1[0]
                    id = 'my_netid1'
                    if macs is not None:
                        macs.discard('my_mac1')
                else:
                    fixed_ips[id] = fixed_ip
                req_net_ids.append(id)
            expected_network_order = req_net_ids
        else:
            expected_network_order = [n['id'] for n in nets]
        if kwargs.get('_break') == 'pre_list_networks':
            self.mox.ReplayAll()
            return api
        search_ids = [net['id'] for net in nets if net['id'] in req_net_ids]
        if search_ids:
            mox_list_params = {'id': mox.SameElementsAs(search_ids)}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': nets})
        else:
            # No explicit request: tenant networks plus shared networks.
            mox_list_params = {'tenant_id': self.instance['project_id'],
                               'shared': False}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': nets})
            mox_list_params = {'shared': True}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': []})
        for net_id in expected_network_order:
            port_req_body = {
                'port': {
                    'device_id': self.instance['uuid'],
                    'device_owner': 'compute:nova',
                },
            }
            if has_portbinding:
                port_req_body['port']['binding:host_id'] = (
                    self.instance.get('host'))
            port = ports.get(net_id, None)
            if not has_portbinding:
                api._populate_neutron_extension_values(
                    self.instance, mox.IgnoreArg()).AndReturn(None)
            else:
                # since _populate_neutron_extension_values() will call
                # _has_port_binding_extension()
                api._has_port_binding_extension().AndReturn(has_portbinding)
            api._has_port_binding_extension().AndReturn(has_portbinding)
            if port:
                # Existing port is claimed via update_port.
                port_id = port['id']
                self.moxed_client.update_port(port_id,
                                              MyComparator(port_req_body)
                                              ).AndReturn(
                                                  {'port': port})
            else:
                # Otherwise a fresh port is created on the network.
                fixed_ip = fixed_ips.get(net_id)
                if fixed_ip:
                    port_req_body['port']['fixed_ips'] = [{'ip_address':
                                                           fixed_ip}]
                port_req_body['port']['network_id'] = net_id
                port_req_body['port']['admin_state_up'] = True
                port_req_body['port']['tenant_id'] = \
                    self.instance['project_id']
                if macs:
                    port_req_body['port']['mac_address'] = macs.pop()
                if has_portbinding:
                    port_req_body['port']['binding:host_id'] = (
                        self.instance.get('host'))
                res_port = {'port': {'id': 'fake'}}
                if has_extra_dhcp_opts:
                    port_req_body['port']['extra_dhcp_opts'] = dhcp_options
                if kwargs.get('_break') == 'mac' + net_id:
                    self.mox.ReplayAll()
                    return api
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndReturn(res_port)
        api.get_instance_nw_info(mox.IgnoreArg(),
                                 self.instance,
                                 networks=nets).AndReturn(
                                     self._returned_nw_info)
        self.mox.ReplayAll()
        return api

    def _verify_nw_info(self, nw_inf, index=0):
        """Assert entry ``index`` of nw_inf matches fixture item index+1."""
        id_suffix = index + 1
        self.assertEquals('10.0.%s.2' % id_suffix,
                          nw_inf.fixed_ips()[index]['address'])
        self.assertEquals('172.0.%s.2' % id_suffix,
                          nw_inf.fixed_ips()[index].floating_ip_addresses()[0])
        self.assertEquals('my_netname%s' % id_suffix,
                          nw_inf[index]['network']['label'])
        self.assertEquals('my_portid%s' % id_suffix, nw_inf[index]['id'])
        self.assertEquals('my_mac%s' % id_suffix, nw_inf[index]['address'])
        self.assertEquals('10.0.%s.0/24' % id_suffix,
                          nw_inf[index]['network']['subnets'][0]['cidr'])
        self.assertTrue(model.IP(address='8.8.%s.1' % id_suffix) in
                        nw_inf[index]['network']['subnets'][0]['dns'])

    def _get_instance_nw_info(self, number):
        """Exercise get_instance_nw_info() with ``number`` (1 or 2) ports."""
        api = neutronapi.API()
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(mox.IgnoreArg(),
                                          self.instance['uuid'],
                                          mox.IgnoreArg())
        port_data = number == 1 and self.port_data1 or self.port_data2
        net_info_cache = []
        for port in port_data:
            net_info_cache.append({"network": {"id": port['network_id']}})
        instance = copy.copy(self.instance)
        # This line here does not wrap net_info_cache in jsonutils.dumps()
        # intentionally to test the other code path when it's not unicode.
        instance['info_cache'] = {'network_info': net_info_cache}
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': port_data})
        net_ids = [port['network_id'] for port in port_data]
        nets = number == 1 and self.nets1 or self.nets2
        self.moxed_client.list_networks(
            id=net_ids).AndReturn({'networks': nets})
        for i in xrange(1, number + 1):
            float_data = number == 1 and self.float_data1 or self.float_data2
            for ip in port_data[i - 1]['fixed_ips']:
                float_data = [x for x in float_data
                              if x['fixed_ip_address'] == ip['ip_address']]
                self.moxed_client.list_floatingips(
                    fixed_ip_address=ip['ip_address'],
                    port_id=port_data[i - 1]['id']).AndReturn(
                        {'floatingips': float_data})
            subnet_data = i == 1 and self.subnet_data1 or self.subnet_data2
            self.moxed_client.list_subnets(
                id=mox.SameElementsAs(['my_subid%s' % i])).AndReturn(
                    {'subnets': subnet_data})
            self.moxed_client.list_ports(
                network_id=subnet_data[0]['network_id'],
                device_owner='network:dhcp').AndReturn(
                    {'ports': []})
        self.mox.ReplayAll()
        nw_inf = api.get_instance_nw_info(self.context, instance)
        for i in xrange(0, number):
            self._verify_nw_info(nw_inf, i)

    def _allocate_for_instance(self, net_idx=1, **kwargs):
        """Stub expectations for net_idx/kwargs, then run the allocation."""
        api = self._stub_allocate_for_instance(net_idx, **kwargs)
        return api.allocate_for_instance(self.context, self.instance, **kwargs)
class TestNeutronv2(TestNeutronv2Base):
    def setUp(self):
        """Record the non-admin get_client() expectation shared by tests."""
        super(TestNeutronv2, self).setUp()
        neutronv2.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
            self.moxed_client)
    def test_get_instance_nw_info_1(self):
        """Build nw_info for one port in one network/subnet."""
        # Test to get one port in one network and subnet.
        neutronv2.get_client(mox.IgnoreArg(),
                             admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self._get_instance_nw_info(1)
    def test_get_instance_nw_info_2(self):
        """Build nw_info for one port in each of two networks/subnets."""
        # Test to get one port in each of two networks and subnets.
        neutronv2.get_client(mox.IgnoreArg(),
                             admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self._get_instance_nw_info(2)
    def test_get_instance_nw_info_with_nets(self):
        """get_instance_nw_info() honours an explicit ``networks`` list."""
        # Test get instance_nw_info with networks passed in.
        api = neutronapi.API()
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(
            mox.IgnoreArg(),
            self.instance['uuid'], mox.IgnoreArg())
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': self.port_data1})
        port_data = self.port_data1
        for ip in port_data[0]['fixed_ips']:
            self.moxed_client.list_floatingips(
                fixed_ip_address=ip['ip_address'],
                port_id=port_data[0]['id']).AndReturn(
                    {'floatingips': self.float_data1})
        self.moxed_client.list_subnets(
            id=mox.SameElementsAs(['my_subid1'])).AndReturn(
                {'subnets': self.subnet_data1})
        self.moxed_client.list_ports(
            network_id='my_netid1',
            device_owner='network:dhcp').AndReturn(
                {'ports': self.dhcp_port_data1})
        neutronv2.get_client(mox.IgnoreArg(),
                             admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self.mox.ReplayAll()
        self.instance['info_cache'] = {'network_info': []}
        nw_inf = api.get_instance_nw_info(self.context,
                                          self.instance,
                                          networks=self.nets1)
        self._verify_nw_info(nw_inf, 0)
    def test_get_instance_nw_info_with_nets_and_info_cache(self):
        """Adding an interface keeps prior cached interfaces in nw_info."""
        # This tests that adding an interface to an instance does not
        # remove the first instance from the instance.
        api = neutronapi.API()
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(
            mox.IgnoreArg(),
            self.instance['uuid'], mox.IgnoreArg())
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': self.port_data1})
        port_data = self.port_data1
        for ip in port_data[0]['fixed_ips']:
            self.moxed_client.list_floatingips(
                fixed_ip_address=ip['ip_address'],
                port_id=port_data[0]['id']).AndReturn(
                    {'floatingips': self.float_data1})
        self.moxed_client.list_subnets(
            id=mox.SameElementsAs(['my_subid1'])).AndReturn(
                {'subnets': self.subnet_data1})
        self.moxed_client.list_ports(
            network_id='my_netid1',
            device_owner='network:dhcp').AndReturn(
                {'ports': self.dhcp_port_data1})
        neutronv2.get_client(mox.IgnoreArg(),
                             admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self.mox.ReplayAll()
        # Pre-populate the info cache with one already-known interface.
        network_model = model.Network(id='network_id',
                                      bridge='br-int',
                                      injected='injected',
                                      label='fake_network',
                                      tenant_name='fake_tenant')
        self.instance['info_cache'] = {
            'network_info': [{'id': 'port_id',
                              'address': 'mac_address',
                              'network': network_model,
                              'type': 'ovs',
                              'ovs_interfaceid': 'ovs_interfaceid',
                              'devname': 'devname'}]}
        nw_inf = api.get_instance_nw_info(self.context,
                                          self.instance,
                                          networks=self.nets1)
        self.assertEqual(2, len(nw_inf))
        for k, v in self.instance['info_cache']['network_info'][0].iteritems():
            self.assertEqual(nw_inf[0][k], v)
        # remove first inf and verify that the second interface is correct
        del nw_inf[0]
        self._verify_nw_info(nw_inf, 0)
    def test_get_instance_nw_info_without_subnet(self):
        """A port with no fixed IPs yields nw_info with empty subnets."""
        # Test get instance_nw_info for a port without subnet.
        api = neutronapi.API()
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(
            mox.IgnoreArg(),
            self.instance['uuid'], mox.IgnoreArg())
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': self.port_data3})
        self.moxed_client.list_networks(
            id=[self.port_data1[0]['network_id']]).AndReturn(
                {'networks': self.nets1})
        neutronv2.get_client(mox.IgnoreArg(),
                             admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self.mox.StubOutWithMock(conductor_api.API,
                                 'instance_get_by_uuid')
        net_info_cache = []
        for port in self.port_data3:
            net_info_cache.append({"network": {"id": port['network_id']}})
        instance = copy.copy(self.instance)
        # Serialized (unicode) cache exercises the json-decoding path.
        instance['info_cache'] = {'network_info':
                                  six.text_type(
                                      jsonutils.dumps(net_info_cache))}
        self.mox.ReplayAll()
        nw_inf = api.get_instance_nw_info(self.context,
                                          instance)
        id_suffix = 3
        self.assertEquals(0, len(nw_inf.fixed_ips()))
        self.assertEquals('my_netname1', nw_inf[0]['network']['label'])
        self.assertEquals('my_portid%s' % id_suffix, nw_inf[0]['id'])
        self.assertEquals('my_mac%s' % id_suffix, nw_inf[0]['address'])
        self.assertEquals(0, len(nw_inf[0]['network']['subnets']))
    def test_refresh_neutron_extensions_cache(self):
        """Refreshing the cache maps extension name -> extension dict."""
        api = neutronapi.API()
        # Note: Don't want the default get_client from setUp()
        self.mox.ResetAll()
        neutronv2.get_client(mox.IgnoreArg(),
                             admin=True).AndReturn(
            self.moxed_client)
        self.moxed_client.list_extensions().AndReturn(
            {'extensions': [{'name': 'nvp-qos'}]})
        self.mox.ReplayAll()
        api._refresh_neutron_extensions_cache()
        self.assertEquals({'nvp-qos': {'name': 'nvp-qos'}}, api.extensions)
    def test_populate_neutron_extension_values_rxtx_factor(self):
        """With nvp-qos present, the flavor rxtx_factor lands on the port."""
        api = neutronapi.API()
        # Note: Don't want the default get_client from setUp()
        self.mox.ResetAll()
        neutronv2.get_client(mox.IgnoreArg(),
                             admin=True).AndReturn(
            self.moxed_client)
        self.moxed_client.list_extensions().AndReturn(
            {'extensions': [{'name': 'nvp-qos'}]})
        self.mox.ReplayAll()
        instance_type = flavors.get_default_flavor()
        instance_type['rxtx_factor'] = 1
        sys_meta = utils.dict_to_metadata(
            flavors.save_flavor_info({}, instance_type))
        instance = {'system_metadata': sys_meta}
        port_req_body = {'port': {}}
        api._populate_neutron_extension_values(instance, port_req_body)
        self.assertEquals(port_req_body['port']['rxtx_factor'], 1)
    def test_allocate_for_instance_1(self):
        """Allocation succeeds with a single available network."""
        # Allocate one port in one network env.
        self._allocate_for_instance(1)
    def test_allocate_for_instance_2(self):
        """Allocation succeeds with two available networks."""
        # Allocate one port in two networks env.
        self._allocate_for_instance(2)
    def test_allocate_for_instance_accepts_macs_kwargs_None(self):
        """``macs=None`` is accepted and ignored."""
        # The macs kwarg should be accepted as None.
        self._allocate_for_instance(1, macs=None)
    def test_allocate_for_instance_accepts_macs_kwargs_set(self):
        """A ``macs`` set is accepted and used for the created port."""
        # The macs kwarg should be accepted, as a set, the
        # _allocate_for_instance helper checks that the mac is used to create a
        # port.
        self._allocate_for_instance(1, macs=set(['ab:cd:ef:01:23:45']))
    def test_allocate_for_instance_accepts_only_portid(self):
        """A request naming only a port id (no network/IP) allocates it."""
        # Make sure allocate_for_instance works when only a portid is provided
        self._returned_nw_info = self.port_data1
        result = self._allocate_for_instance(
            requested_networks=[(None, None, 'my_portid1')])
        self.assertEqual(self.port_data1, result)
    def test_allocate_for_instance_not_enough_macs_via_ports(self):
        """A MAC consumed by a pre-created port can't be reused elsewhere."""
        # using a hypervisor MAC via a pre-created port will stop it being
        # used to dynamically create a port on a network. We put the network
        # first in requested_networks so that if the code were to not pre-check
        # requested ports, it would incorrectly assign the mac and not fail.
        requested_networks = [
            (self.nets2[1]['id'], None, None),
            (None, None, 'my_portid1')]
        api = self._stub_allocate_for_instance(
            net_idx=2, requested_networks=requested_networks,
            macs=set(['my_mac1']),
            _break='mac' + self.nets2[1]['id'])
        self.assertRaises(exception.PortNotFree,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks,
                          macs=set(['my_mac1']))
    def test_allocate_for_instance_not_enough_macs(self):
        """One MAC for two requested networks raises PortNotFree."""
        # If not enough MAC addresses are available to allocate to networks, an
        # error should be raised.
        # We could pass in macs=set(), but that wouldn't tell us that
        # allocate_for_instance tracks used macs properly, so we pass in one
        # mac, and ask for two networks.
        requested_networks = [
            (self.nets2[1]['id'], None, None),
            (self.nets2[0]['id'], None, None)]
        api = self._stub_allocate_for_instance(
            net_idx=2, requested_networks=requested_networks,
            macs=set(['my_mac2']),
            _break='mac' + self.nets2[0]['id'])
        self.assertRaises(exception.PortNotFree,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks,
                          macs=set(['my_mac2']))
    def test_allocate_for_instance_two_macs_two_networks(self):
        """Two MACs for two requested networks allocates cleanly."""
        # If two MACs are available and two networks requested, two new ports
        # get made and no exceptions raised.
        requested_networks = [
            (self.nets2[1]['id'], None, None),
            (self.nets2[0]['id'], None, None)]
        self._allocate_for_instance(
            net_idx=2, requested_networks=requested_networks,
            macs=set(['my_mac2', 'my_mac1']))
    def test_allocate_for_instance_mac_conflicting_requested_port(self):
        """A pre-created port whose MAC isn't in ``macs`` is unusable."""
        # specify only first and last network
        requested_networks = [(None, None, 'my_portid1')]
        api = self._stub_allocate_for_instance(
            net_idx=1, requested_networks=requested_networks,
            macs=set(['unknown:mac']),
            _break='pre_list_networks')
        self.assertRaises(exception.PortNotUsable,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks,
                          macs=set(['unknown:mac']))
    def test_allocate_for_instance_with_requested_networks(self):
        """Allocation follows the caller-specified network ordering."""
        # specify only first and last network
        requested_networks = [
            (net['id'], None, None)
            for net in (self.nets3[1], self.nets3[0], self.nets3[2])]
        self._allocate_for_instance(net_idx=3,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_with_requested_networks_with_fixedip(self):
        """A requested network with a fixed IP passes it to create_port."""
        # specify only first and last network
        requested_networks = [(self.nets1[0]['id'], '10.0.1.0/24', None)]
        self._allocate_for_instance(net_idx=1,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_with_requested_networks_with_port(self):
        """A requested network given as a pre-created port id is claimed."""
        requested_networks = [(None, None, 'myportid1')]
        self._allocate_for_instance(net_idx=1,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_no_networks(self):
        """verify the exception thrown when there are no networks defined.

        With neither tenant nor shared networks available the result is an
        empty nw_info list rather than an error.
        """
        api = neutronapi.API()
        self.moxed_client.list_networks(
            tenant_id=self.instance['project_id'],
            shared=False).AndReturn(
                {'networks': []})
        self.moxed_client.list_networks(shared=True).AndReturn(
            {'networks': []})
        self.mox.ReplayAll()
        nwinfo = api.allocate_for_instance(self.context, self.instance)
        self.assertEqual(len(nwinfo), 0)
    def test_allocate_for_instance_ex1(self):
        """verify we will delete created ports
        if we fail to allocate all net resources.

        Mox to raise exception when creating a second port.
        In this case, the code should delete the first created port.
        """
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_populate_neutron_extension_values')
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension().MultipleTimes().AndReturn(False)
        self.moxed_client.list_networks(
            tenant_id=self.instance['project_id'],
            shared=False).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.list_networks(shared=True).AndReturn(
            {'networks': []})
        index = 0
        for network in self.nets2:
            binding_port_req_body = {
                'port': {
                    'device_id': self.instance['uuid'],
                    'device_owner': 'compute:nova',
                },
            }
            port_req_body = {
                'port': {
                    'network_id': network['id'],
                    'admin_state_up': True,
                    'tenant_id': self.instance['project_id'],
                },
            }
            port_req_body['port'].update(binding_port_req_body['port'])
            port = {'id': 'portid_' + network['id']}
            api._populate_neutron_extension_values(
                self.instance, binding_port_req_body).AndReturn(None)
            if index == 0:
                # First port creation succeeds ...
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndReturn({'port': port})
            else:
                # ... second one hits the port quota (HTTP 409).
                NeutronOverQuota = exceptions.NeutronClientException(
                    message="Quota exceeded for resources: ['port']",
                    status_code=409)
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndRaise(NeutronOverQuota)
            index += 1
        # Cleanup must delete the port that *was* created.
        self.moxed_client.delete_port('portid_' + self.nets2[0]['id'])
        self.mox.ReplayAll()
        self.assertRaises(exception.PortLimitExceeded,
                          api.allocate_for_instance,
                          self.context, self.instance)
    def test_allocate_for_instance_ex2(self):
        """verify we have no port to delete
        if we fail to allocate the first net resource.

        Mox to raise exception when creating the first port.
        In this case, the code should not delete any ports.
        """
        api = neutronapi.API()
        self.moxed_client.list_networks(
            tenant_id=self.instance['project_id'],
            shared=False).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.list_networks(shared=True).AndReturn(
            {'networks': []})
        neutronv2.get_client(mox.IgnoreArg(),
                             admin=True).AndReturn(
            self.moxed_client)
        port_req_body = {
            'port': {
                'network_id': self.nets2[0]['id'],
                'admin_state_up': True,
                'device_id': self.instance['uuid'],
                'tenant_id': self.instance['project_id'],
            },
        }
        self.moxed_client.create_port(
            MyComparator(port_req_body)).AndRaise(
                Exception("fail to create port"))
        self.mox.ReplayAll()
        self.assertRaises(NEUTRON_CLIENT_EXCEPTION, api.allocate_for_instance,
                          self.context, self.instance)
    def test_allocate_for_instance_no_port_or_network(self):
        """A (None, None, None) request still queries available networks."""
        class BailOutEarly(Exception):
            # Sentinel used to abort allocate_for_instance after the
            # _get_available_networks call we care about.
            pass
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_get_available_networks')
        # Make sure we get an empty list and then bail out of the rest
        # of the function
        api._get_available_networks(self.context, self.instance['project_id'],
                                    []).AndRaise(BailOutEarly)
        self.mox.ReplayAll()
        self.assertRaises(BailOutEarly,
                          api.allocate_for_instance,
                          self.context, self.instance,
                          requested_networks=[(None, None, None)])
    def test_allocate_for_instance_second_time(self):
        """Only ports allocated during *this* run are returned."""
        # Make sure that allocate_for_instance only returns ports that it
        # allocated during _that_ run.
        new_port = {'id': 'fake'}
        self._returned_nw_info = self.port_data1 + [new_port]
        nw_info = self._allocate_for_instance()
        self.assertEqual(nw_info, [new_port])
    def test_allocate_for_instance_port_in_use(self):
        """A requested port already bound to a device raises PortInUse."""
        # If a port is already in use, an exception should be raised.
        requested_networks = [(None, None, 'my_portid1')]
        api = self._stub_allocate_for_instance(
            requested_networks=requested_networks,
            _break='pre_list_networks',
            _device=True)
        self.assertRaises(exception.PortInUse,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks)
    def _deallocate_for_instance(self, number, requested_networks=None):
        """Drive deallocate_for_instance() with ``number`` existing ports.

        Requested (pre-created) ports are expected to be unbound via
        update_port; ports created by nova are expected to be deleted.
        """
        api = neutronapi.API()
        port_data = number == 1 and self.port_data1 or self.port_data2
        ret_data = copy.deepcopy(port_data)
        if requested_networks:
            for net, fip, port in requested_networks:
                ret_data.append({'network_id': net,
                                 'device_id': self.instance['uuid'],
                                 'device_owner': 'compute:nova',
                                 'id': port,
                                 'status': 'DOWN',
                                 'admin_state_up': True,
                                 'fixed_ips': [],
                                 'mac_address': 'fake_mac', })
        self.moxed_client.list_ports(
            device_id=self.instance['uuid']).AndReturn(
                {'ports': ret_data})
        if requested_networks:
            for net, fip, port in requested_networks:
                self.moxed_client.update_port(port)
        for port in reversed(port_data):
            self.moxed_client.delete_port(port['id'])
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(self.context,
                                          self.instance['uuid'],
                                          {'network_info': '[]'})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.deallocate_for_instance(self.context, self.instance,
                                    requested_networks=requested_networks)
    def test_deallocate_for_instance_1_with_requested(self):
        """Deallocate one nova port plus one caller-requested port."""
        requested = [('fake-net', 'fake-fip', 'fake-port')]
        # Test to deallocate in one port env.
        self._deallocate_for_instance(1, requested_networks=requested)
    def test_deallocate_for_instance_2_with_requested(self):
        """Deallocate two nova ports plus one caller-requested port."""
        requested = [('fake-net', 'fake-fip', 'fake-port')]
        # Test to deallocate in a two port env.
        self._deallocate_for_instance(2, requested_networks=requested)
    def test_deallocate_for_instance_1(self):
        """Deallocate a single nova-created port."""
        # Test to deallocate in one port env.
        self._deallocate_for_instance(1)
    def test_deallocate_for_instance_2(self):
        """Deallocate two nova-created ports."""
        # Test to deallocate in two ports env.
        self._deallocate_for_instance(2)
    def test_deallocate_for_instance_port_not_found(self):
        """A 404 from delete_port is tolerated during deallocation."""
        port_data = self.port_data1
        self.moxed_client.list_ports(
            device_id=self.instance['uuid']).AndReturn(
                {'ports': port_data})
        NeutronNotFound = neutronv2.exceptions.NeutronClientException(
            status_code=404)
        for port in reversed(port_data):
            self.moxed_client.delete_port(port['id']).AndRaise(
                NeutronNotFound)
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.deallocate_for_instance(self.context, self.instance)
    def _test_deallocate_port_for_instance(self, number):
        """Delete the first of ``number`` ports and verify remaining nw_info."""
        port_data = number == 1 and self.port_data1 or self.port_data2
        nets = number == 1 and self.nets1 or self.nets2
        self.moxed_client.delete_port(port_data[0]['id'])
        net_info_cache = []
        for port in port_data:
            net_info_cache.append({"network": {"id": port['network_id']}})
        instance = copy.copy(self.instance)
        # Serialized (unicode) cache exercises the json-decoding path.
        instance['info_cache'] = {'network_info':
                                  six.text_type(
                                      jsonutils.dumps(net_info_cache))}
        api = neutronapi.API()
        neutronv2.get_client(mox.IgnoreArg(), admin=True).AndReturn(
            self.moxed_client)
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': port_data[1:]})
        neutronv2.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
            self.moxed_client)
        net_ids = [port['network_id'] for port in port_data]
        self.moxed_client.list_networks(id=net_ids).AndReturn(
            {'networks': nets})
        float_data = number == 1 and self.float_data1 or self.float_data2
        for data in port_data[1:]:
            for ip in data['fixed_ips']:
                self.moxed_client.list_floatingips(
                    fixed_ip_address=ip['ip_address'],
                    port_id=data['id']).AndReturn(
                        {'floatingips': float_data[1:]})
        for port in port_data[1:]:
            self.moxed_client.list_subnets(id=['my_subid2']).AndReturn({})
        self.mox.ReplayAll()
        nwinfo = api.deallocate_port_for_instance(self.context, instance,
                                                  port_data[0]['id'])
        self.assertEqual(len(nwinfo), len(port_data[1:]))
        if len(port_data) > 1:
            self.assertEqual(nwinfo[0]['network']['id'], 'my_netid2')
    def test_deallocate_port_for_instance_1(self):
        """Deallocate the only port; nw_info ends up empty."""
        # Test to deallocate the first and only port
        self._test_deallocate_port_for_instance(1)
def test_deallocate_port_for_instance_2(self):
# Test to deallocate the first port of two
self._test_deallocate_port_for_instance(2)
def test_list_ports(self):
search_opts = {'parm': 'value'}
self.moxed_client.list_ports(**search_opts)
self.mox.ReplayAll()
neutronapi.API().list_ports(self.context, **search_opts)
def test_show_port(self):
self.moxed_client.show_port('foo')
self.mox.ReplayAll()
neutronapi.API().show_port(self.context, 'foo')
    def test_validate_networks(self):
        # Happy path: every requested network id exists in neutron.
        requested_networks = [('my_netid1', 'test', None),
                              ('my_netid2', 'test2', None)]
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.validate_networks(self.context, requested_networks)
    def test_validate_networks_ex_1(self):
        # NOTE(review): if validate_networks does NOT raise, this test
        # passes without asserting anything -- the only assertion lives in
        # the except branch.  Confirm whether NetworkNotFound is actually
        # expected here and, if so, add an ``else: self.fail(...)``.
        requested_networks = [('my_netid1', 'test', None)]
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(['my_netid1'])).AndReturn(
                {'networks': self.nets1})
        self.mox.ReplayAll()
        api = neutronapi.API()
        try:
            api.validate_networks(self.context, requested_networks)
        except exception.NetworkNotFound as ex:
            self.assertTrue("my_netid2" in str(ex))
def test_validate_networks_ex_2(self):
requested_networks = [('my_netid1', 'test', None),
('my_netid2', 'test2', None),
('my_netid3', 'test3', None)]
ids = ['my_netid1', 'my_netid2', 'my_netid3']
self.moxed_client.list_networks(
id=mox.SameElementsAs(ids)).AndReturn(
{'networks': self.nets1})
self.mox.ReplayAll()
api = neutronapi.API()
try:
api.validate_networks(self.context, requested_networks)
except exception.NetworkNotFound as ex:
self.assertTrue("my_netid2, my_netid3" in str(ex))
    def test_validate_networks_duplicate(self):
        """Verify that the correct exception is thrown when duplicate
        network ids are passed to validate_networks.
        """
        requested_networks = [('my_netid1', None, None),
                              ('my_netid1', None, None)]
        self.mox.ReplayAll()
        # Expected call from setUp.
        neutronv2.get_client(None)
        api = neutronapi.API()
        self.assertRaises(exception.NetworkDuplicated,
                          api.validate_networks,
                          self.context, requested_networks)
    def test_validate_networks_not_specified(self):
        # With no explicit request, both the tenant's private networks and
        # the shared networks are candidates; more than one is ambiguous.
        requested_networks = []
        self.moxed_client.list_networks(
            tenant_id=self.context.project_id,
            shared=False).AndReturn(
                {'networks': self.nets1})
        self.moxed_client.list_networks(
            shared=True).AndReturn(
                {'networks': self.nets2})
        self.mox.ReplayAll()
        api = neutronapi.API()
        self.assertRaises(exception.NetworkAmbiguous,
                          api.validate_networks,
                          self.context, requested_networks)
    def test_validate_networks_port_not_found(self):
        # Verify that the correct exception is thrown when a non existent
        # port is passed to validate_networks.
        requested_networks = [('my_netid1', None, '3123-ad34-bc43-32332ca33e')]
        NeutronNotFound = neutronv2.exceptions.NeutronClientException(
            status_code=404)
        self.moxed_client.show_port(requested_networks[0][2]).AndRaise(
            NeutronNotFound)
        self.mox.ReplayAll()
        # Expected call from setUp.
        neutronv2.get_client(None)
        api = neutronapi.API()
        self.assertRaises(exception.PortNotFound,
                          api.validate_networks,
                          self.context, requested_networks)
    def test_validate_networks_port_in_use(self):
        # A port that still carries a device_id is in use by an instance.
        requested_networks = [(None, None, self.port_data3[0]['id'])]
        self.moxed_client.show_port(self.port_data3[0]['id']).\
            AndReturn({'port': self.port_data3[0]})
        self.mox.ReplayAll()
        api = neutronapi.API()
        self.assertRaises(exception.PortInUse,
                          api.validate_networks,
                          self.context, requested_networks)
    def test_validate_networks_ports_in_same_network(self):
        # Two requested ports on the same network count as a duplicate.
        port_a = self.port_data3[0]
        port_b = self.port_data1[0]
        self.assertEqual(port_a['network_id'], port_b['network_id'])
        for port in [port_a, port_b]:
            port['device_id'] = None
            port['device_owner'] = None
        requested_networks = [(None, None, port_a['id']),
                              (None, None, port_b['id'])]
        self.moxed_client.show_port(port_a['id']).AndReturn({'port': port_a})
        self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
        self.mox.ReplayAll()
        api = neutronapi.API()
        self.assertRaises(exception.NetworkDuplicated,
                          api.validate_networks,
                          self.context, requested_networks)
    def test_validate_networks_ports_not_in_same_network(self):
        # Ports on distinct networks validate successfully.
        port_a = self.port_data3[0]
        port_b = self.port_data2[1]
        self.assertNotEqual(port_a['network_id'], port_b['network_id'])
        for port in [port_a, port_b]:
            port['device_id'] = None
            port['device_owner'] = None
        requested_networks = [(None, None, port_a['id']),
                              (None, None, port_b['id'])]
        self.moxed_client.show_port(port_a['id']).AndReturn({'port': port_a})
        self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
        search_opts = {'id': [port_a['network_id'], port_b['network_id']]}
        self.moxed_client.list_networks(
            **search_opts).AndReturn({'networks': self.nets2})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.validate_networks(self.context, requested_networks)
    def _mock_list_ports(self, port_data=None):
        # Record a list_ports expectation keyed on the fixed ip address
        # and return that address so callers can reuse it.
        if port_data is None:
            port_data = self.port_data2
        address = self.port_address
        self.moxed_client.list_ports(
            fixed_ips=MyComparator('ip_address=%s' % address)).AndReturn(
                {'ports': port_data})
        self.mox.ReplayAll()
        return address
def test_get_instance_uuids_by_ip_filter(self):
self._mock_list_ports()
filters = {'ip': '^10\\.0\\.1\\.2$'}
api = neutronapi.API()
result = api.get_instance_uuids_by_ip_filter(self.context, filters)
self.assertEquals(self.instance2['uuid'], result[0]['instance_uuid'])
self.assertEquals(self.instance['uuid'], result[1]['instance_uuid'])
def test_get_fixed_ip_by_address_fails_for_no_ports(self):
address = self._mock_list_ports(port_data=[])
api = neutronapi.API()
self.assertRaises(exception.FixedIpNotFoundForAddress,
api.get_fixed_ip_by_address,
self.context, address)
def test_get_fixed_ip_by_address_succeeds_for_1_port(self):
address = self._mock_list_ports(port_data=self.port_data1)
api = neutronapi.API()
result = api.get_fixed_ip_by_address(self.context, address)
self.assertEquals(self.instance2['uuid'], result['instance_uuid'])
def test_get_fixed_ip_by_address_fails_for_more_than_1_port(self):
address = self._mock_list_ports()
api = neutronapi.API()
self.assertRaises(exception.FixedIpAssociatedWithMultipleInstances,
api.get_fixed_ip_by_address,
self.context, address)
    def _get_available_networks(self, prv_nets, pub_nets,
                                req_ids=None, context=None):
        # Exercise _get_available_networks: with req_ids the lookup is by
        # id; otherwise tenant-private and shared networks are listed
        # separately and the results concatenated.
        api = neutronapi.API()
        nets = prv_nets + pub_nets
        if req_ids:
            mox_list_params = {'id': req_ids}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': nets})
        else:
            mox_list_params = {'tenant_id': self.instance['project_id'],
                               'shared': False}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': prv_nets})
            mox_list_params = {'shared': True}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': pub_nets})
        self.mox.ReplayAll()
        rets = api._get_available_networks(
            context if context else self.context,
            self.instance['project_id'],
            req_ids)
        self.assertEqual(rets, nets)
    def test_get_available_networks_all_private(self):
        self._get_available_networks(prv_nets=self.nets2, pub_nets=[])
    def test_get_available_networks_all_public(self):
        self._get_available_networks(prv_nets=[], pub_nets=self.nets2)
    def test_get_available_networks_private_and_public(self):
        self._get_available_networks(prv_nets=self.nets1, pub_nets=self.nets4)
    def test_get_available_networks_with_network_ids(self):
        prv_nets = [self.nets3[0]]
        pub_nets = [self.nets3[-1]]
        # specify only first and last network
        req_ids = [net['id'] for net in (self.nets3[0], self.nets3[-1])]
        self._get_available_networks(prv_nets, pub_nets, req_ids)
    def test_get_available_networks_with_externalnet_fails(self):
        # Attaching to an external network is forbidden for normal users.
        req_ids = [net['id'] for net in self.nets5]
        self.assertRaises(
            exception.ExternalNetworkAttachForbidden,
            self._get_available_networks,
            self.nets5, pub_nets=[], req_ids=req_ids)
    def test_get_available_networks_with_externalnet_admin_ctx(self):
        # Admin contexts may attach to external networks.
        admin_ctx = context.RequestContext('userid', 'my_tenantid',
                                           is_admin=True)
        req_ids = [net['id'] for net in self.nets5]
        self._get_available_networks(self.nets5, pub_nets=[],
                                     req_ids=req_ids, context=admin_ctx)
    def test_get_floating_ip_pools(self):
        # Floating ip pools are the external (router:external) networks.
        api = neutronapi.API()
        search_opts = {'router:external': True}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool, self.fip_pool_nova]})
        self.mox.ReplayAll()
        pools = api.get_floating_ip_pools(self.context)
        expected = [{'name': self.fip_pool['name']},
                    {'name': self.fip_pool_nova['name']}]
        self.assertEqual(expected, pools)
    def _get_expected_fip_model(self, fip_data, idx=0):
        # Build the nova-style floating ip dict expected for fip_data;
        # idx selects which port/instance the fip is associated with.
        expected = {'id': fip_data['id'],
                    'address': fip_data['floating_ip_address'],
                    'pool': self.fip_pool['name'],
                    'project_id': fip_data['tenant_id'],
                    'fixed_ip_id': fip_data['port_id'],
                    'fixed_ip':
                        {'address': fip_data['fixed_ip_address']},
                    'instance': ({'uuid': self.port_data2[idx]['device_id']}
                                 if fip_data['port_id']
                                 else None)}
        return expected
    def _test_get_floating_ip(self, fip_data, idx=0, by_address=False):
        # Look up a floating ip either by id or by address and compare the
        # result against the expected model.
        api = neutronapi.API()
        fip_id = fip_data['id']
        net_id = fip_data['floating_network_id']
        address = fip_data['floating_ip_address']
        if by_address:
            self.moxed_client.list_floatingips(floating_ip_address=address).\
                AndReturn({'floatingips': [fip_data]})
        else:
            self.moxed_client.show_floatingip(fip_id).\
                AndReturn({'floatingip': fip_data})
        self.moxed_client.show_network(net_id).\
            AndReturn({'network': self.fip_pool})
        if fip_data['port_id']:
            self.moxed_client.show_port(fip_data['port_id']).\
                AndReturn({'port': self.port_data2[idx]})
        self.mox.ReplayAll()
        expected = self._get_expected_fip_model(fip_data, idx)
        if by_address:
            fip = api.get_floating_ip_by_address(self.context, address)
        else:
            fip = api.get_floating_ip(self.context, fip_id)
        self.assertEqual(expected, fip)
    def test_get_floating_ip_unassociated(self):
        self._test_get_floating_ip(self.fip_unassociated, idx=0)
    def test_get_floating_ip_associated(self):
        self._test_get_floating_ip(self.fip_associated, idx=1)
    def test_get_floating_ip_by_address(self):
        self._test_get_floating_ip(self.fip_unassociated, idx=0,
                                   by_address=True)
    def test_get_floating_ip_by_address_associated(self):
        self._test_get_floating_ip(self.fip_associated, idx=1,
                                   by_address=True)
    def test_get_floating_ip_by_address_not_found(self):
        # No match by address maps to FloatingIpNotFoundForAddress.
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': []})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          api.get_floating_ip_by_address,
                          self.context, address)
    def test_get_floating_ip_by_id_not_found(self):
        # A 404 from neutron maps to FloatingIpNotFound.
        api = neutronapi.API()
        NeutronNotFound = neutronv2.exceptions.NeutronClientException(
            status_code=404)
        floating_ip_id = self.fip_unassociated['id']
        self.moxed_client.show_floatingip(floating_ip_id).\
            AndRaise(NeutronNotFound)
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpNotFound,
                          api.get_floating_ip,
                          self.context, floating_ip_id)
    def test_get_floating_ip_by_address_multiple_found(self):
        # More than one floating ip for the same address is an error.
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated] * 2})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpMultipleFoundForAddress,
                          api.get_floating_ip_by_address,
                          self.context, address)
    def test_get_floating_ips_by_project(self):
        # All of the project's fips are returned, modelled against the
        # project's ports and the external networks.
        api = neutronapi.API()
        project_id = self.context.project_id
        self.moxed_client.list_floatingips(tenant_id=project_id).\
            AndReturn({'floatingips': [self.fip_unassociated,
                                       self.fip_associated]})
        search_opts = {'router:external': True}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool, self.fip_pool_nova]})
        self.moxed_client.list_ports(tenant_id=project_id).\
            AndReturn({'ports': self.port_data2})
        self.mox.ReplayAll()
        expected = [self._get_expected_fip_model(self.fip_unassociated),
                    self._get_expected_fip_model(self.fip_associated, idx=1)]
        fips = api.get_floating_ips_by_project(self.context)
        self.assertEqual(expected, fips)
    def _test_get_instance_id_by_floating_address(self, fip_data,
                                                  associated=False):
        # An associated fip resolves to the port's device_id (the instance
        # uuid); an unassociated one resolves to None.
        api = neutronapi.API()
        address = fip_data['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [fip_data]})
        if associated:
            self.moxed_client.show_port(fip_data['port_id']).\
                AndReturn({'port': self.port_data2[1]})
        self.mox.ReplayAll()
        if associated:
            expected = self.port_data2[1]['device_id']
        else:
            expected = None
        fip = api.get_instance_id_by_floating_address(self.context, address)
        self.assertEqual(expected, fip)
    def test_get_instance_id_by_floating_address(self):
        self._test_get_instance_id_by_floating_address(self.fip_unassociated)
    def test_get_instance_id_by_floating_address_associated(self):
        self._test_get_instance_id_by_floating_address(self.fip_associated,
                                                       associated=True)
    def test_allocate_floating_ip(self):
        # Allocation by pool name resolves the pool id first.
        api = neutronapi.API()
        pool_name = self.fip_pool['name']
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context, 'ext_net')
        self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
    def test_allocate_floating_ip_with_pool_id(self):
        # Allocation can also be requested directly by pool id.
        api = neutronapi.API()
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'id': pool_id}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context, pool_id)
        self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
    def test_allocate_floating_ip_with_default_pool(self):
        # Without an explicit pool the configured default pool is used.
        api = neutronapi.API()
        pool_name = self.fip_pool_nova['name']
        pool_id = self.fip_pool_nova['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool_nova]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context)
        self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
    def test_release_floating_ip(self):
        # An unassociated fip can be deleted.
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        fip_id = self.fip_unassociated['id']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated]})
        self.moxed_client.delete_floatingip(fip_id)
        self.mox.ReplayAll()
        api.release_floating_ip(self.context, address)
    def test_release_floating_ip_associated(self):
        # Releasing a fip that is still associated must be refused.
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpAssociated,
                          api.release_floating_ip, self.context, address)
    def _setup_mock_for_refresh_cache(self, api, instances):
        # Stub out nw-info retrieval and the info-cache DB update so the
        # cache refresh triggered by the call under test is satisfied once
        # per instance.
        nw_info = self.mox.CreateMock(model.NetworkInfo)
        self.mox.StubOutWithMock(api, '_get_instance_nw_info')
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        for instance in instances:
            nw_info.json()
            api._get_instance_nw_info(mox.IgnoreArg(), instance).\
                AndReturn(nw_info)
            api.db.instance_info_cache_update(mox.IgnoreArg(),
                                              instance['uuid'],
                                              mox.IgnoreArg())
    def test_associate_floating_ip(self):
        # Associating updates the fip with the instance's port and fixed
        # address, then refreshes the instance's info cache.
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        fixed_address = self.port_address2
        fip_id = self.fip_unassociated['id']
        search_opts = {'device_owner': 'compute:nova',
                       'device_id': self.instance['uuid']}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[1]]})
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': self.fip_associated['port_id'],
                                    'fixed_ip_address': fixed_address}})
        self._setup_mock_for_refresh_cache(api, [self.instance])
        self.mox.ReplayAll()
        api.associate_floating_ip(self.context, self.instance,
                                  address, fixed_address)
    def test_reassociate_floating_ip(self):
        # Re-pointing an associated fip at a new instance refreshes the
        # info caches of both the previous and the new instance.
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        old_fixed_address = self.fip_associated['fixed_ip_address']
        new_fixed_address = self.port_address
        fip_id = self.fip_associated['id']
        search_opts = {'device_owner': 'compute:nova',
                       'device_id': self.instance2['uuid']}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[0]]})
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': 'my_portid1',
                                    'fixed_ip_address': new_fixed_address}})
        self.moxed_client.show_port(self.fip_associated['port_id']).\
            AndReturn({'port': self.port_data2[1]})
        self.mox.StubOutWithMock(api.db, 'instance_get_by_uuid')
        api.db.instance_get_by_uuid(mox.IgnoreArg(),
                                    self.instance['uuid']).\
            AndReturn(self.instance)
        self._setup_mock_for_refresh_cache(api, [self.instance,
                                                 self.instance2])
        self.mox.ReplayAll()
        api.associate_floating_ip(self.context, self.instance2,
                                  address, new_fixed_address)
    def test_associate_floating_ip_not_found_fixed_ip(self):
        # The fixed address must belong to one of the instance's ports.
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        fixed_address = self.fip_associated['fixed_ip_address']
        search_opts = {'device_owner': 'compute:nova',
                       'device_id': self.instance['uuid']}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[0]]})
        self.mox.ReplayAll()
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          api.associate_floating_ip, self.context,
                          self.instance, address, fixed_address)
    def test_disassociate_floating_ip(self):
        # Disassociation clears the fip's port_id and refreshes the cache.
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        fip_id = self.fip_associated['id']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': None}})
        self._setup_mock_for_refresh_cache(api, [self.instance])
        self.mox.ReplayAll()
        api.disassociate_floating_ip(self.context, self.instance, address)
    def test_add_fixed_ip_to_instance(self):
        # Adding a fixed ip appends another fixed_ip entry on the
        # network's subnet to the instance's existing port.
        api = neutronapi.API()
        self._setup_mock_for_refresh_cache(api, [self.instance])
        network_id = 'my_netid1'
        search_opts = {'network_id': network_id}
        self.moxed_client.list_subnets(
            **search_opts).AndReturn({'subnets': self.subnet_data_n})
        search_opts = {'device_id': self.instance['uuid'],
                       'device_owner': 'compute:nova',
                       'network_id': network_id}
        self.moxed_client.list_ports(
            **search_opts).AndReturn({'ports': self.port_data1})
        port_req_body = {
            'port': {
                'fixed_ips': [{'subnet_id': 'my_subid1'},
                              {'subnet_id': 'my_subid1'}],
            },
        }
        port = self.port_data1[0]
        port['fixed_ips'] = [{'subnet_id': 'my_subid1'}]
        self.moxed_client.update_port('my_portid1',
            MyComparator(port_req_body)).AndReturn({'port': port})
        self.mox.ReplayAll()
        api.add_fixed_ip_to_instance(self.context, self.instance, network_id)
    def test_remove_fixed_ip_from_instance(self):
        # Removing a fixed ip clears all fixed_ips on the matching port.
        api = neutronapi.API()
        self._setup_mock_for_refresh_cache(api, [self.instance])
        address = '10.0.0.3'
        zone = 'compute:%s' % self.instance['availability_zone']
        search_opts = {'device_id': self.instance['uuid'],
                       'device_owner': zone,
                       'fixed_ips': 'ip_address=%s' % address}
        self.moxed_client.list_ports(
            **search_opts).AndReturn({'ports': self.port_data1})
        port_req_body = {
            'port': {
                'fixed_ips': [],
            },
        }
        port = self.port_data1[0]
        port['fixed_ips'] = []
        self.moxed_client.update_port('my_portid1',
            MyComparator(port_req_body)).AndReturn({'port': port})
        self.mox.ReplayAll()
        api.remove_fixed_ip_from_instance(self.context, self.instance, address)
    def test_list_floating_ips_without_l3_support(self):
        # Without the l3 extension neutron returns 404; nova should treat
        # that as "no floating ips" rather than an error.
        api = neutronapi.API()
        NeutronNotFound = exceptions.NeutronClientException(
            status_code=404)
        self.moxed_client.list_floatingips(
            fixed_ip_address='1.1.1.1', port_id=1).AndRaise(NeutronNotFound)
        self.mox.ReplayAll()
        neutronv2.get_client('fake')
        floatingips = api._get_floating_ips_by_fixed_and_port(
            self.moxed_client, '1.1.1.1', 1)
        self.assertEqual(floatingips, [])
    def test_nw_info_get_ips(self):
        # _nw_info_get_ips attaches the floating ips found for each fixed
        # ip of the port.
        fake_port = {
            'fixed_ips': [
                {'ip_address': '1.1.1.1'}],
            'id': 'port-id',
            }
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_get_floating_ips_by_fixed_and_port')
        api._get_floating_ips_by_fixed_and_port(
            self.moxed_client, '1.1.1.1', 'port-id').AndReturn(
                [{'floating_ip_address': '10.0.0.1'}])
        self.mox.ReplayAll()
        neutronv2.get_client('fake')
        result = api._nw_info_get_ips(self.moxed_client, fake_port)
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0]['address'], '1.1.1.1')
        self.assertEqual(result[0]['floating_ips'][0]['address'], '10.0.0.1')
    def test_nw_info_get_subnets(self):
        # Only fixed ips inside a subnet's CIDR are attached to that
        # subnet (2.2.2.2 is outside 1.0.0.0/8 and is dropped).
        fake_port = {
            'fixed_ips': [
                {'ip_address': '1.1.1.1'},
                {'ip_address': '2.2.2.2'}],
            'id': 'port-id',
            }
        fake_subnet = model.Subnet(cidr='1.0.0.0/8')
        fake_ips = [model.IP(x['ip_address']) for x in fake_port['fixed_ips']]
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_get_subnets_from_port')
        api._get_subnets_from_port(self.context, fake_port).AndReturn(
            [fake_subnet])
        self.mox.ReplayAll()
        neutronv2.get_client('fake')
        subnets = api._nw_info_get_subnets(self.context, fake_port, fake_ips)
        self.assertEqual(len(subnets), 1)
        self.assertEqual(len(subnets[0]['ips']), 1)
        self.assertEqual(subnets[0]['ips'][0]['address'], '1.1.1.1')
def _test_nw_info_build_network(self, vif_type):
fake_port = {
'fixed_ips': [{'ip_address': '1.1.1.1'}],
'id': 'port-id',
'network_id': 'net-id',
'binding:vif_type': vif_type,
}
fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
fake_nets = [{'id': 'net-id', 'name': 'foo', 'tenant_id': 'tenant'}]
api = neutronapi.API()
self.mox.ReplayAll()
neutronv2.get_client('fake')
net, iid = api._nw_info_build_network(fake_port, fake_nets,
fake_subnets)
self.assertEqual(net['subnets'], fake_subnets)
self.assertEqual(net['id'], 'net-id')
self.assertEqual(net['label'], 'foo')
self.assertEqual(net.get_meta('tenant_id'), 'tenant')
self.assertEqual(net.get_meta('injected'), CONF.flat_injected)
return net, iid
def test_nw_info_build_network_ovs(self):
net, iid = self._test_nw_info_build_network(model.VIF_TYPE_OVS)
self.assertEqual(net['bridge'], CONF.neutron_ovs_bridge)
self.assertFalse('should_create_bridge' in net)
self.assertEqual(iid, 'port-id')
def test_nw_info_build_network_bridge(self):
net, iid = self._test_nw_info_build_network(model.VIF_TYPE_BRIDGE)
self.assertEqual(net['bridge'], 'brqnet-id')
self.assertTrue(net['should_create_bridge'])
self.assertEqual(iid, None)
def test_nw_info_build_network_other(self):
net, iid = self._test_nw_info_build_network(None)
self.assertEqual(net['bridge'], None)
self.assertFalse('should_create_bridge' in net)
self.assertEqual(iid, None)
def test_nw_info_build_no_match(self):
fake_port = {
'fixed_ips': [{'ip_address': '1.1.1.1'}],
'id': 'port-id',
'network_id': 'net-id1',
'tenant_id': 'tenant',
'binding:vif_type': model.VIF_TYPE_OVS,
}
fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
fake_nets = [{'id': 'net-id2', 'name': 'foo', 'tenant_id': 'tenant'}]
api = neutronapi.API()
self.mox.ReplayAll()
neutronv2.get_client('fake')
net, iid = api._nw_info_build_network(fake_port, fake_nets,
fake_subnets)
self.assertEqual(fake_subnets, net['subnets'])
self.assertEqual('net-id1', net['id'])
self.assertEqual('net-id1', net['id'])
self.assertEqual('tenant', net['meta']['tenant_id'])
    def test_build_network_info_model(self):
        # End-to-end: ports are listed via the admin client, floating ips
        # and subnets are resolved per port, and ports whose network is
        # not in fake_nets are ignored.
        api = neutronapi.API()
        fake_inst = {'project_id': 'fake', 'uuid': 'uuid',
                     'info_cache': {'network_info': []}}
        fake_ports = [
            {'id': 'port0',
             'network_id': 'net-id',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:01',
             'binding:vif_type': model.VIF_TYPE_BRIDGE,
             },
            # This does not match the networks we provide below,
            # so it should be ignored (and is here to verify that)
            {'id': 'port1',
             'network_id': 'other-net-id',
             },
            ]
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
        fake_nets = [
            {'id': 'net-id',
             'name': 'foo',
             'tenant_id': 'fake',
             }
            ]
        neutronv2.get_client(mox.IgnoreArg(), admin=True).MultipleTimes(
            ).AndReturn(self.moxed_client)
        self.moxed_client.list_ports(
            tenant_id='fake', device_id='uuid').AndReturn(
                {'ports': fake_ports})
        self.mox.StubOutWithMock(api, '_get_floating_ips_by_fixed_and_port')
        api._get_floating_ips_by_fixed_and_port(
            self.moxed_client, '1.1.1.1', 'port0').AndReturn(
                [{'floating_ip_address': '10.0.0.1'}])
        self.mox.StubOutWithMock(api, '_get_subnets_from_port')
        api._get_subnets_from_port(self.context, fake_ports[0]).AndReturn(
            fake_subnets)
        self.mox.ReplayAll()
        neutronv2.get_client('fake')
        nw_info = api._build_network_info_model(self.context, fake_inst,
                                                fake_nets)
        self.assertEqual(len(nw_info), 1)
        self.assertEqual(nw_info[0]['id'], 'port0')
        self.assertEqual(nw_info[0]['address'], 'de:ad:be:ef:00:01')
        self.assertEqual(nw_info[0]['devname'], 'tapport0')
        self.assertEqual(nw_info[0]['ovs_interfaceid'], None)
        self.assertEqual(nw_info[0]['type'], model.VIF_TYPE_BRIDGE)
        self.assertEqual(nw_info[0]['network']['bridge'], 'brqnet-id')
    def test_get_all_empty_list_networks(self):
        # get_all returns an empty list when neutron has no networks.
        api = neutronapi.API()
        self.moxed_client.list_networks().AndReturn({'networks': []})
        self.mox.ReplayAll()
        networks = api.get_all(self.context)
        self.assertEqual(networks, [])
class TestNeutronv2ModuleMethods(test.TestCase):
    """Tests for module-level helpers in the neutronv2 API module."""
    def test_ensure_requested_network_ordering_no_preference_ids(self):
        # With no preference list the input order must be preserved.
        l = [1, 2, 3]
        neutronapi._ensure_requested_network_ordering(
            lambda x: x,
            l,
            None)
        # Assert the list is untouched; previously this test made no
        # assertion at all (its siblings do).
        self.assertEqual(l, [1, 2, 3])
    def test_ensure_requested_network_ordering_no_preference_hashes(self):
        # Dicts with no preference list keep their original order too.
        l = [{'id': 3}, {'id': 1}, {'id': 2}]
        neutronapi._ensure_requested_network_ordering(
            lambda x: x['id'],
            l,
            None)
        self.assertEqual(l, [{'id': 3}, {'id': 1}, {'id': 2}])
    def test_ensure_requested_network_ordering_with_preference(self):
        # A preference list re-orders the input in place.
        l = [{'id': 3}, {'id': 1}, {'id': 2}]
        neutronapi._ensure_requested_network_ordering(
            lambda x: x['id'],
            l,
            [1, 2, 3])
        self.assertEqual(l, [{'id': 1}, {'id': 2}, {'id': 3}])
class TestNeutronv2Portbinding(TestNeutronv2Base):
    """Tests for neutron port-binding extension support."""
    def test_allocate_for_instance_portbinding(self):
        self._allocate_for_instance(1, portbinding=True)
    def test_populate_neutron_extension_values_binding(self):
        # With the portbinding extension present, the instance host is
        # copied into binding:host_id on the port request body.
        api = neutronapi.API()
        neutronv2.get_client(mox.IgnoreArg(), admin=True).AndReturn(
            self.moxed_client)
        self.moxed_client.list_extensions().AndReturn(
            {'extensions': [{'name': constants.PORTBINDING_EXT}]})
        self.mox.ReplayAll()
        host_id = 'my_host_id'
        instance = {'host': host_id}
        port_req_body = {'port': {}}
        api._populate_neutron_extension_values(instance, port_req_body)
        self.assertEquals(port_req_body['port']['binding:host_id'], host_id)
    def test_migrate_instance_finish_binding_false(self):
        # Without the binding extension nothing is updated.
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(refresh_cache=True).AndReturn(False)
        self.mox.ReplayAll()
        api.migrate_instance_finish(self.context, None, None)
    def test_migrate_instance_finish_binding_true(self):
        # With the binding extension each instance port is re-bound to
        # the migration destination host.
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(refresh_cache=True).AndReturn(True)
        neutronv2.get_client(mox.IgnoreArg(), admin=True).AndReturn(
            self.moxed_client)
        search_opts = {'device_id': self.instance['uuid'],
                       'tenant_id': self.instance['project_id']}
        ports = {'ports': [{'id': 'test1'}]}
        self.moxed_client.list_ports(**search_opts).AndReturn(ports)
        migration = {'source_compute': self.instance.get('host'),
                     'dest_compute': 'dest_host', }
        port_req_body = {'port':
                         {'binding:host_id': migration['dest_compute']}}
        self.moxed_client.update_port('test1',
                                      port_req_body).AndReturn(None)
        self.mox.ReplayAll()
        api.migrate_instance_finish(self.context, self.instance, migration)
    def test_migrate_instance_finish_binding_true_exception(self):
        # A failure while updating the port propagates as a neutron
        # client exception.
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(refresh_cache=True).AndReturn(True)
        neutronv2.get_client(mox.IgnoreArg(), admin=True).AndReturn(
            self.moxed_client)
        search_opts = {'device_id': self.instance['uuid'],
                       'tenant_id': self.instance['project_id']}
        ports = {'ports': [{'id': 'test1'}]}
        self.moxed_client.list_ports(**search_opts).AndReturn(ports)
        migration = {'source_compute': self.instance.get('host'),
                     'dest_compute': 'dest_host', }
        port_req_body = {'port':
                         {'binding:host_id': migration['dest_compute']}}
        self.moxed_client.update_port('test1',
                                      port_req_body).AndRaise(
            Exception("fail to update port"))
        self.mox.ReplayAll()
        self.assertRaises(NEUTRON_CLIENT_EXCEPTION,
                          api.migrate_instance_finish,
                          self.context, self.instance, migration)
class TestNeutronv2ExtraDhcpOpts(TestNeutronv2Base):
    """Tests for passing extra DHCP options when allocating ports."""
    def setUp(self):
        super(TestNeutronv2ExtraDhcpOpts, self).setUp()
        neutronv2.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
            self.moxed_client)
    def test_allocate_for_instance_1_with_extra_dhcp_opts_turned_off(self):
        # Note: (dkehn) this option check should be removed as soon as support
        # in neutron released, see https://bugs.launchpad.net/nova/+bug/1214162
        # NOTE(review): the override is reset manually on the last line
        # rather than via self.flags()/addCleanup, so a failure inside
        # _allocate_for_instance would leak the True setting into later
        # tests -- confirm and consider using self.flags instead.
        CONF.set_override('dhcp_options_enabled', True)
        self._allocate_for_instance(1, extra_dhcp_opts=False)
        CONF.set_override('dhcp_options_enabled', False)
    def test_allocate_for_instance_extradhcpopts(self):
        # Note: (dkehn) this option check should be removed as soon as support
        # in neutron released, see https://bugs.launchpad.net/nova/+bug/1214162
        CONF.set_override('dhcp_options_enabled', True)
        dhcp_opts = [{'opt_name': 'bootfile-name',
                      'opt_value': 'pxelinux.0'},
                     {'opt_name': 'tftp-server',
                      'opt_value': '123.123.123.123'},
                     {'opt_name': 'server-ip-address',
                      'opt_value': '123.123.123.456'}]
        self._allocate_for_instance(1, dhcp_options=dhcp_opts)
        CONF.set_override('dhcp_options_enabled', False)
class TestNeutronClientForAdminScenarios(test.TestCase):
def test_get_cached_neutron_client_for_admin(self):
self.flags(neutron_url='http://anyhost/')
self.flags(neutron_url_timeout=30)
my_context = context.RequestContext('userid',
'my_tenantid',
auth_token='token')
# Make multiple calls and ensure we get the same
# client back again and again
client = neutronv2.get_client(my_context, True)
client2 = neutronv2.get_client(my_context, True)
client3 = neutronv2.get_client(my_context, True)
self.assertEqual(client, client2)
self.assertEqual(client, client3)
# clear the cache
local.strong_store.neutron_client = None
# A new client should be created now
client4 = neutronv2.get_client(my_context, True)
self.assertNotEqual(client, client4)
def test_get_neutron_client_for_non_admin(self):
self.flags(neutron_url='http://anyhost/')
self.flags(neutron_url_timeout=30)
my_context = context.RequestContext('userid',
'my_tenantid',
auth_token='token')
# Multiple calls should return different clients
client = neutronv2.get_client(my_context)
client2 = neutronv2.get_client(my_context)
self.assertNotEqual(client, client2)
def test_get_neutron_client_for_non_admin_and_no_token(self):
self.flags(neutron_url='http://anyhost/')
self.flags(neutron_url_timeout=30)
my_context = context.RequestContext('userid',
'my_tenantid')
self.assertRaises(exceptions.Unauthorized,
neutronv2.get_client,
my_context)
def test_get_client_for_admin(self):
self.flags(neutron_auth_strategy=None)
self.flags(neutron_url='http://anyhost/')
self.flags(neutron_url_timeout=30)
my_context = context.RequestContext('userid', 'my_tenantid',
auth_token='token')
self.mox.StubOutWithMock(client.Client, "__init__")
client.Client.__init__(
auth_url=CONF.neutron_admin_auth_url,
password=CONF.neutron_admin_password,
tenant_name=CONF.neutron_admin_tenant_name,
username=CONF.neutron_admin_username,
endpoint_url=CONF.neutron_url,
auth_strategy=None,
timeout=CONF.neutron_url_timeout,
insecure=False,
ca_cert=None).AndReturn(None)
self.mox.ReplayAll()
# clear the cache
if hasattr(local.strong_store, 'neutron_client'):
delattr(local.strong_store, 'neutron_client')
# Note that the context is not elevated, but the True is passed in
# which will force an elevation to admin credentials even though
# the context has an auth_token.
neutronv2.get_client(my_context, True)
    def test_get_client_for_admin_context(self):
        """An admin context alone (no admin=True flag) also uses admin creds."""
        self.flags(neutron_auth_strategy=None)
        self.flags(neutron_url='http://anyhost/')
        self.flags(neutron_url_timeout=30)
        my_context = context.get_admin_context()
        # Expect exactly one Client construction using the admin settings.
        self.mox.StubOutWithMock(client.Client, "__init__")
        client.Client.__init__(
            auth_url=CONF.neutron_admin_auth_url,
            password=CONF.neutron_admin_password,
            tenant_name=CONF.neutron_admin_tenant_name,
            username=CONF.neutron_admin_username,
            endpoint_url=CONF.neutron_url,
            auth_strategy=None,
            timeout=CONF.neutron_url_timeout,
            insecure=False,
            ca_cert=None).AndReturn(None)
        self.mox.ReplayAll()
        # clear the cache
        if hasattr(local.strong_store, 'neutron_client'):
            delattr(local.strong_store, 'neutron_client')
        # Note that the context does not contain a token but is
        # an admin context which will force an elevation to admin
        # credentials.
        neutronv2.get_client(my_context)
| rickerc/nova_audit | nova/tests/network/test_neutronv2.py | Python | apache-2.0 | 88,310 |
"""
Test running features in a freshly created Django application.
"""
import os
import shutil
import subprocess
import unittest
from tests.util import in_temporary_directory, run_scenario
def find_file(name):
    """Return the path of the first file or directory called *name* found
    under the current working directory.

    Raises ValueError when nothing with that name exists.
    """
    for root, dirs, files in os.walk('.'):
        entries = dirs + files
        if name in entries:
            return os.path.join(root, name)
    raise ValueError("File named {0} not found.".format(name))
# Directory with source files to copy
# (the feature file and step definitions used to exercise the generated app).
SOURCE_DIR = os.path.join(
    os.path.dirname(__file__), 'django', 'lychee')
class DjangoAppTest(unittest.TestCase):
    """Test running features in a freshly created Django application."""
    @in_temporary_directory
    def test_django_app(self):
        """Create a stock Django app and test running features for it."""
        django_version = subprocess.check_output(
            ('django-admin', '--version')).decode().strip()
        # Create the project and the application
        subprocess.check_call(('django-admin', 'startproject', 'lychee'))
        os.chdir('lychee')
        subprocess.check_call(('django-admin', 'startapp', 'lychee_app'))
        # Add the created application and Aloe-Django to installed
        with open(find_file('settings.py'), 'a') as settings:
            settings.write("""
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'test.sqlite',
    }
}
INSTALLED_APPS += ('aloe_django', 'lychee_app')
""")
        app_root = find_file('lychee_app')
        # Add a view and a template
        with open(find_file('views.py'), 'a') as views:
            views.write("""
from django.views.generic import TemplateView
class HelloView(TemplateView):
    template_name = 'hello.html'
""")
        templates_dir = os.path.join(app_root, 'templates')
        os.mkdir(templates_dir)
        with open(os.path.join(templates_dir, 'hello.html'), 'w') as template:
            template.write("World!")
        # Add the view to URLs.  Django >= 2.0 uses path(); earlier uses url().
        # NOTE(review): lexicographic string compare misorders e.g. '10.0' vs
        # '2.0' - confirm acceptable for the supported Django range.
        with open(find_file('urls.py'), 'a') as urls:
            if django_version >= '2.0':
                urls.write("""
from lychee_app.views import HelloView
urlpatterns += [path(r'hello/', HelloView.as_view())]
""")
            else:
                urls.write("""
from lychee_app.views import HelloView
urlpatterns += [url(r'^hello/', HelloView.as_view())]
""")
        # Create a features directory
        features_dir = os.path.join(app_root, 'features')
        os.mkdir(features_dir)
        # Copy in a feature and steps for it
        for filename in (
                'hello.feature',
                '__init__.py',
                'steps.py',
        ):
            shutil.copyfile(
                os.path.join(SOURCE_DIR, filename),
                os.path.join(features_dir, filename),
            )
        # Run the copied feature through aloe and expect a clean pass.
        ret, output = run_scenario()
        print(output)
        self.assertEqual(ret, 0, "Should succeed")
        self.assertIn("Ran 1 test", output)
| koterpillar/aloe_django | tests/integration/test_stock_app.py | Python | gpl-3.0 | 3,049 |
"""
Wrapper functions mimicking behavior of scipy.stats.truncnorm methods
"""
import truncated_normal.truncated_normal as truncnorm
import ctypes
import numpy as np
def pdf(x, a, b, loc=0., scale=1.):
    """Probability density of a truncated normal, mimicking
    scipy.stats.truncnorm.pdf.

    a and b are the truncation bounds in standard-deviation units relative
    to loc/scale (scipy convention); they are converted to absolute bounds
    before calling the C wrapper.  x may be a scalar or a numpy array; an
    array input returns an array of the same shape.
    """
    # Convert standardized truncation bounds to absolute ones.
    a = loc + a * scale
    b = loc + b * scale
    if isinstance(x, (int, float)):
        return truncnorm.truncated_normal_ab_pdf(x, loc, scale, a, b)
    elif isinstance(x, np.ndarray):
        # Bug fix: `x.nbytes / x.itemsize` shadowed the builtin len() and is
        # a float under true division (np.zeros(float) fails on Python 3);
        # ndarray.size is the element count as an int.
        out = np.zeros(x.size)
        for i, xi in enumerate(x.astype('d').flat):
            out[i] = truncnorm.truncated_normal_ab_pdf(xi, loc, scale, a, b)
        return out.reshape(x.shape)
    else:
        raise Exception("%s format not supported for x argument!" % type(x))
def cdf(x, a, b, loc=0., scale=1.):
    """Cumulative distribution of a truncated normal, mimicking
    scipy.stats.truncnorm.cdf.

    a and b are standardized truncation bounds (scipy convention).  x may
    be a scalar or a numpy array; an array input returns an array of the
    same shape.
    """
    # Convert standardized truncation bounds to absolute ones.
    a = loc + a * scale
    b = loc + b * scale
    if isinstance(x, (int, float)):
        return truncnorm.truncated_normal_ab_cdf(x, loc, scale, a, b)
    elif isinstance(x, np.ndarray):
        # Bug fix: `x.nbytes / x.itemsize` shadowed the builtin len() and is
        # a float under true division (np.zeros(float) fails on Python 3);
        # ndarray.size is the element count as an int.
        out = np.zeros(x.size)
        for i, xi in enumerate(x.astype('d').flat):
            out[i] = truncnorm.truncated_normal_ab_cdf(xi, loc, scale, a, b)
        return out.reshape(x.shape)
    else:
        raise Exception("%s format not supported for x argument!" % type(x))
def sf(x, a, b, loc=0., scale=1.):
    """Survival function (1 - cdf) of the truncated normal."""
    cumulative = cdf(x, a, b, loc=loc, scale=scale)
    return 1 - cumulative
def ppf(cdf, a, b, loc=0., scale=1.):
    """Percent-point function (inverse CDF) of a truncated normal,
    mimicking scipy.stats.truncnorm.ppf.

    cdf may be a scalar probability or a numpy array of probabilities;
    an array input returns an array of the same shape.
    """
    a = loc + a * scale
    b = loc + b * scale
    if isinstance(cdf, (int, float)):
        return truncnorm.truncated_normal_ab_cdf_inv(cdf, loc, scale, a, b)
    elif isinstance(cdf, np.ndarray):
        # Bug fix: the result array was created under the name `pdf` but
        # written via the (undefined) name `ppf`, raising NameError on any
        # array input; one local is used throughout now.  ndarray.size also
        # replaces the float-valued nbytes/itemsize division.
        out = np.zeros(cdf.size)
        for i, ci in enumerate(cdf.astype('d').flat):
            out[i] = truncnorm.truncated_normal_ab_cdf_inv(ci, loc, scale, a, b)
        return out.reshape(cdf.shape)
    else:
        # Bug fix: the original error branch referenced the undefined name
        # `x`; this function's argument is `cdf`.
        raise Exception("%s format not supported for cdf argument!" % type(cdf))
| ROB-Seismology/oq-hazardlib | openquake/hazardlib/c_speedups/truncnorm.py | Python | agpl-3.0 | 2,006 |
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import (
check_output,
CalledProcessError,
)
from charmhelpers.core.hookenv import (
log,
DEBUG,
ERROR,
)
from charmhelpers.fetch import (
apt_install,
apt_purge,
apt_update,
)
from charmhelpers.contrib.hardening.audits.file import (
TemplatedFile,
DeletedFile,
)
from charmhelpers.contrib.hardening import utils
from charmhelpers.contrib.hardening.host import TEMPLATES_DIR
def get_audits():
    """Get OS hardening PAM authentication audits.

    :returns: list of audits
    """
    settings = utils.get_settings('os')
    audits = []
    if settings['auth']['pam_passwdqc_enable']:
        audits.append(PasswdqcPAM('/etc/passwdqc.conf'))
    if settings['auth']['retries']:
        audits.append(Tally2PAM('/usr/share/pam-configs/tally2'))
    else:
        # No retry limit configured: make sure any stale profile is removed.
        audits.append(DeletedFile('/usr/share/pam-configs/tally2'))
    return audits
class PasswdqcPAMContext(object):
    """Template context exposing the configured passwdqc options."""
    def __call__(self):
        settings = utils.get_settings('os')
        return {
            'auth_pam_passwdqc_options':
                settings['auth']['pam_passwdqc_options'],
        }
class PasswdqcPAM(TemplatedFile):
    """The PAM Audit verifies the linux PAM settings."""
    def __init__(self, path):
        super(PasswdqcPAM, self).__init__(path=path,
                                          template_dir=TEMPLATES_DIR,
                                          context=PasswdqcPAMContext(),
                                          user='root',
                                          group='root',
                                          mode=0o0640)
    def pre_write(self):
        """Swap conflicting PAM modules for passwdqc before templating.

        ccreds and cracklib conflict with passwdqc, so they are always
        purged before libpam-passwdqc is installed.
        """
        for pkg in ['libpam-ccreds', 'libpam-cracklib']:
            # Bug fix: the original had stray trailing commas after log(),
            # building and discarding a 1-tuple on each iteration.
            log("Purging package '%s'" % pkg, level=DEBUG)
            apt_purge(pkg)
        apt_update(fatal=True)
        for pkg in ['libpam-passwdqc']:
            log("Installing package '%s'" % pkg, level=DEBUG)
            apt_install(pkg)
    def post_write(self):
        """Updates the PAM configuration after the file has been written"""
        try:
            check_output(['pam-auth-update', '--package'])
        except CalledProcessError as e:
            log('Error calling pam-auth-update: %s' % e, level=ERROR)
class Tally2PAMContext(object):
    """Template context with login-failure lockout settings."""
    def __call__(self):
        auth = utils.get_settings('os')['auth']
        return {
            'auth_lockout_time': auth['lockout_time'],
            'auth_retries': auth['retries'],
        }
class Tally2PAM(TemplatedFile):
    """Audit that renders the pam_tally2 profile with lockout settings."""
    def __init__(self, path):
        options = dict(path=path,
                       template_dir=TEMPLATES_DIR,
                       context=Tally2PAMContext(),
                       user='root',
                       group='root',
                       mode=0o0640)
        super(Tally2PAM, self).__init__(**options)
    def pre_write(self):
        """Make sure pam_tally2 is available and conflicting ccreds is gone."""
        apt_purge('libpam-ccreds')
        apt_update(fatal=True)
        apt_install('libpam-modules')
    def post_write(self):
        """Apply the new PAM profile via pam-auth-update."""
        try:
            check_output(['pam-auth-update', '--package'])
        except CalledProcessError as e:
            log('Error calling pam-auth-update: %s' % e, level=ERROR)
| konono/equlipse | openstack-install/charm/trusty/charm-keystone/charmhelpers/contrib/hardening/host/checks/pam.py | Python | mit | 4,042 |
from vnc.models import *
import paramiko
def test():
print "start "
s=server.objects.all()
print s
for i in s:
print i.servername
#test()
def updateserver():
    """Refresh every server row with its live VNC session count and load.

    Marks a server 'disable' when it is unreachable, has more than 80
    Xvnc sessions, or its 1-minute load exceeds 10.00 (stored * 100).
    """
    ser=server.objects.all()
    for i in ser:
        # Count running Xvnc processes (one per VNC session) on the host.
        vncnum,se=runcmd2(i.serverusername,i.serverpassword,i.servername,"ps -ef | grep Xvnc |grep -v grep |wc| awk '{print $1}'")
        # uptime's 10th field is assumed to be the 1-minute load average
        # - TODO confirm this holds for the servers' uptime output format.
        load,se2=runcmd2(i.serverusername,i.serverpassword,i.servername,"uptime | awk '{print $10}'")
        if vncnum and load:
            i.totalnum=int(vncnum)
            # Strip the trailing comma and store the load scaled by 100.
            loadunm=int(float(load.split(",")[0])*100)
            i.load=loadunm
            if int(vncnum) > 80 or loadunm >1000:
                i.status="disable"
            else:
                i.status="enable"
        else:
            # Either remote command failed: treat the host as unusable.
            i.status="disable"
        i.save()
def runcmd2(un,pa,hn,cmd):
    """Run *cmd* on host *hn* over SSH as user *un* with password *pa*.

    Returns (stdout, stderr); on any connection or execution failure
    returns ("", "") so callers can treat the host as unreachable.
    """
    s = paramiko.SSHClient()
    s.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        s.connect(str(hn), 22, str(un), str(pa))
        si, so, se = s.exec_command(cmd)
        output = so.read()
        error = se.read()
        return output, error
    except Exception:
        # Narrowed from a bare except: the bare clause also swallowed
        # KeyboardInterrupt/SystemExit.  Best-effort contract preserved.
        return ("", "")
    finally:
        # Bug fix: the client was only closed on success, leaking the
        # connection on failure; close() is safe even if connect() failed.
        s.close()
def p(msg):
    """Shorthand debug helper: print *msg*."""
    print msg
| ylzmax/vncmanager | vnc/t.py | Python | gpl-3.0 | 996 |
# =============================================================================
# Federal University of Rio Grande do Sul (UFRGS)
# Connectionist Artificial Intelligence Laboratory (LIAC)
# Renato de Pontes Pereira - rppereira@inf.ufrgs.br
# =============================================================================
# Copyright (c) 2011 Renato de Pontes Pereira, renato.ppontes at gmail dot com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# =============================================================================
import wx
import numpy as np
import scipy
import scipy.spatial
import psi
import arff
from ..properties import *
# from psi.tools.property_handler import *
from psi.euclid import Vector2
from ..component import Component
from .base import BaseRobot
from psi.tools.timer import Timer
__all__ = ['ReplayRobot']
def bresenham(x0, y0, x1, y1, step=1):
    """Integer Bresenham line from (x0, y0) to (x1, y1), inclusive.

    Returns the list of (x, y) points along the line, sampled every
    *step* units along the major axis.  Inputs are truncated to int.
    """
    x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
    dx = abs(x1 - x0)
    dy = abs(y1 - y0)
    den, num, num_add, num_pixels = (0, 0, 0, 0)
    xinc1 = step if x0 <= x1 else -step
    xinc2 = xinc1
    yinc1 = step if y0 <= y1 else -step
    yinc2 = yinc1
    if dx >= dy:
        # x is the major axis: advance x every step, y on error overflow.
        xinc1 = 0
        yinc2 = 0
        den = dx
        num = dx/2.
        num_add = dy
        num_pixels = dx
    else:
        # y is the major axis.
        xinc2 = 0
        yinc1 = 0
        den = dy
        num = dy/2.
        num_add = dx
        num_pixels = dy
    x, y = x0, y0
    points = []
    # Portability fix: range() instead of Py2-only xrange(); identical
    # semantics here and keeps this pure helper importable on Python 3.
    for curpixel in range(0, num_pixels+1, step):
        points.append((x, y))
        num += num_add
        if (num >= den):
            num -= den
            x += xinc1
            y += yinc1
        x += xinc2
        y += yinc2
    return points
class ReplayRobot(Component):
    """Robot component that replays a recorded drive (positions, headings
    and sonar readings loaded from an ARFF log) instead of simulating one.
    """
    name = 'Replay Robot'
    roles = [psi.ROLE_ROBOT]
    # Properties persisted/edited through the options UI.
    fields = ['_path']
    def __init__(self):
        super(ReplayRobot, self).__init__()
        self._path = ''
        self.on_init()
    def on_init(self):
        """Reset pose, footprint and the replay buffers."""
        self._pos = Vector2(0, 0)
        self._th = 0.0
        # Footprint used for drawing (scaled by TILE_SCALE in on_draw).
        self.size = Vector2(45, 38)
        self.aux = []
        self._data_pos = None
        self._data_th = None
        self._data_sonar = None
        self._data_index = None
    def on_run(self):
        """Load the recorded run from the ARFF file at self._path.

        Column layout: 1-2 position, 3 heading, 6-13 sonar ranges;
        positions and ranges are divided by 10 - presumably a unit
        conversion from the logger; TODO confirm.
        """
        f = open(self._path, 'rb')
        dataset = arff.load(f)
        data = np.array(dataset['data'])
        print len(data)
        self.pos = self._pos.copy()
        self.th = self._th
        self._data_pos = data[:, 1:3]/10.0
        self._data_th = data[:, 3:4]
        self._data_sonar = data[:, 6:14]/10.0
        self._data_index = 0
        f.close()
    def on_update(self, tick):
        """Advance one recorded sample: update the pose and derive
        obstacle / free-space cells from the eight sonar readings."""
        tsize = psi.config.TILE_PIXEL_SIZE
        self._pos.x = self._data_pos[self._data_index//1][0]*psi.config.TILE_SCALE
        self._pos.y = self._data_pos[self._data_index//1][1]*psi.config.TILE_SCALE
        self._th = self._data_th[self._data_index//1][0]
        self._data_index += 1
        sonar = self._data_sonar[self._data_index//1]
        self.obstacles = []
        self.not_obstacles = []
        snap = psi.calc.snap_to_grid
        euc = scipy.spatial.distance.euclidean
        # timer = Timer()
        # timer.tic()
        self.aux = []
        self.not_obstacles = set()
        # One reading per sonar, mounted at these bearings (degrees).
        for i, angle in enumerate([-90, -50, -30, -10, 10, 30, 50, 90]):
            d = sonar[i]
            # Readings are clamped to the sensor's 200-unit range.
            d = psi.calc.clip(d, 0, 200)
            opos = psi.calc.obstacle_position(d, angle, self._pos, self._th)
            if d < 200:
                # In-range echo: mark the hit position as an obstacle.
                self.obstacles.append(opos)
            # else:
            #     self.not_obstacles.add(opos)
            # if angle == -50:
            # Sweep the sonar cone and ray-trace toward each edge point;
            # every grid cell crossed before the hit counts as free space.
            # NOTE(review): the swapped coordinates and negated heading
            # here mirror the axes - confirm against obstacle_position.
            for a_ in xrange(-15, 15, 2):
                p = psi.calc.obstacle_position(d*0.75, a_-angle-90, [self._pos[1], self._pos[0]], -self._th)
                points = bresenham(self._pos[0], self._pos[1], p[0], p[1], tsize)
                self.not_obstacles.update(points)
                self.aux.append(p)
        # t = timer.toc()
        # print 'replay_robot.on_update: %.4f seconds'%t
        # # print self.obstacles
    def on_draw(self, tick):
        """Draw the robot body (red box) with a yellow nose showing heading."""
        scale = psi.config.TILE_SCALE
        w, h = self.size*scale
        hw, hh = w//2, h//2
        hhh = hh//2
        x, y = self._pos
        color = (1., 0., 0., 1.)
        psi.graphics.draw_box(x-hw, y-hh, w, h, color, rotation=self._th, center=(x, y))
        color = (1., 1., 0., 1.)
        psi.graphics.draw_box(x, y-hhh, hw, hh, color, rotation=self._th, center=(x, y))
        # tsize = psi.config.TILE_PIXEL_SIZE
        # ht = tsize/2
        # for a in self.aux:
        #     ax = Vector2(a[0], a[1])
        #     psi.graphics.draw_box(ax.x, ax.y, tsize, tsize, psi.BLUE)
        #     psi.graphics.draw_line([self._pos+[ht, ht], ax+(ht, ht)])
        #     psi.graphics.draw_line([self.mark+(ht, ht), ax+(ht, ht)])
    def ui_options(self, mgr):
        """Expose the ARFF log path in the component's options UI."""
        super(ReplayRobot, self).ui_options(mgr)
        # mgr.add_row('Position', Text(self, '_pos', VECTOR2))
        # mgr.add_row('Heading', Text(self, '_th', FLOAT))
        mgr.add_row('Path', FileChooser(self, '_path'))
| renatopp/psi-robotics | psi/core/components/replay_robot.py | Python | mit | 6,151 |
import turtle
# Recursion depth, asked once at import time (the prompt suggests 1-6).
userinput = int(input("How many triangles within triangles do you want? Enter an integer from 1 to 6!"))
def drawTriangle(points, color, myTurtle):
    """Draw a filled triangle through the three (x, y) vertices in *points*."""
    first, second, third = points
    myTurtle.fillcolor(color)
    myTurtle.up()
    myTurtle.goto(first[0], first[1])
    myTurtle.down()
    myTurtle.begin_fill()
    # Trace the remaining vertices and close the outline back at the start.
    for vertex in (second, third, first):
        myTurtle.goto(vertex[0], vertex[1])
    myTurtle.end_fill()
def getMid(p1, p2):
    """Return the midpoint of the segment from p1 to p2."""
    mid_x = (p1[0] + p2[0]) / 2
    mid_y = (p1[1] + p2[1]) / 2
    return (mid_x, mid_y)
def sierpinski(points, degree, myTurtle):
    """Recursively draw a Sierpinski triangle of the given *degree*.

    Each level draws the current triangle (colored by its degree) and
    recurses into the three corner sub-triangles formed by the midpoints.
    """
    palette = ['blue', 'red', 'green', 'white', 'yellow',
               'violet', 'orange']
    drawTriangle(points, palette[degree], myTurtle)
    if degree > 0:
        a, b, c = points
        ab = getMid(a, b)
        ac = getMid(a, c)
        bc = getMid(b, c)
        sierpinski([a, ab, ac], degree - 1, myTurtle)
        sierpinski([b, ab, bc], degree - 1, myTurtle)
        sierpinski([c, bc, ac], degree - 1, myTurtle)
def main():
    """Draw one Sierpinski figure at the user-chosen depth, then wait for a click."""
    pen = turtle.Turtle()
    window = turtle.Screen()
    base_points = [[-100, -50], [0, 100], [100, -50]]
    sierpinski(base_points, userinput, pen)
    window.exitonclick()
# Redraw forever: each pass re-runs main() for a fresh drawing.
# NOTE(review): exitonclick() tears the screen down, so the second pass
# typically raises turtle.Terminator - confirm this loop is intentional.
while True:
    main()
| sjamcsclub/ROOM-B-CS-Club-Materials | Turtle Lesson/triangle.py | Python | gpl-3.0 | 1,474 |
from pytba.util import team_wrap
class Event:
    """Class representing a single FRC Event with associated data.
    Attributes (see https://www.thebluealliance.com/apidocs#models for more information):
        key (str): A string of the event's key, usually the year followed by the event code.
        info (dict): Basic information about the event.
        teams (list): A list of team models for each team at the event. (This may include teams who did not
        participate in any matches)
        matches (list): A list of match models for each match at the event.
        awards (list): A list of award models for the awards handed out at the event.
        rankings (list): A 2D table containing the ranking information for the event.
    """
    def __init__(self, info, teams, matches, awards, rankings, filtered=True, key=None):
        """ Constructs an Event object. All required params are the same as specified in the class docstring.
        :param filtered: (bool) Remove from teams any teams that have not played any matches at this event if true
        (default is true).
        :param key: (str) Manual override for the key (if it is not provided in the info dict)
        """
        if key is not None:
            # Bug fix: the documented manual override used to be ignored
            # entirely (self.key was only ever taken from info).
            self.key = key
        elif info is not None:
            self.key = info['key']
        self.info = info
        if filtered:
            # Keep only teams that appear on at least one match alliance.
            def _played(team):
                team_key = team['key']
                return any(team_key in match['alliances']['red']['teams'] or
                           team_key in match['alliances']['blue']['teams']
                           for match in matches)
            self.teams = [team for team in teams if _played(team)]
        else:
            self.teams = teams
        self.matches = sorted(matches, key=match_sort_key)
        self.awards = awards
        self.rankings = rankings
    def get_match(self, match_key):
        """ Gets the specified match.
        :param match_key: (str) The match's individual key (without the event key preceding it)
        :return: A dict containing match information, or None if not found.
        (see https://www.thebluealliance.com/apidocs#match-model)
        """
        key = self.key + '_' + match_key
        for match in self.matches:
            if match['key'] == key: return match
        return None
    @team_wrap(pos=1)
    def team_matches(self, team, round=None, quals_only=False, playoffs_only=False):
        """Returns a list of a team's matches at this event.
        :param team: (int or str formatted as 'frcXXXX') The team to get matches for.
        :param round: (str) Competition round to get matches from, either "qualification" or "playoffs"
        :param quals_only: (bool) Select only qualifications if true (default false)
        :param playoffs_only: (bool) Select only playoffs if true (default false, takes precedence over quals_only)
        :return: A list of dicts each containing:
            match - a dict with the match model, (See https://www.thebluealliance.com/apidocs#match-model)
            alliance - string containing the teams alliance, either red or blue
            score - int with team's alliance's score
            opp_score - int with team's opponent alliance's score
        """
        matches = []
        filteredMatches = self.matches
        if (round == 'qualification' or quals_only): filteredMatches = self.get_qual_matches()
        # The playoffs selection intentionally overrides the quals one.
        if (round == 'playoffs' or playoffs_only): filteredMatches = self.get_playoff_matches()
        for match in filteredMatches:
            if team in match['alliances']['red']['teams']:
                matches.append({'match': match, 'alliance': 'red', 'score': match['alliances']['red']['score'],
                                'opp_score': match['alliances']['blue']['score']})
            elif team in match['alliances']['blue']['teams']:
                matches.append({'match': match, 'alliance': 'blue', 'score': match['alliances']['blue']['score'],
                                'opp_score': match['alliances']['red']['score']})
        return matches
    @team_wrap(pos=1, format="{}")
    def team_awards(self, team):
        """Gets all of the awards given to this team at this event.
        :param team: (int or str formatted as 'frcXXXX') The team to get awards for.
        :return: A list of dicts (one for each award received), containing:
            award - The detailed award model (see https://www.thebluealliance.com/apidocs#award-model)
            name - String of the award's common name
            awardee - String of the individual recipient of the award (if applicable)
        """
        team = int(team)
        awards = []
        for award in self.awards:
            for recipient in award['recipient_list']:
                if recipient['team_number'] == team:
                    awards.append({'award': award, 'name': award['name'], 'awardee': recipient['awardee']})
        return awards
    @team_wrap(pos=1, format="{}")
    def team_ranking(self, team, array=False):
        """Return the ranking information about a team for this event.
        :param team: (int or str formatted as 'frcXXXX') The team to get ranking information for.
        :param array: (bool) returns info as an array if true, otherwise returns as a dict (default).
        :return: Either an array (array=True) or a dict with the team's ranking information at this event. If returned
        as a dict, the keys will be the headers used for that year's game's ranking table. (See
        https://www.thebluealliance.com/apidocs#event-rankings-request for more info on specific headers used per year)
        Typically, a team's rank is under "Rank" (capital R). Returns None when the team has no ranking row.
        """
        # rankings[0] is the header row; the remaining rows hold team data
        # with the team number in column 1.
        if not array: headers = self.rankings[0]
        rank = None
        for row in self.rankings:
            if row[1] == team:
                rank = row
                break
        if rank is None: return None
        if array: return rank
        col = 0
        ranking_dict = {}
        for c in headers:
            ranking_dict[c] = rank[col]
            col += 1
        return ranking_dict
    def get_qual_matches(self):
        """Returns the qualification matches for this event."""
        return [match for match in self.matches if match['comp_level'] == 'qm']
    def get_playoff_matches(self):
        """Returns the playoff matches for this event."""
        return [match for match in self.matches if match['comp_level'] != 'qm']
class MatchHelper:
    """Constant names for the fields of a TBA match model.

    The slash-joined values (e.g. 'alliances/blue/score') look like nested
    lookup paths - presumably consumed by a path-based accessor elsewhere
    in the package; verify against callers.
    """
    COMP_LEVEL = 'comp_level'
    MATCH_NUMBER = 'match_number'
    VIDEOS = 'videos'
    TIME_STRING = 'time_string'
    SET_NUMBER = 'set_number'
    EVENT_KEY = 'event_key'
    KEY = 'key'
    TIME = 'time'
    SCORE_BREAKDOWN = 'score_breakdown'
    ALLIANCES = 'alliances'
    BLUE_ALLIANCE = 'alliances/blue'
    BLUE_ALLIANCE_TEAMS = BLUE_ALLIANCE + '/teams'
    BLUE_ALLIANCE_SCORE = BLUE_ALLIANCE + '/score'
    RED_ALLIANCE = 'alliances/red'
    RED_ALLIANCE_TEAMS = RED_ALLIANCE + '/teams'
    RED_ALLIANCE_SCORE = RED_ALLIANCE + '/score'
def match_sort_key(match):
    """Sorting key placing matches in chronological order.

    Qualifications come first, then eighths, quarters, semis and finals;
    within an elimination level, matches order by set then match number.
    """
    level_offsets = {
        'qm': 0,
        'ef': 1000,
        'qf': 2000,
        'sf': 3000,
        'f': 4000,
    }
    comp_level = match['comp_level']
    key = level_offsets[comp_level] + match['match_number']
    if comp_level != 'qm':
        # Qualification matches have no meaningful set number.
        key += 100 * match['set_number']
    return key
| Ninjakow/TrueSkill | lib/pytba/models.py | Python | gpl-3.0 | 7,506 |
from builtins import range
from builtins import object
import rdflib
import os
import collections
import requests
from dataurl import DataURLStorage
from werkzeug.utils import secure_filename
import tempfile
from depot.io.utils import FileIntent
from depot.manager import DepotManager
from datetime import datetime
import pytz
from whyis.namespace import np, prov, dc, frbr
from uuid import uuid4
from datastore import create_id
class Nanopublication(rdflib.ConjunctiveGraph):
    """A nanopublication: a named-graph bundle with assertion, provenance
    and publication-info subgraphs (np vocabulary).

    The subgraphs are created lazily and cached on first access; their
    identifiers derive from self.identifier unless it is a blank node.
    """
    # Cached rdflib Resource for the nanopublication node itself.
    _nanopub_resource = None
    @property
    def nanopub_resource(self):
        """Resource for this nanopub, typed np:Nanopublication on first use."""
        if self._nanopub_resource is None:
            self._nanopub_resource = self.resource(self.identifier)
            if not self._nanopub_resource[rdflib.RDF.type: np.Nanopublication]:
                self._nanopub_resource.add(rdflib.RDF.type, np.Nanopublication)
        return self._nanopub_resource
    @property
    def assertion_resource(self):
        """Resource view of the assertion graph's identifier."""
        return self.resource(self.assertion.identifier)
    @property
    def pubinfo_resource(self):
        """Resource view of the publication-info graph's identifier."""
        return self.resource(self.pubinfo.identifier)
    @property
    def provenance_resource(self):
        """Resource view of the provenance graph's identifier."""
        return self.resource(self.provenance.identifier)
    # Cached assertion subgraph.
    _assertion = None
    @property
    def assertion(self):
        """The assertion subgraph, created and linked via np:hasAssertion
        on first access."""
        if self._assertion is None:
            assertion = self.nanopub_resource.value(np.hasAssertion)
            if assertion is None:
                if isinstance(self.identifier, rdflib.BNode):
                    assertion = self.resource(rdflib.BNode())
                else:
                    assertion = self.resource(self.identifier + "_assertion")
                assertion.add(rdflib.RDF.type, np.Assertion)
                self.add((self.identifier, np.hasAssertion, assertion.identifier))
            self._assertion = rdflib.Graph(store=self.store, identifier=assertion.identifier)
        return self._assertion
    # Cached publication-info subgraph.
    _pubinfo = None
    @property
    def pubinfo(self):
        """The publication-info subgraph, created and linked via
        np:hasPublicationInfo on first access."""
        if self._pubinfo is None:
            pubinfo = self.nanopub_resource.value(np.hasPublicationInfo)
            if pubinfo is None:
                if isinstance(self.identifier, rdflib.BNode):
                    pubinfo = self.resource(rdflib.BNode())
                else:
                    pubinfo = self.resource(self.identifier + "_pubinfo")
                pubinfo.add(rdflib.RDF.type, np.PublicationInfo)
                self.add((self.identifier, np.hasPublicationInfo, pubinfo.identifier))
            self._pubinfo = rdflib.Graph(store=self.store, identifier=pubinfo.identifier)
        return self._pubinfo
    # Cached provenance subgraph.
    _provenance = None
    @property
    def provenance(self):
        """The provenance subgraph, created and linked via np:hasProvenance
        on first access."""
        if self._provenance is None:
            provenance = self.nanopub_resource.value(np.hasProvenance)
            if provenance is None:
                if isinstance(self.identifier, rdflib.BNode):
                    provenance = self.resource(rdflib.BNode())
                else:
                    provenance = self.resource(self.identifier + "_provenance")
                provenance.add(rdflib.RDF.type, np.Provenance)
                self.add((self.identifier, np.hasProvenance, provenance.identifier))
            self._provenance = rdflib.Graph(store=self.store, identifier=provenance.identifier)
        return self._provenance
    @property
    def modified(self):
        """dc:modified timestamp of the assertion graph, or None if absent."""
        modified = self.pubinfo.value(self.assertion.identifier, dc.modified)
        if modified is not None:
            return modified.value
| tetherless-world/satoru | whyis/nanopub/nanopublication.py | Python | apache-2.0 | 3,459 |
from blogfetch import BlogFetch
from gspreadsheet import DB
| TXTPEN/blog-fetch | blogfetch/__init__.py | Python | bsd-3-clause | 60 |
# coding=utf-8
'''
Created on 2013-7-22
Book-scanning (page screenshot) operations
@author: gudh
'''
import win32api,win32con,win32gui
import time
import Image,ImageGrab
import os, traceback
import bookconfig,bookorm
def move(loc):
    '''Move the mouse cursor to loc, an (x, y) screen coordinate.'''
    win32api.SetCursorPos(loc)
def click(left=True):
    '''Press and release a mouse button at the current cursor position.

    left: click the left button when True, otherwise the right button.
    '''
    if left:
        d = win32con.MOUSEEVENTF_LEFTDOWN
        u = win32con.MOUSEEVENTF_LEFTUP
    else:
        d = win32con.MOUSEEVENTF_RIGHTDOWN
        u = win32con.MOUSEEVENTF_RIGHTUP
    win32api.mouse_event(d, 0, 0)
    time.sleep(0.1)
    win32api.mouse_event(u, 0, 0)
    time.sleep(0.1)
def double_click():
    '''Double-click the left mouse button at the current position.'''
    click()
    time.sleep(0.1)
    click()
def move_click_sleep(pos_sleep):
    '''Move to (pos_sleep[0], pos_sleep[1]), click, then sleep pos_sleep[2] seconds.'''
    move(pos_sleep[0:2])
    click()
    time.sleep(pos_sleep[2])
def move_double_click_sleep(pos_sleep):
    '''Move to (pos_sleep[0], pos_sleep[1]), double-click, then sleep pos_sleep[2] seconds.'''
    move(pos_sleep[0:2])
    double_click()
    time.sleep(pos_sleep[2])
def cut(dect):
    '''Grab the full screen and return the crop given by dect
    (left, upper, right, lower), as a PIL image.'''
    im = ImageGrab.grab()
    im1 = im.crop(dect)
    return im1
def save(img, path, qualit=85):
    '''Save img as a JPEG at *path*, creating parent directories as needed.

    qualit: JPEG quality (0-100); lower values give smaller files.
    '''
    if not os.path.exists(os.path.split(path)[0]):
        os.makedirs(os.path.split(path)[0])
    img.save(path, 'JPEG', quality = qualit)
def zoom_cover(cover_path):
    '''Recompress a cover image in place, choosing a JPEG quality level
    inversely proportional to its current file size (KB) to shrink it.

    Errors are printed (traceback) but never raised - best effort.
    '''
    try:
        img = Image.open(cover_path)
        size = os.path.getsize(cover_path) / 1024
        qua = 70
        if size > 20:
            qua = 5
        elif size > 15:
            qua = 10
        elif size > 10:
            qua = 20
        elif size > 5:
            qua = 30
        img.save(cover_path, 'JPEG', quality = qua)
    except Exception:
        # Narrowed from a bare except so Ctrl-C/SystemExit still propagate;
        # PIL/OS errors are still swallowed after printing the traceback.
        traceback.print_exc()
def is_white(img):
    '''Return True if every pixel of img is pure white (255, 255, 255).

    Used by shot_book to detect having paged past the end of a book.
    '''
    size = img.size
    for x in range(size[0]):
        for y in range(size[1]):
            if img.getpixel((x, y)) != (255, 255, 255):
                return False
    print "iswhite true"
    return True
def print_rgb(img):
    '''Debug helper: print the (x, y) coordinate and RGB value of every pixel.'''
    size = img.size
    for x in range(size[0]):
        for y in range(size[1]):
            print x,y,img.getpixel((x, y))
def is_bold(img, blod_para):
    '''Heuristic bold-text detector.

    blod_para is (w, h, p): the image counts as bold when it contains a
    solid w x h block of pixels whose R, G and B values are all below p.
    '''
    size = img.size
    arr = []
    (w, h, p) = blod_para
    for y in range(size[1]):
        arx = []
        for x in range(size[0]):
            pix = img.getpixel((x, y))
            r = False
            if pix[0] < p and pix[1] < p and pix[2] < p:
                # Dark enough in all three channels to count as ink.
                r = True
                if x >= w and y >= h:
                    # Enough pixels scanned: test the w x h window that
                    # ends just before the current coordinate.
                    b = True
                    # Bold iff every previously-recorded pixel in that
                    # window was dark; bail out on the first miss.
                    for i in range(x-w, x):
                        for j in range(y-h, y):
                            b &= arr[j][i]
                            if not b:
                                break
                        else: continue
                        break
                    if b:
                        return True
            # Remember this pixel's darkness flag for later window tests.
            arx.append(r)
        arr.append(arx)
    return False
def is_equal(img1, img2, jump=1):
    '''Return True if img1 and img2 are the same size and pixel-identical,
    comparing every *jump*-th pixel in each direction. None inputs compare
    unequal.'''
    if img1 == None or img2 == None:
        return False
    size = img1.size
    if size != img2.size:
        return False
    for y in range(0, size[1], jump):
        for x in range(0, size[0], jump):
            if img1.getpixel((x, y)) != img2.getpixel((x, y)):
                return False
    print "isequal true"
    return True
def shot_book(img_dect, inner_blank_sleep, next_pos_sleep, book, cid):
    '''Capture every page of *book* by screenshotting the img_dect region
    and clicking to the next page until a blank or repeated page signals
    the end of the book.

    Saves a high-quality and a low-quality JPEG per page under the book's
    content directory, then stores the final page count on the book record.
    Returns True when more than 5 pages were captured.
    '''
    if not book:
        print "shot book can't none"
        return
    flag = 0
    nid = book.nid
    t = book.createTime[0:10].replace("-", "")
    # Path template: %s slots are (quality dir, chapter id, page number).
    path = bookconfig.rootpath + t + "/content/%s/" + nid[0:2] + "/" + nid[2:4] + "/" + nid[4:] + "/%s/%s.jpg"
    last_img = None
    i = 0
    # Click a blank spot to give the reader window focus.
    move_click_sleep(inner_blank_sleep)
    while True:
        i += 1
        img = cut(img_dect)
        if is_white(img) or is_equal(img, last_img):
            # Blank or unchanged page: probably the end of the book.
            flag += 1
            if flag >= bookconfig.equal_times:
                i -= 1
                break
            else:
                i -= 1
                continue
        else:
            flag = 0
        s_i = str(i)
        #if is_bold(img, bookconfig.blod_para):
        #    s_i = s_i + "_b"
        hpath = path % ("h", cid, s_i)
        lpath = path % ("l", cid, s_i)
        save(img, hpath)
        save(img, lpath, 30)
        print "save ok: " + hpath
        # Remember this page to detect an unchanged (stuck) page next time.
        last_img = img
        # Turn to the next page.
        move_click_sleep(next_pos_sleep)
    book = bookorm.get_book(nid)
    book.imgCount = i
    book.upTime()
    bookorm.save_book(book)
    return book.imgCount > 5
def pos_to_first_book(down_time=10):
    '''From the end of the previous book, navigate the reader UI back to
    the reading page of the first book in the free-reading list.

    down_time: seconds to wait for the book download to finish.
    '''
    move_click_sleep(bookconfig.fhsj_pos_sleep)
    move_click_sleep(bookconfig.wdcd_pos_sleep)
    move_click_sleep(bookconfig.sx_pos_sleep)
    move_click_sleep(bookconfig.zxcd_first_pos_sleep)
    move_double_click_sleep(bookconfig.zxcd_first_pos_sleep)
    print "begin down book sleep: %d" % down_time
    time.sleep(down_time) # wait for the download to complete
    move_double_click_sleep(bookconfig.zxcd_first_pos_sleep)
    move_double_click_sleep(bookconfig.zxcd_first_pos_sleep)
def pos_to_loc_book(loc, down_time=10):
    '''From the end of the previous book, navigate to the book *loc*
    entries down the free-reading list and open its reading page.

    down_time: seconds to wait for the book download to finish.
    '''
    move_click_sleep(bookconfig.fhsj_pos_sleep)
    move_click_sleep(bookconfig.wdcd_pos_sleep)
    move_click_sleep(bookconfig.sx_pos_sleep)
    move_click_sleep(bookconfig.zxcd_first_pos_sleep)
    # Split loc into page-down clicks plus a row offset within the view.
    down_times = loc
    row_num = 0
    if down_times > bookconfig.max_down_times:
        # Beyond the scroll limit, select by row offset instead.
        down_times = bookconfig.max_down_times
        row_num = loc - down_times
    # Scroll down to bring the target entry into view.
    for i in range(down_times):
        i = i
        move_click_sleep(bookconfig.down_pos_sleep)
    # Work out the on-screen position of the target entry.
    book_pos = bookconfig.zxcd_first_pos_sleep[:]
    book_pos[1] = book_pos[1] + (row_num * bookconfig.row_height)
    # Move there, trigger the download, then open the book.
    move_double_click_sleep(book_pos)
    print "begin down book sleep: %d" % down_time
    time.sleep(down_time) # wait for the download to complete
    move_double_click_sleep(book_pos)
    move_double_click_sleep(book_pos)
def shot_first_book(book, cid="1", down_time=15):
    '''Open the first book in the free-reading list and capture all of its pages.'''
    pos_to_first_book(down_time)
    start_pos = bookconfig.start_pos
    shot_size = bookconfig.shot_size
    # Screenshot rectangle: (left, upper, right, lower).
    dect = (start_pos[0], start_pos[1], start_pos[0] + shot_size[0], start_pos[1] + shot_size[1])
    return shot_book(dect, bookconfig.inner_blank_sleep, bookconfig.next_pos_sleep, book, cid)
def shot_point_book(book, loc, cid="1", down_time=15):
    '''Open the book at list position *loc* and capture all of its pages.'''
    pos_to_loc_book(loc, down_time)
    start_pos = bookconfig.start_pos
    shot_size = bookconfig.shot_size
    # Screenshot rectangle: (left, upper, right, lower).
    dect = (start_pos[0], start_pos[1], start_pos[0] + shot_size[0], start_pos[1] + shot_size[1])
    return shot_book(dect, bookconfig.inner_blank_sleep, bookconfig.next_pos_sleep, book, cid)
if __name__ == '__main__':
    # Manual entry point: 2s to bring the reader window to the foreground,
    # then capture the hard-coded book from its first page.
    time.sleep(2)
    nid = "7738afd367cac04d3d52489a2a3e584e"
    book = bookorm.get_book(nid)
    shot_first_book(book)
| Yhzhtk/bookcatch | bookshot.py | Python | gpl-2.0 | 7,589 |
#################################################################################################
# Programmer: Chase Johnson #
# Date: 10/20/17 #
# File Name: ExtractingNutrition.py #
# Description: Parses through the nutrition facts of an item on myfitnesspal.com. Takes as #
# parameters the url of the nutrition facts (obtained from searchForProductURL() #
# in the findProductUrl.py file) and the nutrition item you want to find #
# (ex: Calories) #
# #
#################################################################################################
import bs4 as bs
import urllib.request
def getNutritionInfo(url, key):
    """Return the value of one nutrition-facts entry for a myfitnesspal item.

    Parameters:
        url: URL of the item's nutrition-facts page (e.g. obtained from
            searchForProductURL() in findProductUrl.py).
        key: name of the nutrition entry to look up (e.g. "Calories").

    Returns:
        The entry's value as a string, or None when the key is not present
        in the table.
    """
    # Download and parse the page.
    sauce = urllib.request.urlopen(url).read()
    soup = bs.BeautifulSoup(sauce, 'lxml')

    # The nutrition table alternates <td> cells: title, value, title, value...
    cells = [col.text for col in soup.find_all('td')]

    # Pair every title cell (even index) with the value cell that follows it
    # (odd index); zip stops at the shorter slice, so a trailing unmatched
    # title is safely ignored.
    nutFacts = dict(zip(cells[0::2], cells[1::2]))

    return nutFacts.get(key)
# print(getNutritionInfo('http://www.myfitnesspal.com/food/calories/381378836', "Calories"))
# This is an example: print(getNutritionInfo('http://www.myfitnesspal.com/food/calories/382905495', 'Calories'))
| zkhan4123/hackISU | ExtractingNutrition.py | Python | mit | 2,391 |
# tests/base.py
from flask_testing import TestCase
from app import create_app
from app.models import User, db
# Single application instance shared by every test case in this module;
# each test case re-points its config at TestingConfig in create_app().
app = create_app()
class BaseTestCase(TestCase):
    """Shared test fixture: testing config, a fresh schema, and one seeded user."""

    def create_app(self):
        """Return the Flask application configured for testing."""
        app.config.from_object('app.config.TestingConfig')
        return app

    def setUp(self):
        """Create all tables and seed the default admin account."""
        db.create_all()
        db.session.add(User(email="ad@min.com", password="admin_user"))
        db.session.commit()

    def tearDown(self):
        """Release the session and drop the schema after each test."""
        db.session.remove()
        db.drop_all()
| paris3200/flask-inventory | tests/base.py | Python | mit | 510 |
__author__ = 'we32zac'
from pyEOM.datasets import Dataset as DatasetAbs
class Dataset(DatasetAbs):
    """Descriptor for the MODIS Aqua MYD13A3 product (monthly 1 km vegetation indices)."""
    # LPDAAC product identification: collection 5, tiled grid, one composite
    # per month (ISO-8601 period 'P1M').
    shortname = 'MYD13A3'
    platform = 'Aqua'
    collection = '005'
    rastertype = 'Tile'
    timeInterval = 'P1M'
    # Download endpoint on the LP DAAC data pool (Aqua products live under MOLA).
    host = 'http://e4ftl01.cr.usgs.gov'
    dir = '/MODIS_Composites/MOLA/MYD13A3.005'
    sources = ['LPDAAC']
    def getDownloadInfo(self):
        """Return the metadata needed to locate and download this product."""
        return dict(shortname=self.shortname, platform=self.platform, collection=self.collection, rastertype=self.rastertype, host=self.host, directory=self.dir, sources=self.sources)
    def getBands(self):
        """Return all band definitions keyed by band name."""
        return self.bands
    def getThematicBands(self):
        """Return the thematic (science) bands: EVI and NDVI."""
        return [self.bands['EVI'], self.bands['NDVI']]
    def getQualityBands(self):
        """Return quality bands; none are defined for this descriptor."""
        return []
    # Band definitions used by the catalogue/WMS/WCS layers.
    # NOTE(review): the 'abstract' texts below reference the Terra directory
    # (/MOLT/) while this Aqua product is served from /MOLA/ (as in 'lineage'
    # and 'dir' above) -- confirm the intended wording.
    bands = dict(NDVI={
        'name': 'MOD_Grid_monthly_1km_VI:1 km monthly NDVI',
        'nodata': -3000,
        'scale': 0.0001,
        'offset': None,
        'imagetype': 'thematicClassification',
        'identifier': 'MODIS_MYD13_A3_NDVI_Series',
        'title': 'Monthly Normalized Difference Vegetation Index from MODIS Aqua',
        'abstract': 'Time-series of monthly Aqua MODIS Normalized Difference Vegetation Index (NDVI) at 1 km spatial resolution. To retrieve actual values a scale factor of 0.0001 has to be applied. The unscaled nodata value is encoded as 0. Original MODIS data retrieved from the Land Processes Distributed Active Archive Center (ftp://e4ftl01.cr.usgs.gov/MOLT/).',
        'keywords': 'MODIS,Aqua,Siberia,NDVI,Normalized Difference Vegetation Index,Vegetation,Index,Global,Monthly,Series',
        'lineage': 'Original MODIS data retrieved from the Land Processes Distributed Active Archive Center (ftp://e4ftl01.cr.usgs.gov/MOLA/) and processed with GDAL 1.9.0.',
        'datasetname': 'Normalized Difference Vegetation Index',
        'datatype': 'RASTER',
        'resolution': 1000.0,
        'layername': 'myd13a3_ndvi',
        'templates': 'template_header_evi.html',
        'wcs_description': 'MODIS Aqua NDVI Monthly',
        'wms_description': 'MODIS Aqua NDVI Monthly',
        'colormap': 'ndvi_colorbar.map',
        'resolution_unit': 'm',
        'unit': 'None'
    },EVI={
        'name': 'MOD_Grid_monthly_1km_VI:1 km monthly EVI',
        'nodata': -3000,
        'scale': 0.0001,
        'offset': None,
        'imagetype': 'thematicClassification',
        'identifier': 'MODIS_MYD13_A3_EVI_Series',
        'title': 'Monthly Enhanced Vegetation Index from MODIS Aqua',
        'abstract': 'Time-series of monthly Aqua MODIS Enhanced Vegetation Index (EVI) at 1 km spatial resolution. To retrieve actual values a scale factor of 0.0001 has to be applied. The unscaled nodata value is encoded as 0. Original MODIS data retrieved from the Land Processes Distributed Active Archive Center (ftp://e4ftl01.cr.usgs.gov/MOLT/).',
        'keywords': 'MODIS,Aqua,Siberia,EVI,Enhanced Vegetation Index,Vegetation,Index,Global,Monthly,Series',
        'lineage': 'Original MODIS data retrieved from the Land Processes Distributed Active Archive Center (ftp://e4ftl01.cr.usgs.gov/MOLA/) and processed with GDAL 1.9.0.',
        'datasetname': 'Enhanced Vegetation Index',
        'datatype': 'RASTER',
        'resolution': 1000.0,
        'layername': 'myd13a3_evi',
        'templates': 'template_header_evi.html',
        'wcs_description': 'MODIS Aqua EVI Monthly',
        'wms_description': 'MODIS Aqua EVI Monthly',
        'colormap': 'evi_colorbar.map',
        'resolution_unit': 'm',
        'unit': 'None'
    }
)
) | jonas-eberle/pyEOM | pyEOM/datasets/predefined/MODIS/MYD13A3.py | Python | mit | 3,773 |
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Common functionalities shared between different iLO modules.
"""
import tempfile
from oslo.config import cfg
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LE
from ironic.common.i18n import _LI
from ironic.common import images
from ironic.common import swift
from ironic.common import utils
from ironic.drivers import utils as driver_utils
from ironic.openstack.common import log as logging
ilo_client = importutils.try_import('proliantutils.ilo.ribcl')
# iLO license levels, ordered by increasing capability; returned by
# get_ilo_license() below.
STANDARD_LICENSE = 1
ESSENTIALS_LICENSE = 2
ADVANCED_LICENSE = 3
# Configuration options registered under the [ilo] group.
opts = [
    cfg.IntOpt('client_timeout',
               default=60,
               help='Timeout (in seconds) for iLO operations'),
    cfg.IntOpt('client_port',
               default=443,
               help='Port to be used for iLO operations'),
    cfg.StrOpt('swift_ilo_container',
               default='ironic_ilo_container',
               help='The Swift iLO container to store data.'),
    cfg.IntOpt('swift_object_expiry_timeout',
               default=900,
               help='Amount of time in seconds for Swift objects to '
                    'auto-expire.'),
]
CONF = cfg.CONF
CONF.register_opts(opts, group='ilo')
LOG = logging.getLogger(__name__)
# driver_info keys this driver understands, with help text used for
# validation errors and API documentation.
REQUIRED_PROPERTIES = {
    'ilo_address': _("IP address or hostname of the iLO. Required."),
    'ilo_username': _("username for the iLO with administrator privileges. "
                      "Required."),
    'ilo_password': _("password for ilo_username. Required.")
}
OPTIONAL_PROPERTIES = {
    'client_port': _("port to be used for iLO operations. Optional."),
    'client_timeout': _("timeout (in seconds) for iLO operations. Optional.")
}
CONSOLE_PROPERTIES = {
    'console_port': _("node's UDP port to connect to. Only required for "
                      "console access.")
}
COMMON_PROPERTIES = REQUIRED_PROPERTIES.copy()
COMMON_PROPERTIES.update(OPTIONAL_PROPERTIES)
# Mapping between Ironic's generic boot-mode names and the iLO's names;
# DEFAULT_BOOT_MODE is assumed when the iLO cannot report a pending mode.
DEFAULT_BOOT_MODE = 'LEGACY'
BOOT_MODE_GENERIC_TO_ILO = {'bios': 'legacy', 'uefi': 'uefi'}
BOOT_MODE_ILO_TO_GENERIC = dict((v, k)
                                for (k, v) in BOOT_MODE_GENERIC_TO_ILO.items())
def parse_driver_info(node):
    """Gets the driver specific Node deployment info.

    This method validates whether the 'driver_info' property of the
    supplied node contains the required information for this driver.

    :param node: an ironic node object.
    :returns: a dict containing information from driver_info
        and default values.
    :raises: InvalidParameterValue on invalid inputs.
    :raises: MissingParameterValue if some mandatory information
        is missing on the node
    """
    info = node.driver_info
    d_info = {}
    error_msgs = []
    # Collect every missing required property so the operator sees all
    # problems in one pass.
    for param in REQUIRED_PROPERTIES:
        try:
            d_info[param] = info[param]
        except KeyError:
            error_msgs.append(_("'%s' not supplied to IloDriver.") % param)
    if error_msgs:
        # Fixed typo in the user-facing message: "mising" -> "missing".
        msg = (_("The following parameters were missing while parsing "
                 "driver_info:\n%s") % "\n".join(error_msgs))
        raise exception.MissingParameterValue(msg)
    # Optional properties fall back to the [ilo] config defaults and must
    # parse as integers (port and timeout values).
    for param in OPTIONAL_PROPERTIES:
        value = info.get(param, CONF.ilo.get(param))
        try:
            value = int(value)
        except ValueError:
            error_msgs.append(_("'%s' is not an integer.") % param)
            continue
        d_info[param] = value
    # Console properties are only validated when supplied.
    for param in CONSOLE_PROPERTIES:
        value = info.get(param)
        if value:
            try:
                value = int(value)
                d_info[param] = value
            except ValueError:
                error_msgs.append(_("'%s' is not an integer.") % param)
    if error_msgs:
        msg = (_("The following errors were encountered while parsing "
                 "driver_info:\n%s") % "\n".join(error_msgs))
        raise exception.InvalidParameterValue(msg)
    return d_info
def get_ilo_object(node):
    """Gets an IloClient object from proliantutils library.

    Given an ironic node object, this method gives back a IloClient object
    to do operations on the iLO.

    :param node: an ironic node object.
    :returns: an IloClient object.
    :raises: InvalidParameterValue on invalid inputs.
    :raises: MissingParameterValue if some mandatory information
        is missing on the node
    """
    # parse_driver_info() validates the node and fills in config defaults.
    info = parse_driver_info(node)
    return ilo_client.IloClient(info['ilo_address'],
                                info['ilo_username'],
                                info['ilo_password'],
                                info['client_timeout'],
                                info['client_port'])
def get_ilo_license(node):
    """Gives the current installed license on the node.

    Given an ironic node object, this method queries the iLO
    for currently installed license and returns it back.

    :param node: an ironic node object.
    :returns: a constant defined in this module which
        refers to the current license installed on the node.
    :raises: InvalidParameterValue on invalid inputs.
    :raises: MissingParameterValue if some mandatory information
        is missing on the node
    :raises: IloOperationError if it failed to retrieve the
        installed licenses from the iLO.
    """
    ilo_object = get_ilo_object(node)
    try:
        license_info = ilo_object.get_all_licenses()
    except ilo_client.IloError as ilo_exception:
        raise exception.IloOperationError(operation=_('iLO license check'),
                                          error=str(ilo_exception))
    # Map the reported license name suffix onto this module's constants;
    # anything unrecognised is treated as the standard license.
    license_name = license_info['LICENSE_TYPE']
    if license_name.endswith("Advanced"):
        return ADVANCED_LICENSE
    if license_name.endswith("Essentials"):
        return ESSENTIALS_LICENSE
    return STANDARD_LICENSE
def _get_floppy_image_name(node):
"""Returns the floppy image name for a given node.
:param node: the node for which image name is to be provided.
"""
return "image-%s" % node.uuid
def _prepare_floppy_image(task, params):
    """Prepares the floppy image for passing the parameters.

    This method prepares a temporary vfat filesystem image. Then it adds
    two files into the image - one containing the authentication token and
    the other containing the parameters to be passed to the ramdisk. Then it
    uploads the file to Swift in 'swift_ilo_container', setting it to
    auto-expire after 'swift_object_expiry_timeout' seconds. Then it returns
    the temp url for the Swift object.

    :param task: a TaskManager instance containing the node to act on.
    :param params: a dictionary containing 'parameter name'->'value' mapping
        to be passed to the deploy ramdisk via the floppy image.
    :returns: the Swift temp url for the floppy image.
    """
    with tempfile.NamedTemporaryFile() as vfat_image_tmpfile_obj:
        files_info = {}
        token_tmpfile_obj = None
        vfat_image_tmpfile = vfat_image_tmpfile_obj.name
        # If auth_strategy is noauth, then no need to write token into
        # the image file.
        if task.context.auth_token:
            token_tmpfile_obj = tempfile.NamedTemporaryFile()
            token_tmpfile = token_tmpfile_obj.name
            utils.write_to_file(token_tmpfile, task.context.auth_token)
            # The file will appear as 'token' inside the vfat image.
            files_info[token_tmpfile] = 'token'
        # Keep the token tempfile alive until the vfat image has been
        # written, then close it (which also deletes it).
        try:
            images.create_vfat_image(vfat_image_tmpfile, files_info=files_info,
                                     parameters=params)
        finally:
            if token_tmpfile_obj:
                token_tmpfile_obj.close()
        container = CONF.ilo.swift_ilo_container
        object_name = _get_floppy_image_name(task.node)
        # Let Swift auto-delete the image after the configured timeout so
        # stale floppy images do not accumulate.
        timeout = CONF.ilo.swift_object_expiry_timeout
        object_headers = {'X-Delete-After': timeout}
        swift_api = swift.SwiftAPI()
        swift_api.create_object(container, object_name,
                                vfat_image_tmpfile,
                                object_headers=object_headers)
        temp_url = swift_api.get_temp_url(container, object_name, timeout)
        LOG.debug("Uploaded floppy image %(object_name)s to %(container)s "
                  "for deployment.",
                  {'object_name': object_name, 'container': container})
        return temp_url
def attach_vmedia(node, device, url):
    """Attaches the given url as virtual media on the node.

    :param node: an ironic node object.
    :param device: the virtual media device to attach
    :param url: the http/https url to attach as the virtual media device
    :raises: IloOperationError if insert virtual media failed.
    """
    ilo_object = get_ilo_object(node)
    try:
        ilo_object.insert_virtual_media(url, device=device)
        # Connect the media on next boot and keep it read-only for the host.
        ilo_object.set_vm_status(device=device, boot_option='CONNECT',
                                 write_protect='YES')
    except ilo_client.IloError as ilo_exception:
        operation = _("Inserting virtual media %s") % device
        raise exception.IloOperationError(operation=operation,
                                          error=ilo_exception)
    LOG.info(_LI("Attached virtual media %s successfully."), device)
# TODO(rameshg87): This needs to be moved to iLO's management interface.
def set_boot_device(node, device, persistent=False):
    """Sets the node to boot from a device for the next boot.

    :param node: an ironic node object.
    :param device: the device to boot from
    :param persistent: when True the setting survives subsequent boots;
        otherwise it applies to the next boot only.
    :raises: IloOperationError if setting boot device failed.
    """
    ilo_object = get_ilo_object(node)
    try:
        if persistent:
            ilo_object.update_persistent_boot([device])
        else:
            ilo_object.set_one_time_boot(device)
    except ilo_client.IloError as ilo_exception:
        operation = _("Setting %s as boot device") % device
        raise exception.IloOperationError(operation=operation,
                                          error=ilo_exception)
    LOG.debug("Node %(uuid)s set to boot from %(device)s.",
              {'uuid': node.uuid, 'device': device})
def set_boot_mode(node, boot_mode):
    """Sets the node to boot using boot_mode for the next boot.

    :param node: an ironic node object.
    :param boot_mode: Next boot mode.
    :raises: IloOperationError if setting boot mode failed.
    """
    ilo_object = get_ilo_object(node)
    # Older iLOs cannot report a pending boot mode; assume the default.
    try:
        p_boot_mode = ilo_object.get_pending_boot_mode()
    except ilo_client.IloCommandNotSupportedError:
        p_boot_mode = DEFAULT_BOOT_MODE
    # Nothing to do if the requested mode is already the pending one.
    if BOOT_MODE_ILO_TO_GENERIC[p_boot_mode.lower()] == boot_mode:
        LOG.info(_LI("Node %(uuid)s pending boot mode is %(boot_mode)s."),
                 {'uuid': node.uuid, 'boot_mode': boot_mode})
        return
    try:
        ilo_object.set_pending_boot_mode(
            BOOT_MODE_GENERIC_TO_ILO[boot_mode].upper())
    except ilo_client.IloError as ilo_exception:
        operation = _("Setting %s as boot mode") % boot_mode
        raise exception.IloOperationError(operation=operation,
                                          error=ilo_exception)
    LOG.info(_LI("Node %(uuid)s boot mode is set to %(boot_mode)s."),
             {'uuid': node.uuid, 'boot_mode': boot_mode})
def update_boot_mode_capability(task):
    """Update 'boot_mode' capability value of node's 'capabilities' property.

    Queries the iLO for its pending boot mode and records the generic name
    ('bios' or 'uefi') as the node's 'boot_mode' capability.

    :param task: Task object.
    """
    ilo_object = get_ilo_object(task.node)
    try:
        p_boot_mode = ilo_object.get_pending_boot_mode()
        if p_boot_mode == 'UNKNOWN':
            # NOTE(faizan) ILO will return this in remote cases and mostly on
            # the nodes which supports UEFI. Such nodes mostly comes with UEFI
            # as default boot mode. So we will try setting bootmode to UEFI
            # and if it fails then we fall back to BIOS boot mode.
            ilo_object.set_pending_boot_mode('UEFI')
            p_boot_mode = 'UEFI'
    except ilo_client.IloCommandNotSupportedError:
        # Older iLOs cannot report a boot mode; assume the module default.
        p_boot_mode = DEFAULT_BOOT_MODE
    # Replace any stale 'boot_mode' capability with the fresh value.
    driver_utils.rm_node_capability(task, 'boot_mode')
    driver_utils.add_node_capability(task, 'boot_mode',
                                     BOOT_MODE_ILO_TO_GENERIC[p_boot_mode.lower()])
def setup_vmedia_for_boot(task, boot_iso, parameters=None):
    """Sets up the node to boot from the given ISO image.

    This method attaches the given boot_iso on the node and passes
    the required parameters to it via virtual floppy image.

    :param task: a TaskManager instance containing the node to act on.
    :param boot_iso: a bootable ISO image to attach to. The boot iso
        should be present in either Glance or in Swift. If present in
        Glance, it should be of format 'glance:<glance-image-uuid>'.
        If present in Swift, it should be of format 'swift:<object-name>'.
        It is assumed that object is present in CONF.ilo.swift_ilo_container.
    :param parameters: the parameters to pass in the virtual floppy image
        in a dictionary.  This is optional.
    :raises: ImageCreationFailed, if it failed while creating the floppy image.
    :raises: IloOperationError, if attaching virtual media failed.
    """
    LOG.info(_LI("Setting up node %s to boot from virtual media"),
             task.node.uuid)
    # Ramdisk parameters (if any) travel on a virtual floppy.
    if parameters:
        floppy_image_temp_url = _prepare_floppy_image(task, parameters)
        attach_vmedia(task.node, 'FLOPPY', floppy_image_temp_url)
    boot_iso_temp_url = None
    # boot_iso is of the form '<scheme>:<reference>'; resolve the reference
    # to a temporary URL the iLO can fetch over HTTP(S).
    scheme, boot_iso_ref = boot_iso.split(':')
    if scheme == 'swift':
        swift_api = swift.SwiftAPI()
        container = CONF.ilo.swift_ilo_container
        object_name = boot_iso_ref
        timeout = CONF.ilo.swift_object_expiry_timeout
        boot_iso_temp_url = swift_api.get_temp_url(container, object_name,
                                                   timeout)
    elif scheme == 'glance':
        glance_uuid = boot_iso_ref
        boot_iso_temp_url = images.get_temp_url_for_glance_image(task.context,
                                                                 glance_uuid)
    attach_vmedia(task.node, 'CDROM', boot_iso_temp_url)
def cleanup_vmedia_boot(task):
    """Cleans a node after a virtual media boot.

    This method cleans up a node after a virtual media boot. It deletes the
    floppy image if it exists in CONF.ilo.swift_ilo_container. It also
    ejects both virtual media cdrom and virtual media floppy.

    :param task: a TaskManager instance containing the node to act on.
    """
    LOG.debug("Cleaning up node %s after virtual media boot", task.node.uuid)
    container = CONF.ilo.swift_ilo_container
    object_name = _get_floppy_image_name(task.node)
    # Best-effort deletion: a Swift failure (e.g. object already expired) is
    # only logged so that cleanup can continue with ejecting the media.
    try:
        swift_api = swift.SwiftAPI()
        swift_api.delete_object(container, object_name)
    except exception.SwiftOperationError as e:
        LOG.exception(_LE("Error while deleting %(object_name)s from "
                          "%(container)s. Error: %(error)s"),
                      {'object_name': object_name, 'container': container,
                       'error': e})
    ilo_object = get_ilo_object(task.node)
    # Eject both devices independently; a failure on one device is logged
    # rather than raised so the other device is still ejected.
    for device in ('FLOPPY', 'CDROM'):
        try:
            ilo_object.eject_virtual_media(device)
        except ilo_client.IloError as ilo_exception:
            LOG.exception(_LE("Error while ejecting virtual media %(device)s "
                              "from node %(uuid)s. Error: %(error)s"),
                          {'device': device, 'uuid': task.node.uuid,
                           'error': ilo_exception})
| froyobin/ironic | ironic/drivers/modules/ilo/common.py | Python | apache-2.0 | 16,198 |
# setup.py
from setuptools import setup
# Minimal packaging metadata for the rllab package.
setup(
    name='rllab',
    version='0.1.0',
    packages=['rllab'],
)
| brain-research/mirage-rl-qprop | setup.py | Python | mit | 113 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Prefer setuptools; fall back to distutils when it is not installed.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
# Packaging metadata for the Sulley fuzzing framework.
setup(
    name='Sulley',
    download_url='https://github.com/OpenRCE/sulley',
    packages=['requests', 'sulley', 'sulley.legos', 'sulley.pgraph', 'sulley.utils',
              'unit_tests', 'utils', 'web'],
    # Map each package name to its directory in the source tree.
    package_dir={'requests': './requests',
                 'sulley': './sulley',
                 'sulley.legos': './sulley/legos',
                 'sulley.pgraph': './sulley/pgraph',
                 'sulley.utils': './sulley/utils',
                 'unit_tests': './unit_tests',
                 'utils': './utils',
                 'web': './web'
                 },
    # Static assets for the built-in web UI.
    package_data={'web': ['templates/*', 'static/css/*']},
    # NOTE(review): the first three requirements are pinned exactly while
    # 'pcapy' and 'impacket' are unpinned -- confirm this is intentional.
    install_requires=['pydot2==1.0.33', 'tornado==4.0.2', 'Flask==0.10.1', 'pcapy', 'impacket']
)
| jtpereyda/sulley | setup.py | Python | gpl-2.0 | 883 |
# -*- coding: utf-8 -*-
from .context import textprocessor
import unittest
class DeleteDigitsTestSuite(unittest.TestCase):
    """Test cases for textprocessor.convert_caps (lower-casing input strings).

    NOTE(review): the class name says "DeleteDigits" and the original
    docstring said "Delete Punctuation Signs", yet every test below
    exercises convert_caps -- consider renaming the class.
    """
    def test_clean_string(self):
        """An already lower-case string is returned unchanged."""
        string = "this is a clean string"
        expected = "this is a clean string"
        res = textprocessor.convert_caps(string)
        self.assertEqual(res, expected, "Strings do not match")
    def test_caps_string(self):
        """An all-caps string is fully lower-cased."""
        string = "THIS IS A STRING"
        expected = "this is a string"
        res = textprocessor.convert_caps(string)
        self.assertEqual(res, expected, "Strings do not match")
    def test_caps_unicode_string(self):
        """An all-caps unicode string is fully lower-cased."""
        string = u"THIS IS A STRING"
        expected = "this is a string"
        res = textprocessor.convert_caps(string)
        self.assertEqual(res, expected, "Strings do not match")
    def test_clean_unicode_string(self):
        """An already lower-case unicode string is returned unchanged."""
        string = u'this is a clean string'
        expected = "this is a clean string"
        res = textprocessor.convert_caps(string)
        self.assertEqual(res, expected, "Strings do not match")
    def test_mix_upper_and_lower_string(self):
        """Mixed-case input is normalized to all lower case."""
        string = 'ThIs iS A cLeaN sTring'
        expected = "this is a clean string"
        res = textprocessor.convert_caps(string)
        self.assertEqual(res, expected, "Strings do not match")
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| Cenorius/practica-final-verificacion | tests/test_caps.py | Python | gpl-3.0 | 1,598 |
"""Testing code for the tupa.features package, unit-testing only."""
import os
from collections import OrderedDict
import pytest
from ucca import textutil
from tupa.action import Actions
from tupa.features.dense_features import DenseFeatureExtractor
from tupa.features.sparse_features import SparseFeatureExtractor
from tupa.model import Model
from tupa.oracle import Oracle
from tupa.states.state import State
from .conftest import passage_files, load_passage, basename
# Names of the two feature extractor families under test.
SPARSE = "sparse"
DENSE = "dense"
# Vocabulary and word-vector fixture files used by the parametrized tests.
VOCAB = os.path.join("test_files", "vocab", "en_core_web_lg.csv")
WORD_VECTORS = os.path.join("test_files", "vocab", "wiki.en.vec")
# Feature-group letter passed as omit_features -- presumably selects the
# dependency ("d") features to drop; confirm against feature definitions.
OMITTED = "d"
class FeatureExtractorCreator:
    """Callable factory that builds a feature extractor from test options."""
    def __init__(self, name, indexed=False, annotated=False, vocab=None, wordvectors=None, omit=None):
        self.name = name
        self.indexed = indexed
        self.annotated = annotated
        self.vocab = vocab
        # "-" appears to select an identity vocabulary -- confirm against
        # the config's vocab handling.
        self.id = vocab == "-"
        self.wordvectors = wordvectors
        self.omit = omit
    def __str__(self):
        # Readable test id such as "dense-indexed-vocab": the name followed
        # by every truthy option flag.
        return "-".join([self.name] + [attr for attr in ("indexed", "annotated", "vocab", "id", "wordvectors", "omit")
                                       if getattr(self, attr)])
    def __call__(self, config):
        # Apply the options to the shared config, then build the extractor.
        config.args.vocab = self.vocab
        config.args.word_vectors = self.wordvectors
        config.args.omit_features = self.omit
        return SparseFeatureExtractor(omit_features=self.omit) if self.name == SPARSE else DenseFeatureExtractor(
            OrderedDict((p.name, p.create_from_config()) for p in Model(None, config=config).param_defs()),
            indexed=self.indexed, node_dropout=0, omit_features=self.omit)
def feature_extractors(*args, **kwargs):
    """Return creators for the sparse, dense, and indexed-dense extractors."""
    creators = [
        FeatureExtractorCreator(SPARSE, *args, **kwargs),
        FeatureExtractorCreator(DENSE, *args, **kwargs),
        FeatureExtractorCreator(DENSE, *args, indexed=True, **kwargs),
    ]
    return creators
def extract_features(feature_extractor, state, features):
    """Extract features for one state, sanity-check their sizes, and collect them."""
    extracted = feature_extractor.extract_features(state)
    if feature_extractor.params:
        # Each feature vector must match the length declared by its parameter.
        for name, feature_values in extracted.items():
            assert len(feature_values) == feature_extractor.params[name].num, name
    features.append(extracted)
def _test_features(config, feature_extractor_creator, filename, write_features):
    """Replay the oracle over one passage, extracting features at every step,
    and compare (or regenerate) the recorded feature dump for this combination."""
    feature_extractor = feature_extractor_creator(config)
    passage = load_passage(filename, annotate=feature_extractor_creator.annotated)
    textutil.annotate(passage, as_array=True, as_extra=False, vocab=config.vocab())
    config.set_format(passage.extra.get("format") or "ucca")
    oracle = Oracle(passage)
    state = State(passage)
    actions = Actions()
    # Disable dropout on non-numeric parameters so extraction is deterministic.
    for key, param in feature_extractor.params.items():
        if not param.numeric:
            param.dropout = 0
            feature_extractor.init_param(key)
    features = [feature_extractor.init_features(state)]
    # Follow the oracle's transition sequence to the end of the passage,
    # collecting features before each transition (and before each labeling).
    while True:
        extract_features(feature_extractor, state, features)
        # min() over str makes the chosen action deterministic across runs.
        action = min(oracle.get_actions(state, actions).values(), key=str)
        state.transition(action)
        if state.need_label:
            extract_features(feature_extractor, state, features)
            label, _ = oracle.get_label(state, action)
            state.label_node(label)
        if state.finished:
            break
    # Serialize each feature dict as sorted "key value" lines, with a blank
    # separator entry after every state.
    features = ["%s %s\n" % i for f in features if f for i in (sorted(f.items()) + [("", "")])]
    compare_file = os.path.join("test_files", "features", "-".join((basename(filename), str(feature_extractor_creator)))
                                + ".txt")
    # With write_features set, regenerate the expected dump before comparing.
    if write_features:
        with open(compare_file, "w", encoding="utf-8") as f:
            f.writelines(features)
    with open(compare_file, encoding="utf-8") as f:
        assert f.readlines() == features, compare_file
@pytest.mark.parametrize("feature_extractor_creator",
                         [f for v in (None, "-", VOCAB) for w in (None, WORD_VECTORS) for o in (None, OMITTED)
                          for f in feature_extractors(vocab=v, wordvectors=w, omit=o)], ids=str)
@pytest.mark.parametrize("filename", passage_files(), ids=basename)
def test_features(config, feature_extractor_creator, filename, write_features):
    """Exercise feature extraction over every passage/vocab/vector/omit combination."""
    _test_features(config, feature_extractor_creator, filename, write_features)
@pytest.mark.parametrize("feature_extractor_creator",
                         [f for v in ("-", VOCAB) for w in (None, WORD_VECTORS) for o in (None, OMITTED)
                          for f in feature_extractors(annotated=True, vocab=v, wordvectors=w, omit=o)], ids=str)
@pytest.mark.parametrize("filename", passage_files("conllu"), ids=basename)
def test_features_conllu(config, feature_extractor_creator, filename, write_features):
    """Same as test_features but on pre-annotated CoNLL-U passages."""
    _test_features(config, feature_extractor_creator, filename, write_features)
@pytest.mark.parametrize("feature_extractor_creator", [f for o in (None, OMITTED)
                                                       for f in feature_extractors(omit=o)[:-1]], ids=str)
def test_feature_templates(config, feature_extractor_creator, write_features):
    """Check the full list of feature templates against the recorded dump.

    The indexed-dense creator (last in the list) is excluded by the [:-1] slice.
    """
    config.set_format("amr")
    feature_extractor = feature_extractor_creator(config)
    features = ["%s\n" % i for i in feature_extractor.all_features()]
    compare_file = os.path.join("test_files", "features", "templates-%s.txt" % str(feature_extractor_creator))
    # With write_features set, regenerate the expected dump before comparing.
    if write_features:
        with open(compare_file, "w") as f:
            f.writelines(features)
    with open(compare_file) as f:
        assert f.readlines() == features, compare_file
| danielhers/tupa | tests/test_features.py | Python | gpl-3.0 | 5,494 |
"""Compatibility fixes for older version of python, numpy and scipy
If you add content to this file, please give the version of the package
at which the fixe is no longer needed.
"""
# Authors: Emmanuelle Gouillart <emmanuelle.gouillart@normalesup.org>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Fabian Pedregosa <fpedregosa@acm.org>
# Lars Buitinck
#
# License: BSD 3 clause
from functools import update_wrapper
from distutils.version import LooseVersion
import functools
import numpy as np
import scipy.sparse as sp
import scipy
import scipy.stats
from scipy.sparse.linalg import lsqr as sparse_lsqr # noqa
from numpy.ma import MaskedArray as _MaskedArray # TODO: remove in 1.0
from .._config import config_context, get_config
from .deprecation import deprecated
# parse_version comes from setuptools when available; otherwise fall back to
# distutils' LooseVersion, which provides compatible comparisons.
try:
    from pkg_resources import parse_version  # type: ignore
except ImportError:
    # setuptools not installed
    parse_version = LooseVersion  # type: ignore
# Parsed versions of the installed numpy and scipy, used throughout this
# module to gate version-dependent code paths.
np_version = parse_version(np.__version__)
sp_version = parse_version(scipy.__version__)
if sp_version >= parse_version('1.4'):
    from scipy.sparse.linalg import lobpcg
else:
    # Backport of lobpcg functionality from scipy 1.4.0, can be removed
    # once support for sp_version < parse_version('1.4') is dropped
    # mypy error: Name 'lobpcg' already defined (possibly by an import)
    from ..externals._lobpcg import lobpcg  # type: ignore # noqa
def _object_dtype_isnan(X):
    # NaN is the only value that compares unequal to itself, so this yields
    # a boolean mask of NaN entries. Works on object-dtype arrays, where
    # np.isnan would raise a TypeError.
    return X != X
# TODO: replace by copy=False, when only scipy > 1.1 is supported.
def _astype_copy_false(X):
    """Returns the copy=False parameter for
    {ndarray, csr_matrix, csc_matrix}.astype when possible,
    otherwise don't specify
    """
    # Sparse matrices only grew the ``copy`` keyword in scipy 1.1.
    supports_copy_kwarg = sp_version >= parse_version('1.1') or not sp.issparse(X)
    return {'copy': False} if supports_copy_kwarg else {}
def _joblib_parallel_args(**kwargs):
    """Set joblib.Parallel arguments in a compatible way for 0.11 and 0.12+

    For joblib 0.11 this maps both ``prefer`` and ``require`` parameters to
    a specific ``backend``.

    Parameters
    ----------

    prefer : str in {'processes', 'threads'} or None
        Soft hint to choose the default backend if no specific backend
        was selected with the parallel_backend context manager.

    require : 'sharedmem' or None
        Hard condstraint to select the backend. If set to 'sharedmem',
        the selected backend will be single-host and thread-based even
        if the user asked for a non-thread based backend with
        parallel_backend.

    See joblib.Parallel documentation for more details
    """
    import joblib

    # joblib >= 0.12 understands 'prefer'/'require' natively; pass through.
    if parse_version(joblib.__version__) >= parse_version('0.12'):
        return kwargs

    extra_args = set(kwargs.keys()).difference({'prefer', 'require'})
    if extra_args:
        raise NotImplementedError('unhandled arguments %s with joblib %s'
                                  % (list(extra_args), joblib.__version__))
    args = {}
    # Translate the soft 'prefer' hint into an explicit 0.11-era backend name.
    if 'prefer' in kwargs:
        prefer = kwargs['prefer']
        if prefer not in ['threads', 'processes', None]:
            raise ValueError('prefer=%s is not supported' % prefer)
        args['backend'] = {'threads': 'threading',
                           'processes': 'multiprocessing',
                           None: None}[prefer]

    # 'require' is a hard constraint and overrides any 'prefer' translation.
    if 'require' in kwargs:
        require = kwargs['require']
        if require not in [None, 'sharedmem']:
            raise ValueError('require=%s is not supported' % require)
        if require == 'sharedmem':
            args['backend'] = 'threading'
    return args
class loguniform(scipy.stats.reciprocal):
    """A class supporting log-uniform random variables.

    Parameters
    ----------
    low : float
        The minimum value
    high : float
        The maximum value

    Methods
    -------
    rvs(self, size=None, random_state=None)
        Generate log-uniform random variables

    The most useful method for Scikit-learn usage is highlighted here.
    For a full list, see
    `scipy.stats.reciprocal
    <https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.reciprocal.html>`_.
    This list includes all functions of ``scipy.stats`` continuous
    distributions such as ``pdf``.

    Notes
    -----
    This class generates values between ``low`` and ``high`` or

        low <= loguniform(low, high).rvs() <= high

    The logarithmic probability density function (PDF) is uniform. When
    ``x`` is a uniformly distributed random variable between 0 and 1, ``10**x``
    are random variables that are equally likely to be returned.

    This class is an alias to ``scipy.stats.reciprocal``, which uses the
    reciprocal distribution:
    https://en.wikipedia.org/wiki/Reciprocal_distribution

    Examples
    --------

    >>> from sklearn.utils.fixes import loguniform
    >>> rv = loguniform(1e-3, 1e1)
    >>> rvs = rv.rvs(random_state=42, size=1000)
    >>> rvs.min()  # doctest: +SKIP
    0.0010435856341129003
    >>> rvs.max()  # doctest: +SKIP
    9.97403052786026
    """
    # Intentionally empty: behaviour is inherited unchanged from
    # scipy.stats.reciprocal; the subclass only provides the sklearn-facing
    # name and documentation above.
@deprecated(
    'MaskedArray is deprecated in version 0.23 and will be removed in version '
    '1.0 (renaming of 0.25). Use numpy.ma.MaskedArray instead.'
)
class MaskedArray(_MaskedArray):
    # Deprecated re-export kept only for backward compatibility.
    pass  # TODO: remove in 1.0
def _take_along_axis(arr, indices, axis):
    """Implements a simplified version of np.take_along_axis if numpy
    version < 1.15"""
    if np_version >= parse_version('1.15'):
        return np.take_along_axis(arr=arr, indices=indices, axis=axis)
    else:
        # np.take_along_axis semantics: axis=None operates on the flattened
        # array.
        if axis is None:
            arr = arr.flatten()

        if not np.issubdtype(indices.dtype, np.intp):
            raise IndexError('`indices` must be an integer array')
        if arr.ndim != indices.ndim:
            raise ValueError(
                "`indices` and `arr` must have the same number of dimensions")

        shape_ones = (1,) * indices.ndim
        # Per-dimension destinations; None marks the axis being indexed.
        dest_dims = (
            list(range(axis)) +
            [None] +
            list(range(axis+1, indices.ndim))
        )

        # build a fancy index, consisting of orthogonal aranges, with the
        # requested index inserted at the right location
        fancy_index = []
        for dim, n in zip(dest_dims, arr.shape):
            if dim is None:
                fancy_index.append(indices)
            else:
                # arange broadcast along every axis except its own.
                ind_shape = shape_ones[:dim] + (-1,) + shape_ones[dim+1:]
                fancy_index.append(np.arange(n).reshape(ind_shape))

        fancy_index = tuple(fancy_index)
        return arr[fancy_index]
# remove when https://github.com/joblib/joblib/issues/1071 is fixed
def delayed(function):
    """Decorator used to capture the arguments of a function."""
    def delayed_function(*args, **kwargs):
        # Wrap the callable so it re-applies the caller's sklearn config
        # when eventually executed (possibly in another thread/process).
        wrapped = _FuncWrapper(function)
        return wrapped, args, kwargs
    return functools.wraps(function)(delayed_function)
class _FuncWrapper:
    """Load the global configuration before calling the function."""
    def __init__(self, function):
        self.function = function
        # Snapshot the sklearn config at submission time so the call below
        # runs under the submitting caller's configuration, even when it
        # executes in another thread or process.
        self.config = get_config()
        update_wrapper(self, self.function)

    def __call__(self, *args, **kwargs):
        with config_context(**self.config):
            return self.function(*args, **kwargs)
def linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None,
             axis=0):
    """Implements a simplified linspace function as of numpy version >= 1.16.

    As of numpy 1.16, the arguments start and stop can be array-like and
    there is an optional argument `axis`.
    For simplicity, we only allow 1d array-like to be passed to start and stop.
    See: https://github.com/numpy/numpy/pull/12388 and numpy 1.16 release
    notes about start and stop arrays for linspace logspace and geomspace.

    Returns
    -------
    out : ndarray of shape (num, n_start) or (num,)
        The output array with `n_start=start.shape[0]` columns.
    """
    if np_version < parse_version('1.16'):
        start = np.asanyarray(start) * 1.0
        stop = np.asanyarray(stop) * 1.0
        dt = np.result_type(start, stop, float(num))
        if dtype is None:
            dtype = dt
        if start.ndim == 0 == stop.ndim:
            return np.linspace(start=start, stop=stop, num=num,
                               endpoint=endpoint, retstep=retstep, dtype=dtype)
        if start.ndim != 1 or stop.ndim != 1 or start.shape != stop.shape:
            raise ValueError("start and stop must be 1d array-like of same"
                             " shape.")
        n_start = start.shape[0]
        out = np.empty((num, n_start), dtype=dtype)
        # BUGFIX: `np.float` was a deprecated alias of the builtin `float`
        # and was removed in numpy >= 1.24; use the builtin directly.
        step = np.empty(n_start, dtype=float)
        for i in range(n_start):
            out[:, i], step[i] = np.linspace(start=start[i], stop=stop[i],
                                             num=num, endpoint=endpoint,
                                             retstep=True, dtype=dtype)
        if axis != 0:
            out = np.moveaxis(out, 0, axis)
        if retstep:
            return out, step
        else:
            return out
    else:
        return np.linspace(start=start, stop=stop, num=num, endpoint=endpoint,
                           retstep=retstep, dtype=dtype, axis=axis)
| lesteve/scikit-learn | sklearn/utils/fixes.py | Python | bsd-3-clause | 9,201 |
#!/bin/env python3
import argparse
import esprima
import json
import os
import re
import sys
def get_req_body_elems(obj, elems):
    """Walk an esprima AST node collecting fields read from ``req.body``.

    Recursively visits `obj` and appends to `elems` (in place, without
    duplicates) every property name accessed on the Express request body,
    either through direct member access (``req.body.title``) or through
    object destructuring (``const {title} = req.body``).

    Returns the dotted path of a MemberExpression/Identifier chain (e.g.
    'req.body') so the recursion can recognize body accesses; returns ''
    for nodes that are not part of such a chain.
    """
    if obj.type == 'FunctionExpression':
        get_req_body_elems(obj.body, elems)
    elif obj.type == 'BlockStatement':
        for s in obj.body:
            get_req_body_elems(s, elems)
    elif obj.type == 'TryStatement':
        get_req_body_elems(obj.block, elems)
    elif obj.type == 'ExpressionStatement':
        get_req_body_elems(obj.expression, elems)
    elif obj.type == 'MemberExpression':
        # Build the dotted access path; record the property when the
        # object side resolved to exactly 'req.body'.
        left = get_req_body_elems(obj.object, elems)
        right = obj.property.name
        if left == 'req.body' and right not in elems:
            elems.append(right)
        return '{}.{}'.format(left, right)
    elif obj.type == 'VariableDeclaration':
        for s in obj.declarations:
            get_req_body_elems(s, elems)
    elif obj.type == 'VariableDeclarator':
        if obj.id.type == 'ObjectPattern':
            # get_req_body_elems() can't be called directly here:
            # const {isAdmin, isNoComments, isCommentOnly} = req.body;
            right = get_req_body_elems(obj.init, elems)
            if right == 'req.body':
                for p in obj.id.properties:
                    name = p.key.name
                    if name not in elems:
                        elems.append(name)
        else:
            get_req_body_elems(obj.init, elems)
    elif obj.type == 'Property':
        get_req_body_elems(obj.value, elems)
    elif obj.type == 'ObjectExpression':
        for s in obj.properties:
            get_req_body_elems(s, elems)
    elif obj.type == 'CallExpression':
        for s in obj.arguments:
            get_req_body_elems(s, elems)
    elif obj.type == 'ArrayExpression':
        for s in obj.elements:
            get_req_body_elems(s, elems)
    elif obj.type == 'IfStatement':
        get_req_body_elems(obj.test, elems)
        if obj.consequent is not None:
            get_req_body_elems(obj.consequent, elems)
        if obj.alternate is not None:
            get_req_body_elems(obj.alternate, elems)
    elif obj.type in ('LogicalExpression', 'BinaryExpression', 'AssignmentExpression'):
        get_req_body_elems(obj.left, elems)
        get_req_body_elems(obj.right, elems)
    elif obj.type in ('ReturnStatement', 'UnaryExpression'):
        get_req_body_elems(obj.argument, elems)
    elif obj.type == 'Literal':
        pass
    elif obj.type == 'Identifier':
        return obj.name
    elif obj.type == 'FunctionDeclaration':
        pass
    else:
        # Unknown node kind: dump it so new Express patterns get noticed.
        print(obj)
    return ''
def cleanup_jsdocs(jsdoc):
    """Normalize a JSDoc block comment into a list of content lines.

    Strips the leading whitespace and '*' decoration from every line of
    ``jsdoc.value``, then drops blank lines from both ends of the list.
    """
    lines = [raw.lstrip().lstrip('*') for raw in jsdoc.value.split('\n')]
    # drop blank lines at the start
    while lines and not lines[0].strip():
        del lines[0]
    # drop blank lines at the end
    while lines and not lines[-1].strip():
        del lines[-1]
    return lines
class JS2jsonDecoder(json.JSONDecoder):
    """JSON decoder that converts numeric-looking strings back to int.

    JSON reconstructed from JSDoc fragments ends up with every scalar
    quoted; this decoder walks the decoded structure and converts any
    string that parses as an integer.
    """
    def decode(self, s):
        result = super().decode(s)
        return self._decode(result)

    def _decode(self, o):
        # BUGFIX: the original tested `isinstance(o, unicode)`, but
        # `unicode` does not exist on Python 3 (the script declares a
        # python3 shebang), so any non-str scalar (e.g. a bool) raised
        # NameError.  `str` alone covers all text on Python 3.
        if isinstance(o, str):
            try:
                return int(o)
            except ValueError:
                return o
        elif isinstance(o, dict):
            return {k: self._decode(v) for k, v in o.items()}
        elif isinstance(o, list):
            return [self._decode(v) for v in o]
        else:
            return o
def load_return_type_jsdoc_json(data):
    """Parse the loosely-written JSON of a ``@return_type`` JSDoc tag.

    The tag content is JavaScript-ish (unquoted keys and values), so a
    sequence of regex passes inserts the double quotes JSON requires
    before handing the text to ``json.loads``.
    """
    fixups = (
        (r'\n', r' '),  # replace new lines by spaces
        (r'([\{\s,])(\w+)(:)', r'\1"\2"\3'),  # insert double quotes in keys
        (r'(:)\s*([^:\},\]]+)\s*([\},\]])', r'\1"\2"\3'),  # insert double quotes in values
        (r'(\[)\s*([^{].+)\s*(\])', r'\1"\2"\3'),  # insert double quotes in array items
        (r'^\s*([^\[{].+)\s*', r'"\1"'),  # insert double quotes in single item
    )
    text = data
    for pattern, replacement in fixups:
        text = re.sub(pattern, replacement, text)
    return json.loads(text)
class EntryPoint(object):
    """One REST API operation (HTTP method + path) found in a schema file.

    Wraps a ``JsonRoutes.add(method, path, handler)`` call: stores the
    HTTP method, the route, the handler body (scanned for req.body
    fields on POST/PUT) and the JSDoc comment attached to it, and knows
    how to print itself as an OpenAPI 2.0 path item.
    """
    def __init__(self, schema, statements):
        self.schema = schema
        self.method, self._path, self.body = statements
        self._jsdoc = None
        self._doc = {}
        self._raw_doc = None
        self.path = self.compute_path()
        self.method_name = self.method.value.lower()
        self.body_params = []
        if self.method_name in ('post', 'put'):
            get_req_body_elems(self.body, self.body_params)
        # replace the :parameter in path by {parameter}
        self.url = re.sub(r':([^/]*)Id', r'{\1}', self.path)
        self.url = re.sub(r':([^/]*)', r'{\1}', self.url)
        # reduce the api name
        # get_boards_board_cards() should be get_board_cards()
        tokens = self.url.split('/')
        reduced_function_name = []
        for i, token in enumerate(tokens):
            # BUGFIX: `token in ('api')` tested membership in the *string*
            # 'api', accidentally also skipping tokens such as 'a', 'p',
            # 'i', 'ap' and 'pi'.  Use a real tuple; keep '' explicitly so
            # the empty token produced by the leading '/' is still skipped
            # (the string-membership bug used to hide that behavior).
            if token in ('api', ''):
                continue
            if (i < len(tokens) - 1 and  # not the last item
                    tokens[i + 1].startswith('{')):  # and the next token is a parameter
                continue
            reduced_function_name.append(token.strip('{}'))
        self.reduced_function_name = '_'.join(reduced_function_name)
        # mark the schema as used
        schema.used = True

    def compute_path(self):
        """Return the route path without its trailing slash."""
        return self._path.value.rstrip('/')

    def error(self, message):
        """Report a documentation problem on stderr, including the JSDoc
        source location when one is attached."""
        if self._raw_doc is None:
            sys.stderr.write('in {},\n'.format(self.schema.name))
            sys.stderr.write('{}\n'.format(message))
            return
        sys.stderr.write('in {}, lines {}-{}\n'.format(self.schema.name,
                                                       self._raw_doc.loc.start.line,
                                                       self._raw_doc.loc.end.line))
        sys.stderr.write('{}\n'.format(self._raw_doc.value))
        sys.stderr.write('{}\n'.format(message))

    @property
    def doc(self):
        return self._doc

    @doc.setter
    def doc(self, doc):
        '''Parse the JSDoc attached to an entry point.
        `jsdoc` will not get these right as they are not attached to a method.
        So instead, we do our custom parsing here (yes, subject to errors).
        The expected format is the following (empty lines between entries
        are ignored):
        /**
         * @operation name_of_entry_point
         * @tag: a_tag_to_add
         * @tag: an_other_tag_to_add
         * @summary A nice summary, better in one line.
         *
         * @description This is a quite long description.
         * We can use *mardown* as the final rendering is done
         * by slate.
         *
         *     indentation doesn't matter.
         *
         * @param param_0 description of param 0
         * @param {string} param_1 we can also put the type of the parameter
         *                 before its name, like in JSDoc
         * @param {boolean} [param_2] we can also tell if the parameter is
         *                  optional by adding square brackets around its name
         *
         * @return Documents a return value
         */
        Notes:
        - name_of_entry_point will be referenced in the ToC of the generated
          document. This is also the operationId used in the resulting openapi
          file. It needs to be uniq in the namesapce (the current schema.js
          file)
        - tags are appended to the current Schema attached to the file
        '''
        self._raw_doc = doc
        self._jsdoc = cleanup_jsdocs(doc)

        def store_tag(tag, data):
            # check that there is something to store first
            if not data.strip():
                return
            # remove terminating whitespaces and empty lines
            data = data.rstrip()
            # parameters are handled specially
            if tag == 'param':
                if 'params' not in self._doc:
                    self._doc['params'] = {}
                params = self._doc['params']
                param_type = None
                try:
                    name, desc = data.split(maxsplit=1)
                except ValueError:
                    # BUGFIX: a lone token used to leave `name` unbound and
                    # crash on `name.startswith` below; treat the whole
                    # data as the parameter name.
                    name, desc = data, ''
                if name.startswith('{'):
                    param_type = name.strip('{}')
                    if param_type not in ['string', 'number', 'boolean', 'integer', 'array', 'file']:
                        self.error('Warning, unknown type {}\n allowed values: string, number, boolean, integer, array, file'.format(param_type))
                    try:
                        name, desc = desc.split(maxsplit=1)
                    except ValueError:
                        # BUGFIX: same issue when only the name follows the
                        # type annotation; previously `name` kept the stale
                        # '{type}' value.
                        name, desc = desc, ''
                optional = name.startswith('[') and name.endswith(']')
                if optional:
                    name = name[1:-1]
                # we should not have 2 identical parameter names
                if tag in params:
                    self.error('Warning, overwriting parameter {}'.format(name))
                params[name] = (param_type, optional, desc)
                if name.endswith('Id'):
                    # we strip out the 'Id' from the form parameters, we need
                    # to keep the actual description around
                    name = name[:-2]
                    if name not in params:
                        params[name] = (param_type, optional, desc)
                return
            # 'tag' can be set several times
            if tag == 'tag':
                if tag not in self._doc:
                    self._doc[tag] = []
                self._doc[tag].append(data)
                return
            # 'return' tag is json
            if tag == 'return_type':
                try:
                    data = load_return_type_jsdoc_json(data)
                except json.decoder.JSONDecodeError:
                    pass
            # we should not have 2 identical tags but @param or @tag
            if tag in self._doc:
                self.error('Warning, overwriting tag {}'.format(tag))
            self._doc[tag] = data

        # reset the current doc fields
        self._doc = {}
        # first item is supposed to be the description
        current_tag = 'description'
        current_data = ''
        for line in self._jsdoc:
            if line.lstrip().startswith('@'):
                tag, data = line.lstrip().split(maxsplit=1)
                if tag in ['@operation', '@summary', '@description', '@param', '@return_type', '@tag']:
                    # store the current data
                    store_tag(current_tag, current_data)
                    current_tag = tag.lstrip('@')
                    current_data = ''
                    line = data
                else:
                    self.error('Unknown tag {}, ignoring'.format(tag))
            current_data += line + '\n'
        store_tag(current_tag, current_data)

    @property
    def summary(self):
        """One-line summary from the @summary tag, or None."""
        if 'summary' in self._doc:
            # new lines are not allowed
            return self._doc['summary'].replace('\n', ' ')
        return None

    def doc_param(self, name):
        """Return (type, optional, description) for a documented parameter,
        or (None, None, None) when it was not documented."""
        if 'params' in self._doc and name in self._doc['params']:
            return self._doc['params'][name]
        return None, None, None

    def print_openapi_param(self, name, indent):
        """Print the description/type/required lines for one parameter."""
        ptype, poptional, pdesc = self.doc_param(name)
        if pdesc is not None:
            print('{}description: |'.format(' ' * indent))
            print('{}{}'.format(' ' * (indent + 2), pdesc))
        else:
            print('{}description: the {} value'.format(' ' * indent, name))
        if ptype is not None:
            print('{}type: {}'.format(' ' * indent, ptype))
        else:
            print('{}type: string'.format(' ' * indent))
        if poptional:
            print('{}required: false'.format(' ' * indent))
        else:
            print('{}required: true'.format(' ' * indent))

    @property
    def operationId(self):
        """Explicit @operation name, or one derived from method + path."""
        if 'operation' in self._doc:
            return self._doc['operation']
        return '{}_{}'.format(self.method_name, self.reduced_function_name)

    @property
    def description(self):
        if 'description' in self._doc:
            return self._doc['description']
        return None

    @property
    def returns(self):
        """Parsed @return_type value (str / list / dict), or None."""
        if 'return_type' in self._doc:
            return self._doc['return_type']
        return None

    @property
    def tags(self):
        """Schema name plus any extra @tag entries."""
        tags = []
        if self.schema.fields is not None:
            tags.append(self.schema.name)
        if 'tag' in self._doc:
            tags.extend(self._doc['tag'])
        return tags

    def print_openapi_return(self, obj, indent):
        """Print the YAML schema of a @return_type value at `indent`."""
        if isinstance(obj, dict):
            print('{}type: object'.format(' ' * indent))
            print('{}properties:'.format(' ' * indent))
            for k, v in obj.items():
                print('{}{}:'.format(' ' * (indent + 2), k))
                self.print_openapi_return(v, indent + 4)
        elif isinstance(obj, list):
            if len(obj) > 1:
                self.error('Error while parsing @return tag, an array should have only one type')
            print('{}type: array'.format(' ' * indent))
            print('{}items:'.format(' ' * indent))
            self.print_openapi_return(obj[0], indent + 2)
        elif isinstance(obj, str):
            # BUGFIX: the original also tested `isinstance(obj, unicode)`;
            # `unicode` is undefined on Python 3, so any non-str scalar
            # raised NameError here.  Non-str scalars now fall through
            # silently, as the py2 code intended.
            rtype = 'type: ' + obj
            if obj == self.schema.name:
                rtype = '$ref: "#/definitions/{}"'.format(obj)
            print('{}{}'.format(' ' * indent, rtype))

    def print_openapi(self):
        """Print this operation as an OpenAPI path-item entry."""
        parameters = [token[1:-2] if token.endswith('Id') else token[1:]
                      for token in self.path.split('/')
                      if token.startswith(':')]
        print('    {}:'.format(self.method_name))
        print('      operationId: {}'.format(self.operationId))
        if self.summary is not None:
            print('      summary: {}'.format(self.summary))
        if self.description is not None:
            print('      description: |')
            for line in self.description.split('\n'):
                if line.strip():
                    print('        {}'.format(line))
                else:
                    print('')
        if len(self.tags) > 0:
            print('      tags:')
            for tag in self.tags:
                print('        - {}'.format(tag))
        # export the parameters
        if self.method_name in ('post', 'put'):
            print('''      consumes:
        - multipart/form-data
        - application/json''')
        if len(parameters) > 0 or self.method_name in ('post', 'put'):
            print('      parameters:')
            if self.method_name in ('post', 'put'):
                for f in self.body_params:
                    print('''        - name: {}
          in: formData'''.format(f))
                    self.print_openapi_param(f, 10)
            for p in parameters:
                if p in self.body_params:
                    self.error(' '.join((p, self.path, self.method_name)))
                print('''        - name: {}
          in: path'''.format(p))
                self.print_openapi_param(p, 10)
        print('''      produces:
        - application/json
      security:
        - UserSecurity: []
      responses:
        '200':
          description: |-
            200 response''')
        if self.returns is not None:
            print('          schema:')
            self.print_openapi_return(self.returns, 12)
class SchemaProperty(object):
    """One field of a SimpleSchema definition, exportable to OpenAPI.

    Parses the esprima AST of a single ``fieldName: {...}`` entry to
    extract its type, allowed values, optionality and blackbox flag,
    plus the JSDoc comment sitting directly above the field.
    """
    def __init__(self, statement, schema):
        self.schema = schema
        self.statement = statement
        self.name = statement.key.name or statement.key.value
        # defaults: plain required object field
        self.type = 'object'
        self.blackbox = False
        self.required = True
        for p in statement.value.properties:
            if p.key.name == 'type':
                if p.value.type == 'Identifier':
                    self.type = p.value.name.lower()
                elif p.value.type == 'ArrayExpression':
                    self.type = 'array'
                    self.elements = [e.name.lower() for e in p.value.elements]
            elif p.key.name == 'allowedValues':
                self.type = 'enum'
                self.enum = [e.value.lower() for e in p.value.elements]
            elif p.key.name == 'blackbox':
                self.blackbox = True
            elif p.key.name == 'optional' and p.value.value:
                self.required = False
        self._doc = None
        self._raw_doc = None
    @property
    def doc(self):
        return self._doc
    @doc.setter
    def doc(self, jsdoc):
        self._raw_doc = jsdoc
        self._doc = cleanup_jsdocs(jsdoc)
    def process_jsdocs(self, jsdocs):
        # A JSDoc ending on the line right above this field documents it;
        # consume it so it is not matched again by another field.
        start = self.statement.key.loc.start.line
        for index, doc in enumerate(jsdocs):
            if start + 1 == doc.loc.start.line:
                self.doc = doc
                jsdocs.pop(index)
                return
    def __repr__(self):
        return 'SchemaProperty({}{}, {})'.format(self.name,
                                                 '*' if self.required else '',
                                                 self.doc)
    def print_openapi(self, indent, current_schema, required_properties):
        """Print this field as an OpenAPI property.

        `current_schema` is the name of the sub-schema currently being
        emitted (so a new header is only printed when it changes) and
        `required_properties` accumulates required field names; the
        (possibly new) current schema name is returned to the caller.
        """
        schema_name = self.schema.name
        name = self.name
        # deal with subschemas
        if '.' in name:
            if name.endswith('$'):
                # reference in reference
                subschema = ''.join([n.capitalize() for n in self.name.split('.')[:-1]])
                subschema = self.schema.name + subschema
                if current_schema != subschema:
                    # flush required list of the previous sub-schema
                    if required_properties is not None and required_properties:
                        print('    required:')
                        for f in required_properties:
                            print('      - {}'.format(f))
                        required_properties.clear()
                    print('''  {}:
    type: object'''.format(subschema))
                return current_schema
            subschema = name.split('.')[0]
            schema_name = self.schema.name + subschema.capitalize()
            name = name.split('.')[-1]
        if current_schema != schema_name:
            # starting a new (sub-)schema: flush required list and header
            if required_properties is not None and required_properties:
                print('    required:')
                for f in required_properties:
                    print('      - {}'.format(f))
                required_properties.clear()
            print('''  {}:
    type: object
    properties:'''.format(schema_name))
        if required_properties is not None and self.required:
            required_properties.append(name)
        print('{}{}:'.format(' ' * indent, name))
        if self.doc is not None:
            print('{}  description: |'.format(' ' * indent))
            for line in self.doc:
                if line.strip():
                    print('{}    {}'.format(' ' * indent, line))
                else:
                    print('')
        ptype = self.type
        # enums and dates are plain strings on the wire
        if ptype in ('enum', 'date'):
            ptype = 'string'
        if ptype != 'object':
            print('{}  type: {}'.format(' ' * indent, ptype))
        if self.type == 'array':
            print('{}  items:'.format(' ' * indent))
            for elem in self.elements:
                if elem == 'object':
                    print('{}    $ref: "#/definitions/{}"'.format(' ' * indent, schema_name + name.capitalize()))
                else:
                    print('{}    type: {}'.format(' ' * indent, elem))
            if not self.required:
                print('{}  x-nullable: true'.format(' ' * indent))
        elif self.type == 'object':
            if self.blackbox:
                print('{}  type: object'.format(' ' * indent))
            else:
                print('{}  $ref: "#/definitions/{}"'.format(' ' * indent, schema_name + name.capitalize()))
        elif self.type == 'enum':
            print('{}  enum:'.format(' ' * indent))
            for enum in self.enum:
                print('{}    - {}'.format(' ' * indent, enum))
        if '.' not in self.name and not self.required:
            print('{}  x-nullable: true'.format(' ' * indent))
        return schema_name
class Schemas(object):
    """A SimpleSchema definition and its OpenAPI representation.

    ``fields`` stays None for placeholder schemas created for files that
    declare API routes but no SimpleSchema; those are skipped when the
    definitions section is printed.
    """
    def __init__(self, data=None, jsdocs=None, name=None):
        self.name = name
        self._data = data
        self.fields = None
        # set to True once an EntryPoint references this schema
        self.used = False
        if data is not None:
            if self.name is None:
                self.name = data.expression.callee.object.name
            content = data.expression.arguments[0].arguments[0]
            self.fields = [SchemaProperty(p, self) for p in content.properties]
        self._doc = None
        self._raw_doc = None
        if jsdocs is not None:
            self.process_jsdocs(jsdocs)
    @property
    def doc(self):
        if self._doc is None:
            return None
        return ' '.join(self._doc)
    @doc.setter
    def doc(self, jsdoc):
        self._raw_doc = jsdoc
        self._doc = cleanup_jsdocs(jsdoc)
    def process_jsdocs(self, jsdocs):
        # A JSDoc ending just above the schema documents the schema itself;
        # JSDocs inside the schema body document individual fields.
        start = self._data.loc.start.line
        end = self._data.loc.end.line
        for doc in jsdocs:
            if doc.loc.end.line + 1 == start:
                self.doc = doc
        docs = [doc
                for doc in jsdocs
                if doc.loc.start.line >= start and doc.loc.end.line <= end]
        for field in self.fields:
            field.process_jsdocs(docs)
    def print_openapi(self):
        """Print this schema (and its sub-schemas) as OpenAPI definitions."""
        # empty schemas are skipped
        if self.fields is None:
            return
        print('  {}:'.format(self.name))
        print('    type: object')
        if self.doc is not None:
            print('    description: {}'.format(self.doc))
        print('    properties:')
        # first print out the object itself
        properties = [field for field in self.fields if '.' not in field.name]
        for prop in properties:
            prop.print_openapi(6, None, None)
        required_properties = [f.name for f in properties if f.required]
        if required_properties:
            print('    required:')
            for f in required_properties:
                print('      - {}'.format(f))
        # then print the references
        current = None
        required_properties = []
        properties = [f for f in self.fields if '.' in f.name and not f.name.endswith('$')]
        for prop in properties:
            current = prop.print_openapi(6, current, required_properties)
        if required_properties:
            print('    required:')
            for f in required_properties:
                print('      - {}'.format(f))
        required_properties = []
        # then print the references in the references
        for prop in [f for f in self.fields if '.' in f.name and f.name.endswith('$')]:
            current = prop.print_openapi(6, current, required_properties)
        if required_properties:
            print('    required:')
            for f in required_properties:
                print('      - {}'.format(f))
def parse_schemas(schemas_dir):
    """Scan `schemas_dir` for SimpleSchema definitions and API routes.

    Walks every file under `schemas_dir`, parses it with esprima and
    collects:
    - ``<Collection>.attachSchema(new SimpleSchema({...}))`` calls as
      `Schemas` objects, and
    - ``JsonRoutes.add(...)`` calls inside ``if (Meteor.isServer)``
      blocks as `EntryPoint` objects, with any JSDoc comment ending on
      the line above the HTTP-method argument attached to them.

    Returns
    -------
    (schemas, entry_points) : (dict of name -> Schemas, list of EntryPoint)
    """
    schemas = {}
    entry_points = []
    for root, dirs, files in os.walk(schemas_dir):
        files.sort()
        for filename in files:
            path = os.path.join(root, filename)
            with open(path) as f:
                data = ''.join(f.readlines())
            try:
                # if the file failed, it's likely it doesn't contain a schema
                program = esprima.parseScript(data, options={'comment': True, 'loc': True})
            except Exception:
                # BUGFIX: narrowed from a bare `except:`, which also
                # swallowed KeyboardInterrupt/SystemExit.
                continue
            current_schema = None
            jsdocs = [c for c in program.comments
                      if c.type == 'Block' and c.value.startswith('*\n')]
            for statement in program.body:
                # find the '<ITEM>.attachSchema(new SimpleSchema(<data>)'
                # those are the schemas
                if (statement.type == 'ExpressionStatement' and
                        statement.expression.callee is not None and
                        statement.expression.callee.property is not None and
                        statement.expression.callee.property.name == 'attachSchema' and
                        statement.expression.arguments[0].type == 'NewExpression' and
                        statement.expression.arguments[0].callee.name == 'SimpleSchema'):
                    schema = Schemas(statement, jsdocs)
                    current_schema = schema.name
                    schemas[current_schema] = schema
                # find all the 'if (Meteor.isServer) { JsonRoutes.add('
                # those are the entry points of the API
                elif (statement.type == 'IfStatement' and
                        statement.test.type == 'MemberExpression' and
                        statement.test.object.name == 'Meteor' and
                        statement.test.property.name == 'isServer'):
                    data = [s.expression.arguments
                            for s in statement.consequent.body
                            if (s.type == 'ExpressionStatement' and
                                s.expression.type == 'CallExpression' and
                                s.expression.callee.object.name == 'JsonRoutes')]
                    # we found at least one entry point, keep them
                    if len(data) > 0:
                        if current_schema is None:
                            # routes with no schema get a placeholder named
                            # after the file
                            current_schema = filename
                            schemas[current_schema] = Schemas(name=current_schema)
                        schema_entry_points = [EntryPoint(schemas[current_schema], d)
                                               for d in data]
                        entry_points.extend(schema_entry_points)
                        # try to match JSDoc to the operations
                        for entry_point in schema_entry_points:
                            operation = entry_point.method  # POST/GET/PUT/DELETE
                            jsdoc = [j for j in jsdocs
                                     if j.loc.end.line + 1 == operation.loc.start.line]
                            if bool(jsdoc):
                                entry_point.doc = jsdoc[0]
    return schemas, entry_points
def generate_openapi(schemas, entry_points, version):
    """Print the complete OpenAPI 2.0 (swagger) YAML document on stdout.

    Emits a static header (API metadata plus the hand-written
    /users/login and /users/register operations), then every discovered
    path (grouped so GET/POST on the same path share one path item),
    then the definitions of every schema actually referenced by a route.
    """
    print('''swagger: '2.0'
info:
  title: Wekan REST API
  version: {0}
  description: |
    The REST API allows you to control and extend Wekan with ease.
    If you are an end-user and not a dev or a tester, [create an issue](https://github.com/wekan/wekan/issues/new) to request new APIs.
    > All API calls in the documentation are made using `curl`. However, you are free to use Java / Python / PHP / Golang / Ruby / Swift / Objective-C / Rust / Scala / C# or any other programming languages.
    # Production Security Concerns
    When calling a production Wekan server, ensure it is running via HTTPS and has a valid SSL Certificate. The login method requires you to post your username and password in plaintext, which is why we highly suggest only calling the REST login api over HTTPS. Also, few things to note:
    * Only call via HTTPS
    * Implement a timed authorization token expiration strategy
    * Ensure the calling user only has permissions for what they are calling and no more
schemes:
  - http
securityDefinitions:
  UserSecurity:
    type: apiKey
    in: header
    name: Authorization
paths:
  /users/login:
    post:
      operationId: login
      summary: Login with REST API
      consumes:
        - application/x-www-form-urlencoded
        - application/json
      tags:
        - Login
      parameters:
        - name: username
          in: formData
          required: true
          description: |
            Your username
          type: string
        - name: password
          in: formData
          required: true
          description: |
            Your password
          type: string
          format: password
      responses:
        200:
          description: |-
            Successful authentication
          schema:
            items:
              properties:
                id:
                  type: string
                token:
                  type: string
                tokenExpires:
                  type: string
        400:
          description: |
            Error in authentication
          schema:
            items:
              properties:
                error:
                  type: number
                reason:
                  type: string
        default:
          description: |
            Error in authentication
  /users/register:
    post:
      operationId: register
      summary: Register with REST API
      description: |
        Notes:
        - You will need to provide the token for any of the authenticated methods.
      consumes:
        - application/x-www-form-urlencoded
        - application/json
      tags:
        - Login
      parameters:
        - name: username
          in: formData
          required: true
          description: |
            Your username
          type: string
        - name: password
          in: formData
          required: true
          description: |
            Your password
          type: string
          format: password
        - name: email
          in: formData
          required: true
          description: |
            Your email
          type: string
      responses:
        200:
          description: |-
            Successful registration
          schema:
            items:
              properties:
                id:
                  type: string
                token:
                  type: string
                tokenExpires:
                  type: string
        400:
          description: |
            Error in registration
          schema:
            items:
              properties:
                error:
                  type: number
                reason:
                  type: string
        default:
          description: |
            Error in registration
'''.format(version))
    # GET and POST on the same path are valid, we need to reshuffle the paths
    # with the path as the sorting key
    methods = {}
    for ep in entry_points:
        if ep.path not in methods:
            methods[ep.path] = []
        methods[ep.path].append(ep)
    sorted_paths = list(methods.keys())
    sorted_paths.sort()
    for path in sorted_paths:
        print('  {}:'.format(methods[path][0].url))
        for ep in methods[path]:
            ep.print_openapi()
    print('definitions:')
    for schema in schemas.values():
        # do not export the objects if there is no API attached
        if not schema.used:
            continue
        schema.print_openapi()
def main():
    """Command-line entry point: parse the schemas and emit OpenAPI YAML."""
    parser = argparse.ArgumentParser(description='Generate an OpenAPI 2.0 from the given JS schemas.')
    script_dir = os.path.dirname(os.path.realpath(__file__))
    # BUGFIX: `nargs=1` combined with a plain string default meant that when
    # the option was omitted, `args.release[0]` evaluated to the first
    # *character* of 'git-master' ('g').  Drop nargs so args.release is
    # always the full string; the CLI usage (`--release X`) is unchanged.
    parser.add_argument('--release', default='git-master',
                        help='the current version of the API, can be retrieved by running `git describe --tags --abbrev=0`')
    parser.add_argument('dir', default='{}/../models'.format(script_dir), nargs='?',
                        help='the directory where to look for schemas')
    args = parser.parse_args()
    schemas, entry_points = parse_schemas(args.dir)
    generate_openapi(schemas, entry_points, args.release)
# Script entry point: only run when executed directly, not when imported.
if __name__ == '__main__':
    main()
| libreboard/libreboard | openapi/generate_openapi.py | Python | mit | 32,085 |
import glob
import json
import os
from collections import OrderedDict
from functools import lru_cache, wraps
from itertools import chain
from mwapi import Session
from html import escape
import uglipyjs
from flask import current_app, request
from ..i18n import i18n
from ..util.wikimedia import host_from_dbname
from .responses import bad_request
class ParamError(Exception):
    """Raised when an HTTP request parameter cannot be interpreted.

    Carries a ready-to-return ``bad_request`` response as its argument.
    """
    pass
def read_param(request, param, default=None, type=str):
    """Read `param` from the request's query string or form body.

    The raw value is stripped and converted through `type`.  Returns
    `default` (converted) when the parameter is present nowhere but a
    non-None default was supplied, and None when the parameter is absent
    and the default is None.

    Raises
    ------
    ParamError
        Wrapping a `bad_request` response when conversion fails.
    """
    try:
        value = request.args.get(param, request.form.get(param, default))
        if value is None:
            # BUGFIX: an absent parameter with a None default used to fall
            # through to `value.strip()` and crash with an uncaught
            # AttributeError; callers (e.g. read_bar_split_param) expect
            # None back instead.
            return None
        return type(value.strip())
    except (ValueError, TypeError) as e:
        error = bad_request("Could not interpret {0}. {1}"
                            .format(param, str(e)))
        raise ParamError(error)
def read_bar_split_param(request, param, default=None, type=str):
    """Read a '|'-separated request parameter as a list of `type` values.

    Returns an empty list when the parameter is absent.
    """
    raw = read_param(request, param, default=default)
    if raw is None:
        return []
    return [type(item) for item in raw.split("|")]
def jsonp(func):
    """Wraps JSONified output for JSONP requests.

    When the request carries a ``callback`` query argument, the wrapped
    view's JSON body is wrapped in a JavaScript function call
    (``callback(...)``) and served as application/javascript; otherwise
    the view's response is returned unchanged.
    """
    @wraps(func)
    def decorated_function(*args, **kwargs):
        callback = request.args.get('callback', None)
        if callback is not None:
            # BUGFIX: Response.data is bytes on Python 3, so the previous
            # `str(...data)` produced "b'...'" inside the JSONP payload.
            # Decode the body as text instead.
            data = func(*args, **kwargs).get_data(as_text=True)
            content = str(callback) + '(' + data + ')'
            mimetype = 'application/javascript'
            return current_app.response_class(content, mimetype=mimetype)
        else:
            return func(*args, **kwargs)
    return decorated_function
def static_file_path(path):
dir_name = os.path.dirname(os.path.abspath(__file__))
return os.path.join(dir_name, "static", path)
@lru_cache(128)
def read_javascript(static_paths, minify=False):
    # Concatenate (and optionally minify) the given static JS files.
    # `static_paths` must be hashable (i.e. a tuple) because results are
    # memoized per (paths, minify) pair.
    if minify:
        return uglipyjs.compile(read_cat(static_paths))
    else:
        return read_cat(static_paths)
@lru_cache(128)
def minify_js(js_text):
    # Minify a JavaScript source string; memoized since minification is slow.
    return uglipyjs.compile(js_text)
@lru_cache(128)
def read_cat(static_paths):
    """Concatenate the contents of the given static files.

    `static_paths` must be a hashable sequence (tuple) of paths relative
    to the package's static directory, because results are memoized.
    """
    # BUGFIX: the previous `open(...).read()` generator never closed its
    # file handles, leaking them until garbage collection; close them
    # deterministically with a context manager.
    parts = []
    for path in static_paths:
        with open(static_file_path(path)) as f:
            parts.append(f.read())
    return "".join(parts)
def build_script_tags(static_paths, config):
    """Render one HTML <script> tag for every path in `static_paths`."""
    tags = ('<script src="{0}"></script>'.format(static_path(p, config))
            for p in static_paths)
    return "".join(tags)
def build_style_tags(static_paths, config):
    """Render one HTML stylesheet <link> tag for every path in `static_paths`."""
    tags = ('<link rel="stylesheet" type="text/css" href="{0}" />'
            .format(static_path(p, config))
            for p in static_paths)
    return "".join(tags)
def app_path(path, config):
    # Prefix `path` with the configured WSGI application root.
    return path_join("/", config['wsgi']['application_root'], path)
def static_path(path, config):
    """Resolve a static asset reference to a servable URL path.

    Absolute http(s) and protocol-relative ("//") URLs pass through
    untouched; root-relative paths are anchored at the application root;
    anything else is looked up under the application's static/ directory.
    """
    if path[:4] == "http" or path[:2] == "//":
        return path
    if path[:1] == "/":
        return app_path(path[1:], config)
    return app_path(path_join("static", path), config)
def url_for(path):
    # Anchor `path` at the root URL of the current request.
    return path_join(request.url_root, path)
def path_join(*path_parts):
    """Join URL path fragments with single slashes.

    Empty fragments are discarded.  The first kept fragment retains its
    leading slash (if any) and the last keeps its trailing slash, while
    interior fragments are stripped of slashes on both sides.
    """
    parts = [part for part in path_parts if part]
    if not parts:
        return ""
    if len(parts) == 1:
        return parts[0]
    pieces = [parts[0].rstrip("/")]
    pieces.extend(part.strip("/") for part in parts[1:-1])
    pieces.append(parts[-1].lstrip("/"))
    return "/".join(pieces)
def get_i18n_dir():
path = os.path.abspath(__file__)
path = os.path.dirname(path)
path = os.path.join(path, '../i18n/')
return path
def i18n_dict():
    """Load every i18n/*.json translation file.

    Returns an OrderedDict mapping language code (the file's basename
    without '.json') to an OrderedDict of its translation strings, both
    sorted by key.
    """
    i18n_dir = get_i18n_dir()
    translations = {}
    for lang_file_path in glob.glob(os.path.join(i18n_dir, "*.json")):
        # BUGFIX: the previous bare `open()` never closed the file handle;
        # use a context manager so each file is closed deterministically.
        with open(lang_file_path, 'r') as f:
            lang_i18n = json.load(f)
        lang = os.path.basename(lang_file_path)[:-5]  # strip ".json"
        translations[lang] = OrderedDict(
            sorted(lang_i18n.items(), key=lambda t: t[0]))
    return OrderedDict(sorted(translations.items(), key=lambda t: t[0]))
def pretty_json(data):
    """Serialize `data` as human-readable JSON: sorted keys, 8-space indent,
    and non-ASCII characters left unescaped."""
    return json.dumps(
        data,
        ensure_ascii=False,
        sort_keys=True,
        separators=(',', ': '),
        indent=8,
    )
def build_maintenance_notice(request, config):
    """Build a localized maintenance-notice string, if one is configured.

    The user's preferred languages are taken from the Accept-Language
    header; the configured notice date and URL are interpolated through
    ``i18n()``.  Returns None implicitly when no notice is configured.
    """
    header = request.headers.get('Accept-Language', "en;q=0")
    accept_langs = header.split(";")[0].split(",")
    if 'maintenance_notice' in config['wikilabels']:
        notice = config['wikilabels']['maintenance_notice']
        link = '<a href="{0}">{0}</a>'.format(notice['url'])
        return i18n("maintenance notice", accept_langs,
                    [notice['date'], link])
@lru_cache(500)
def get_user_info(user_id, host):
    # Fetch global user info for `user_id` from the MediaWiki API at `host`
    # (memoized).  Returns {} when the API reports an error or when the
    # expected query structure is absent.
    params = {
        'action': 'query',
        'meta': 'globaluserinfo',
        'guiid': user_id
    }
    session = Session(host)
    res = session.get(**params)
    if 'error' in res:
        return {}
    return res.get('query', {}).get('globaluserinfo', {})
def parse_user_data(case, config):
    """Attach an HTML-escaped `user_name` to a labeling case dict.

    When the user's home wiki is known, the name becomes a link to the
    user page on that wiki.  Mutates and returns `case`.
    """
    # Batching is not possible, don't ask why
    user_info = get_user_info(case['user'],
                              config['wikilabels']['central_auth_host'])
    case['user_name'] = escape(user_info.get('name', ''))
    if 'home' in user_info and case['user_name'] != '':
        home_wiki = host_from_dbname(user_info['home'])
        # `user__name` is the underscore-joined page-title form of the name;
        # `user_name` is the human-readable link text.
        case['user_name'] = (
            '<a href="https://{host}/wiki/User:{user__name}">'
            '{user_name}</a>'.format(
                host=home_wiki,
                user__name=case['user_name'].replace(' ', '_'),
                user_name=case['user_name']))
    return case
| wiki-ai/wikilabels | wikilabels/wsgi/util.py | Python | mit | 5,467 |
# -*- coding: utf-8 -*-
#
# django-vote documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 7 17:06:24 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-vote'
copyright = u'2014, shellfly'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-votedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'django-vote.tex', u'django-vote Documentation',
u'shellfly', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-vote', u'django-vote Documentation',
[u'shellfly'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'django-vote', u'django-vote Documentation',
u'shellfly', 'django-vote', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| hwbuluo/django-vote | docs/conf.py | Python | bsd-2-clause | 8,957 |
import cherrypy
from model.Note import Note
from model.Notebook import Notebook
from model.User import User
from Test_controller import Test_controller
class Test_root( Test_controller ):
  """Controller tests for the site's root controller.

  Covers the front page ("/"), the tour pages, anonymous-notebook note
  shortcuts ("/my_note", "/guide", "/privacy"), pricing, server plumbing
  (ping/shutdown/404), and the short redirect URLs (/r, /i, /d).

  Fix in this revision: the pricing-redirect test was named "upgrade"
  without the "test_" prefix, so the test runner never collected or ran
  it; it is now named test_upgrade.
  """

  def setUp( self ):
    """Create fixture data: a personal notebook, the four anonymous
    front-page notebooks (main, blog, guide, privacy) each with a note,
    a regular user ("mulder") and the anonymous user."""
    Test_controller.setUp( self )
    self.notebook = Notebook.create( self.database.next_id( Notebook ), u"my notebook", trash_id = u"foo" )
    self.database.save( self.notebook )
    self.anon_notebook = Notebook.create( self.database.next_id( Notebook ), u"Luminotes" )
    self.database.save( self.anon_notebook )
    self.anon_note = Note.create(
      self.database.next_id( Note ), u"<h3>my note</h3>",
      notebook_id = self.anon_notebook.object_id,
    )
    self.database.save( self.anon_note )
    self.login_note = Note.create(
      self.database.next_id( Note ), u"<h3>login</h3>",
      notebook_id = self.anon_notebook.object_id,
    )
    self.database.save( self.login_note )
    self.blog_notebook = Notebook.create( self.database.next_id( Notebook ), u"Luminotes blog" )
    self.database.save( self.blog_notebook )
    self.blog_note = Note.create(
      self.database.next_id( Note ), u"<h3>my blog entry</h3>",
      notebook_id = self.blog_notebook.object_id,
    )
    self.database.save( self.blog_note )
    self.guide_notebook = Notebook.create( self.database.next_id( Notebook ), u"Luminotes user guide" )
    self.database.save( self.guide_notebook )
    self.guide_note = Note.create(
      self.database.next_id( Note ), u"<h3>it's all self-explanatory</h3>",
      notebook_id = self.guide_notebook.object_id,
    )
    self.database.save( self.guide_note )
    self.privacy_notebook = Notebook.create( self.database.next_id( Notebook ), u"Luminotes privacy policy" )
    self.database.save( self.privacy_notebook )
    self.privacy_note = Note.create(
      self.database.next_id( Note ), u"<h3>yay privacy</h3>",
      notebook_id = self.privacy_notebook.object_id,
    )
    self.database.save( self.privacy_note )
    self.username = u"mulder"
    self.password = u"trustno1"
    self.email_address = u"outthere@example.com"
    self.user = None
    self.session_id = None
    self.user = User.create( self.database.next_id( User ), self.username, self.password, self.email_address )
    self.database.save( self.user )
    self.database.execute( self.user.sql_save_notebook( self.notebook.object_id ) )
    # The anonymous user can read (but not write) the four public notebooks.
    self.anonymous = User.create( self.database.next_id( User ), u"anonymous" )
    self.database.save( self.anonymous )
    self.database.execute( self.anonymous.sql_save_notebook( self.anon_notebook.object_id, read_write = False, owner = False, rank = 0 ) )
    self.database.execute( self.anonymous.sql_save_notebook( self.blog_notebook.object_id, read_write = False, owner = False, rank = 1 ) )
    self.database.execute( self.anonymous.sql_save_notebook( self.guide_notebook.object_id, read_write = False, owner = False, rank = 2 ) )
    self.database.execute( self.anonymous.sql_save_notebook( self.privacy_notebook.object_id, read_write = False, owner = False, rank = 3 ) )

  # -- front page ("/") ----------------------------------------------------

  def test_index( self ):
    result = self.http_get( "/" )
    assert result
    assert result.get( u"redirect" ) is None
    assert result[ u"user" ].username == u"anonymous"
    assert len( result[ u"notebooks" ] ) == 4
    assert result[ u"first_notebook" ] == None
    assert result[ u"login_url" ] == u"https://luminotes.com/notebooks/%s?note_id=%s" % (
      self.anon_notebook.object_id, self.login_note.object_id,
    )
    assert result[ u"logout_url" ] == u"https://luminotes.com/users/logout"
    assert result[ u"rate_plan" ]

  def test_index_after_login_without_referer( self ):
    self.login()
    result = self.http_get(
      "/",
      session_id = self.session_id,
    )
    assert result
    assert result.get( u"redirect" ) == u"https://luminotes.com/notebooks/%s" % self.notebook.object_id

  def test_index_after_login_with_referer( self ):
    self.login()
    result = self.http_get(
      "/",
      headers = [ ( u"Referer", "http://whee" ) ],
      session_id = self.session_id,
    )
    assert result
    assert result.get( u"redirect" ) == u"https://luminotes.com/"

  def test_index_with_https_after_login_without_referer( self ):
    self.login()
    result = self.http_get(
      "/",
      session_id = self.session_id,
      pretend_https = True,
    )
    assert result
    assert result.get( u"redirect" ) == u"https://luminotes.com/notebooks/%s" % self.notebook.object_id

  def test_index_with_https_after_login_with_referer( self ):
    self.login()
    result = self.http_get(
      "/",
      session_id = self.session_id,
      headers = [ ( u"Referer", "http://whee" ) ],
      pretend_https = True,
    )
    assert result
    assert result.get( u"redirect" ) is None
    assert result[ u"user" ].username == self.user.username
    assert len( result[ u"notebooks" ] ) == 1
    assert result[ u"first_notebook" ].object_id == self.notebook.object_id
    assert result[ u"login_url" ] == None
    assert result[ u"logout_url" ] == u"https://luminotes.com/users/logout"
    assert result[ u"rate_plan" ]

  def test_index_auto_login( self ):
    self.settings[ u"global" ][ u"luminotes.auto_login_username" ] = self.username
    result = self.http_get(
      "/",
    )
    assert result
    assert result.get( u"redirect" ) == u"/notebooks/%s" % self.notebook.object_id
    # confirm that we're now logged in and can access the user's notebook without an error
    result = self.http_get(
      result.get( u"redirect" ),
      session_id = self.session_id,
    )
    assert u"error" not in result

  def test_index_auto_login_while_already_logged_in( self ):
    self.login()
    self.settings[ u"global" ][ u"luminotes.auto_login_username" ] = self.username
    result = self.http_get(
      "/",
      session_id = self.session_id,
    )
    assert result
    assert result.get( u"redirect" ) == u"/notebooks/%s" % self.notebook.object_id
    # confirm that we're now logged in and can access the user's notebook without an error
    result = self.http_get(
      result.get( u"redirect" ),
      session_id = self.session_id,
    )
    assert u"error" not in result

  def test_index_auto_login_with_unknown_username( self ):
    self.settings[ u"global" ][ u"luminotes.auto_login_username" ] = u"unknownusername"
    result = self.http_get(
      "/",
    )
    assert result
    assert result.get( u"redirect" ) is None
    result = self.http_get(
      u"/notebooks/%s" % self.notebook.object_id,
      session_id = self.session_id,
    )
    assert result.get( "status" ) == "302 Found" # redirect to login page

  # -- tour pages ----------------------------------------------------------

  def test_tour( self ):
    result = self.http_get( u"/tour" )
    assert result
    assert result.get( u"redirect" ) is None
    assert result[ u"user" ].username == u"anonymous"
    assert len( result[ u"notebooks" ] ) == 4
    assert result[ u"first_notebook" ] == None
    assert result[ u"login_url" ] == u"https://luminotes.com/notebooks/%s?note_id=%s" % (
      self.anon_notebook.object_id, self.login_note.object_id,
    )
    assert result[ u"logout_url" ] == u"https://luminotes.com/users/logout"
    assert result[ u"rate_plan" ]

  def test_take_a_tour( self ):
    result = self.http_get( u"/take_a_tour" )
    assert result
    assert result.get( u"redirect" ) == u"/tour"

  def test_tour_after_login( self ):
    self.login()
    result = self.http_get(
      u"/tour",
      session_id = self.session_id,
    )
    assert result
    assert result.get( u"redirect" ) is None
    assert result[ u"user" ].username == self.user.username
    assert len( result[ u"notebooks" ] ) == 1
    assert result[ u"first_notebook" ].object_id == self.notebook.object_id
    assert result[ u"login_url" ] == None
    assert result[ u"logout_url" ] == u"https://luminotes.com/users/logout"
    assert result[ u"rate_plan" ]

  def test_take_a_tour_after_login( self ):
    self.login()
    result = self.http_get(
      u"/take_a_tour",
      session_id = self.session_id,
    )
    assert result
    assert result.get( u"redirect" ) == u"/tour"

  # -- note-name shortcut URLs ("/<note_name>") ----------------------------

  def test_default( self ):
    result = self.http_get(
      "/my_note",
    )
    assert result
    assert result[ u"notes" ]
    assert len( result[ u"notes" ] ) == 1
    assert result[ u"notes" ][ 0 ].object_id == self.anon_note.object_id
    assert result[ u"notebook" ].object_id == self.anon_notebook.object_id
    assert result[ u"user" ].object_id == self.anonymous.object_id

  def test_default_with_invite_id( self ):
    result = self.http_get(
      "/my_note?invite_id=whee",
    )
    assert result
    assert result[ u"notes" ]
    assert len( result[ u"notes" ] ) == 1
    assert result[ u"notes" ][ 0 ].object_id == self.anon_note.object_id
    assert result[ u"notebook" ].object_id == self.anon_notebook.object_id
    assert result[ u"invite_id" ] == u"whee"
    assert result[ u"user" ].object_id == self.anonymous.object_id

  def test_default_with_after_login( self ):
    after_login = "/foo/bar"
    result = self.http_get(
      "/my_note?after_login=%s" % after_login,
    )
    assert result
    assert result[ u"notes" ]
    assert len( result[ u"notes" ] ) == 1
    assert result[ u"notes" ][ 0 ].object_id == self.anon_note.object_id
    assert result[ u"notebook" ].object_id == self.anon_notebook.object_id
    assert result[ u"after_login" ] == after_login
    assert result[ u"user" ].object_id == self.anonymous.object_id

  def test_default_with_after_login_with_full_url( self ):
    # A full (absolute) after_login URL must be rejected to prevent open redirects.
    after_login = "http://example.com/foo/bar"
    result = self.http_get(
      "/my_note?after_login=%s" % after_login,
    )
    assert result
    assert result[ u"notes" ]
    assert len( result[ u"notes" ] ) == 1
    assert result[ u"notes" ][ 0 ].object_id == self.anon_note.object_id
    assert result[ u"notebook" ].object_id == self.anon_notebook.object_id
    assert result.get( u"after_login" ) is None
    assert result[ u"user" ].object_id == self.anonymous.object_id

  def test_default_with_plan( self ):
    plan = u"17"
    result = self.http_get(
      "/my_note?plan=%s" % plan,
    )
    assert result
    assert result[ u"notes" ]
    assert len( result[ u"notes" ] ) == 1
    assert result[ u"notes" ][ 0 ].object_id == self.anon_note.object_id
    assert result[ u"notebook" ].object_id == self.anon_notebook.object_id
    assert result[ u"signup_plan" ] == 17
    assert result[ u"user" ].object_id == self.anonymous.object_id

  def test_default_with_plan_and_yearly( self ):
    plan = u"17"
    result = self.http_get(
      "/my_note?plan=%s&yearly=True" % plan,
    )
    assert result
    assert result[ u"notes" ]
    assert len( result[ u"notes" ] ) == 1
    assert result[ u"notes" ][ 0 ].object_id == self.anon_note.object_id
    assert result[ u"notebook" ].object_id == self.anon_notebook.object_id
    assert result[ u"signup_plan" ] == 17
    assert result[ u"user" ].object_id == self.anonymous.object_id

  def test_default_after_login( self ):
    self.login()
    result = self.http_get(
      "/my_note",
      session_id = self.session_id,
    )
    assert result
    assert result[ u"notes" ]
    assert len( result[ u"notes" ] ) == 1
    assert result[ u"notes" ][ 0 ].object_id == self.anon_note.object_id
    assert result[ u"notebook" ].object_id == self.anon_notebook.object_id
    assert result[ u"user" ].object_id == self.user.object_id

  def test_default_with_unknown_note( self ):
    result = self.http_get(
      "/unknown_note",
    )
    body = result.get( u"body" )
    assert body
    assert len( body ) > 0
    assert u"404" in body[ 0 ]

  def test_default_with_login_note( self ):
    result = self.http_get(
      "/login",
    )
    assert result
    assert result.get( "redirect" )
    assert result.get( "redirect" ).startswith( "https://" )

  def test_default_with_sign_up_note( self ):
    result = self.http_get(
      "/sign_up",
    )
    assert result
    assert result.get( "redirect" )
    assert result.get( "redirect" ).startswith( "https://" )

  def test_guide( self ):
    result = self.http_get(
      "/guide",
    )
    assert result
    assert u"error" not in result
    assert result[ u"notebook" ].object_id == self.guide_notebook.object_id

  def test_guide_with_note_id( self ):
    result = self.http_get(
      "/guide?note_id=%s" % self.guide_note.object_id,
    )
    assert result
    assert u"error" not in result
    assert result[ u"notebook" ].object_id == self.guide_notebook.object_id

  def test_privacy( self ):
    result = self.http_get(
      "/privacy",
    )
    assert result
    assert u"error" not in result
    assert result[ u"notebook" ].object_id == self.privacy_notebook.object_id

  # -- pricing / upgrade ---------------------------------------------------

  def test_pricing( self ):
    result = self.http_get( "/pricing" )
    assert result[ u"user" ].username == u"anonymous"
    assert len( result[ u"notebooks" ] ) == 4
    notebook = [ notebook for notebook in result[ u"notebooks" ] if notebook.object_id == self.anon_notebook.object_id ][ 0 ]
    assert notebook.object_id == self.anon_notebook.object_id
    assert notebook.name == self.anon_notebook.name
    assert notebook.read_write == Notebook.READ_ONLY
    assert notebook.owner == False
    rate_plan = result[ u"rate_plan" ]
    assert rate_plan
    assert rate_plan[ u"name" ] == u"super"
    assert rate_plan[ u"storage_quota_bytes" ] == 1337 * 10
    assert result[ u"first_notebook" ] == None
    assert result[ u"rate_plans" ] == self.settings[ u"global" ].get( u"luminotes.rate_plans", [] )
    assert result[ u"unsubscribe_button" ] == self.settings[ u"global" ].get( u"luminotes.unsubscribe_button" )

  def test_pricing_after_login( self ):
    self.login()
    result = self.http_get( "/pricing", session_id = self.session_id )
    assert result[ u"user" ].username == self.username
    assert len( result[ u"notebooks" ] ) == 1
    notebook = [ notebook for notebook in result[ u"notebooks" ] if notebook.object_id == self.notebook.object_id ][ 0 ]
    assert notebook.object_id == self.notebook.object_id
    assert notebook.name == self.notebook.name
    assert notebook.read_write == Notebook.READ_WRITE
    assert notebook.owner == True
    rate_plan = result[ u"rate_plan" ]
    assert rate_plan
    assert rate_plan[ u"name" ] == u"super"
    assert rate_plan[ u"storage_quota_bytes" ] == 1337 * 10
    assert result[ u"first_notebook" ].object_id == self.notebook.object_id
    assert result[ u"rate_plans" ] == self.settings[ u"global" ].get( u"luminotes.rate_plans", [] )
    assert result[ u"unsubscribe_button" ] == self.settings[ u"global" ].get( u"luminotes.unsubscribe_button" )

  def test_upgrade( self ):
    # Renamed from "upgrade": without the "test_" prefix this method was
    # never collected by the test runner, so the assertion never ran.
    result = self.http_get( "/upgrade" )
    assert result[ u"redirect" ] == u"/pricing"

  # -- server plumbing -----------------------------------------------------

  def test_next_id( self ):
    result = self.http_get( "/next_id" )
    assert result.get( "next_id" )
    result = self.http_get( "/next_id" )
    assert result.get( "next_id" )

  def test_ping( self ):
    result = self.http_get( "/ping" )
    assert result.get( "response" ) == u"pong"

  def test_shutdown( self ):
    self.settings[ u"global" ][ u"luminotes.allow_shutdown_command" ] = True
    assert cherrypy.server._is_ready() is True
    result = self.http_get( "/shutdown" )
    assert cherrypy.server._is_ready() is False

  def test_shutdown_disallowed_explicitly( self ):
    self.settings[ u"global" ][ u"luminotes.allow_shutdown_command" ] = False
    assert cherrypy.server._is_ready() is True
    result = self.http_get( "/shutdown" )
    assert cherrypy.server._is_ready() is True

  def test_shutdown_disallowed_implicitly( self ):
    # With no allow_shutdown_command setting at all, /shutdown must be a no-op.
    assert cherrypy.server._is_ready() is True
    result = self.http_get( "/shutdown" )
    assert cherrypy.server._is_ready() is True

  def test_404( self ):
    result = self.http_get( "/four_oh_four" )
    body = result.get( u"body" )
    assert body
    assert len( body ) > 0
    assert u"404" in body[ 0 ]
    status = result.get( u"status" )
    assert u"404" in status
    headers = result.get( u"headers" )
    status = headers.get( u"status" )
    assert u"404" in status

  def login( self ):
    """Helper: log in as the fixture user and remember the session id."""
    result = self.http_post( "/users/login", dict(
      username = self.username,
      password = self.password,
      login_button = u"login",
    ) )
    self.session_id = result[ u"session_id" ]

  # -- short redirect URLs -------------------------------------------------

  def test_redeem_reset( self ):
    redeem_reset_id = u"foobarbaz"
    result = self.http_get( "/r/%s" % redeem_reset_id )
    assert result[ u"redirect" ] == u"/users/redeem_reset/%s" % redeem_reset_id

  def test_redeem_invite( self ):
    invite_id = u"foobarbaz"
    result = self.http_get( "/i/%s" % invite_id )
    assert result[ u"redirect" ] == u"/users/redeem_invite/%s" % invite_id

  def test_download_thanks( self ):
    download_access_id = u"foobarbaz"
    result = self.http_get( "/d/%s" % download_access_id )
    assert result[ u"redirect" ] == u"/users/thanks_download?access_id=%s" % download_access_id
| osborne6/luminotes | controller/test/Test_root.py | Python | gpl-3.0 | 16,962 |
'''
Created on 2015年12月1日
https://leetcode.com/problems/flatten-binary-tree-to-linked-list/
@author: Darren
'''
class BTNode(object):
    """A binary tree node holding a value and optional left/right children."""

    def __init__(self, value):
        # Children start out empty; the caller links them up.
        self.left = None
        self.right = None
        self.value = value

    def __repr__(self):
        # Compact debug representation: just the stored value.
        return str(self.value)
class Solution(object):
    """Flattens a binary tree into a right-skewed "linked list" in pre-order,
    rewiring the tree in place (LeetCode 114)."""

    def __init__(self):
        # Last node appended to the flattened list so far.
        self.pre = None

    def flatten(self, root):
        """
        :type root: TreeNode
        :rtype: void Do not return anything, modify root in-place instead.
        """
        if root is None:
            return
        # Stash the right subtree first: appending below clobbers root.right.
        saved_right = root.right
        if self.pre is None:
            self.pre = root
        else:
            # Append root after the previously visited node.
            self.pre.left = None
            self.pre.right = root
            self.pre = root
        self.flatten(root.left)
        self.flatten(saved_right)
| darrencheng0817/AlgorithmLearning | Python/interview/practiceTwice/BinaryTree2LinkedList.py | Python | mit | 814 |
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Alexander Ezquevo <alexander@acysos.com>
# Copyright (c) 2018 Ignacio Ibeas Izquierdo <ignacio@acysos.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import models, fields
class res_company(models.Model):
    """Extend res.company with payroll accounting configuration fields."""
    _inherit = 'res.company'
    # Journal used for payroll accounting moves (required per company).
    payroll_journal = fields.Many2one(
        comodel_name='account.journal', string='Payroll journal', required=True)
    # Whether generated payroll moves should be confirmed automatically.
    move_confirm = fields.Boolean(string='Auto move confirm?')
    # Default payment mode applied to payroll payments (optional).
    payroll_payment_mode = fields.Many2one(
        comodel_name='account.payment.mode', string='Payroll Payment Mode')
| acysos/odoo-addons | account_payroll_import_base/models/company.py | Python | agpl-3.0 | 616 |
from __future__ import print_function, absolute_import
from .pip import Pip
from .pip3 import Pip3
from .pip_pypy import PipPypy
from .apt import Apt
from .bower import Bower
from .npm import Npm
from .npmg import NpmG
from .yarn import Yarn
from .yarng import YarnG
from .tsd import Tsd
from .typings import Typings
from .exceptions import UnknownManager
from .private.pip import PrivatePip
from .private.pip3 import PrivatePip3
# Maps each supported manager key (as used on the command line / in config)
# to its cappa manager class.
MANAGER_MAP = {
    'pip': Pip,
    'pip3': Pip3,
    'pip_pypy': PipPypy,
    'sys': Apt,
    'npm': Npm,
    'npmg': NpmG,
    'yarn': Yarn,
    'yarng': YarnG,
    'bower': Bower,
    'typings': Typings,
    'tsd': Tsd
}
# Subset of managers that support installing from private repositories.
PRIVATE_MANAGER_MAP = {
    'pip': PrivatePip,
    'pip3': PrivatePip3
}
def manager_key_to_cappa(manager_key):
    """Return the cappa manager class registered for *manager_key*.

    Raises UnknownManager when the key has no registered manager.
    """
    try:
        return MANAGER_MAP[manager_key]
    except KeyError:
        raise UnknownManager('{} is not a supported manager.'.format(manager_key))
def private_manager_key_to_cappa(manager_key):
    """Return the private-repo cappa manager class for *manager_key*.

    Raises UnknownManager when the key has no private-repo manager.
    """
    try:
        return PRIVATE_MANAGER_MAP[manager_key]
    except KeyError:
        raise UnknownManager('{} is not a supported private repo manager.'.format(manager_key))
| Captricity/cappa | cappa/factory.py | Python | mit | 1,186 |
import xml.etree.ElementTree as ET
# #############
# XML related helper functions
def nspace(element_name, mask=False):
    """Qualify *element_name* with the COLLADA 1.4 XML namespace.

    With mask=False the result is a ready-to-use ElementTree tag,
    "{<namespace>}name".  With mask=True the braces are doubled so the
    result can safely pass through str.format() later (the xpath templates
    in this module are format strings).
    """
    name_space = 'http://www.collada.org/2005/11/COLLADASchema'
    open_brace, close_brace = ('{{', '}}') if mask else ('{', '}')
    return open_brace + name_space + close_brace + element_name
# #############
# XPATH queries
# Precomputed xpath expressions into a COLLADA document.  Those built with
# nspace(..., True) contain doubled braces and are str.format() templates
# (filled in with scene_url / geometry_url / source_url before use).
xp_scenes = nspace('scene') \
            + '/' + nspace('instance_visual_scene')
xp_scene_nodes = nspace('library_visual_scenes', True) \
                 + '/' + nspace('visual_scene', True) \
                 + '[@id=\'{scene_url}\']' \
                 + '/' + nspace('node', True)
xp_subnodes = nspace('node')
xp_instance_geometry = nspace('instance_geometry')
xp_geometry = nspace('library_geometries', True) \
              + '/' + nspace('geometry', True) \
              + '[@id=\'{geometry_url}\']' \
              + '/' + nspace('mesh', True)
xp_node_location = nspace('translate') \
                   + '[@sid=\'location\']'
xp_node_rotation = nspace('rotate')
xp_node_scale = nspace('scale') \
                + '[@sid=\'scale\']'
xp_source = nspace('source', True) \
            + '[@id=\'{source_url}\']'
xp_source_accessor = nspace('technique_common') \
                     + '/' + nspace('accessor')
xp_source_accessor_params = nspace('param')
xp_geometry_vertices_input = nspace('vertices') \
                             + '/' + nspace('input') \
                             + '[@semantic=\'POSITION\']'
# ############
# Helper functions
def parse_value_sequence(string_sequence, casters):
    """Split a whitespace-separated string into fixed-width groups of cast values.

    Tokens are consumed in groups of len(casters); the i-th token of each
    group is converted with casters[i].  A trailing partial group is kept
    (cast with the leading casters), matching e.g. "1 2 3 4" with three
    casters yielding [[1, 2, 3], [4]].

    Returns a list of lists; an empty/whitespace-only input yields [].
    """
    tokens = string_sequence.split()
    if not tokens:
        return []
    width = len(casters)
    return [
        [caster(token) for caster, token in zip(casters, tokens[start:start + width])]
        for start in range(0, len(tokens), width)
    ]
def read_source(source_node):
    """Read a COLLADA <source> element into (param_names, value_groups).

    Returns a tuple of the accessor's lowercased <param> names and the
    parsed value groups from the referenced value array, or None when the
    source has no accessor or the referenced array cannot be found.

    Fix: the original looked up the accessor (and None-checked it) twice;
    the redundant second lookup has been removed.
    """
    # Casters for the COLLADA param types this reader understands.
    type_casters = {
        'float': float,
        'name': str,
    }
    accessor = source_node.find(xp_source_accessor)
    if accessor is None:
        return None
    # The accessor's source attribute is "#<id>"; strip the leading '#'.
    source_url = accessor.attrib['source'][1:]
    values_node = source_node.find('./*[@id=\'{}\']'.format(source_url))
    if values_node is None:
        return None
    # One caster and one lowercased name per <param> in the accessor.
    casters = []
    names = []
    for param in accessor.findall(xp_source_accessor_params):
        casters.append(type_casters[param.attrib['type']])
        names.append(param.attrib['name'].lower())
    values = parse_value_sequence(values_node.text, casters)
    return names, values
# ############
# Parsing
def parse_collada_file(file_path):
    """Parse *file_path* as XML and return the document's root element."""
    return ET.parse(file_path).getroot()
def dae_object_from_node(node, collada_root, parent_object=None):
    """Build a DaeObject from a scene <node> element.

    Reads the node's name/id, its translate/rotate/scale transforms, and
    any instanced geometry (resolved via get_mesh_for_node).  If given,
    parent_object becomes the new object's parent.
    """
    name = node.attrib.get('name', None)
    scene_id = node.attrib.get('id', None)
    node_type = node.attrib.get('type', None)  # read but currently unused
    dae_obj = DaeObject(scene_id, name)
    if parent_object is not None:
        dae_obj.parent = parent_object
    # <translate sid="location"> holds the position as three floats.
    loc = node.find(xp_node_location)
    if loc is not None:
        dae_obj.pos = parse_value_sequence(loc.text, [float, float, float])[0]
    scale = node.find(xp_node_scale)
    if scale is not None:
        dae_obj.scale = parse_value_sequence(scale.text, [float, float, float])[0]
    # Each <rotate> element carries its angle as the 4th value; the axis is
    # taken from the last character of the sid (e.g. "rotationX" -> x).
    rotations = node.findall(xp_node_rotation)
    if len(rotations) > 0:
        rot = [0, 0, 0]
        for rotation in rotations:
            val = float(rotation.text.split()[3])
            axis = rotation.attrib['sid'][-1]
            if axis.lower() == 'x':
                rot[0] = val
            elif axis.lower() == 'y':
                rot[1] = val
            elif axis.lower() == 'z':
                rot[2] = val
        dae_obj.rot = rot
    # <instance_geometry url="#..."> links this node to mesh data; strip
    # the leading '#' before the lookup.
    instance_geometry = node.find(xp_instance_geometry)
    if instance_geometry is not None:
        url = instance_geometry.attrib['url'][1:]
        dae_obj.mesh = get_mesh_for_node(url, collada_root)
    return dae_obj
def dae_mesh_from_geometry(geometry_node, collada_root):
    """Build a DaeMesh from a <mesh> element's POSITION vertex source.

    Returns None when the mesh has no POSITION input.
    NOTE(review): read_source can itself return None, which would make the
    tuple unpacking below raise TypeError — confirm inputs always have a
    valid accessor/value array.
    """
    vertices_input = geometry_node.find(xp_geometry_vertices_input)
    if vertices_input is None:
        return None
    source_node = geometry_node.find(xp_source.format(source_url=vertices_input.attrib['source'][1:]))
    names, values = read_source(source_node)
    dae_mesh = DaeMesh()
    dae_mesh.vertices = values
    return dae_mesh
def get_sub_nodes(node, parent_object, dae_objects, collada_root):
    """Recursively convert child <node> elements of *node* into DaeObjects.

    Each child gets *parent_object* as its parent and is added to the
    *dae_objects* dict keyed by its scene id; recursion continues depth-first.
    """
    for sub_node in node.findall(xp_subnodes):
        dae_obj = dae_object_from_node(sub_node, collada_root, parent_object)
        dae_objects[dae_obj.scene_id] = dae_obj
        get_sub_nodes(sub_node, dae_obj, dae_objects, collada_root)
def get_mesh_for_node(url, collada_root):
    """Look up the <mesh> with geometry id *url* and return it as a DaeMesh.

    Returns None when no geometry with that id exists in the document.
    """
    mesh_node = collada_root.find(xp_geometry.format(geometry_url=url))
    if mesh_node is None:
        return None
    return dae_mesh_from_geometry(mesh_node, collada_root)
def get_scene_objects(collada_root, scene_url):
    """Return all DaeObjects of the visual scene with id *scene_url*.

    The result maps each object's scene id to its DaeObject, including
    nested child nodes (collected recursively via get_sub_nodes).
    """
    result = {}
    nodes = collada_root.findall(
        xp_scene_nodes.format(
            scene_url=scene_url
        )
    )
    for node in nodes:
        dae_obj = dae_object_from_node(node, collada_root)
        result[dae_obj.scene_id] = dae_obj
        get_sub_nodes(node, dae_obj, result, collada_root)
    return result
# ############
# DAE object
class DaeObject(object):
    """A node from the COLLADA scene graph: id/name, transform (pos/rot/scale),
    optional parent DaeObject, and optional DaeMesh geometry."""
    def __init__(self, scene_id, name):
        self._scene_id = scene_id
        self._name = name
        # Transform defaults: no translation, no rotation, unit scale.
        self._pos = [0, 0, 0]
        self._rot = [0, 0, 0]
        self._scale = [1, 1, 1]
        self._parent = None
        self._mesh = None
    @property
    def name(self):
        # Read-only: the node's human-readable name (may be None).
        return self._name
    @property
    def scene_id(self):
        # Read-only: the node's unique id within the scene.
        return self._scene_id
    @property
    def pos(self):
        # [x, y, z] translation, as read from <translate sid="location">.
        return self._pos
    @pos.setter
    def pos(self, value):
        self._pos = value
    @property
    def rot(self):
        # Per-axis rotation angles, as read from <rotate> elements.
        return self._rot
    @rot.setter
    def rot(self, value):
        self._rot = value
    @property
    def scale(self):
        # [x, y, z] scale factors, as read from <scale sid="scale">.
        return self._scale
    @scale.setter
    def scale(self, value):
        self._scale = value
    @property
    def parent(self):
        # Parent DaeObject in the scene hierarchy, or None for root nodes.
        return self._parent
    @parent.setter
    def parent(self, value):
        self._parent = value
    @property
    def mesh(self):
        # DaeMesh for nodes with <instance_geometry>, else None.
        return self._mesh
    @mesh.setter
    def mesh(self, value):
        self._mesh = value
    def __repr__(self):
        # "<name> - <parent name or None> - [x,y,z] - <mesh>"
        if self._parent is not None:
            return '{} - {} - [{:.8g},{:.8g},{:.8g}] - {}'.format(self._name, self._parent.name, self.pos[0],
                                                                  self.pos[1],
                                                                  self.pos[2], self._mesh)
        else:
            return '{} - None - [{:.8g},{:.8g},{:.8g}] - {}'.format(self._name, self.pos[0], self.pos[1], self.pos[2],
                                                                    self._mesh)
class DaeMesh(object):
    """Container for mesh geometry read from a COLLADA <geometry> element."""

    def __init__(self):
        # Vertex coordinate groups; None until assigned by the parser.
        self._vertices = None

    @property
    def vertices(self):
        """Vertex positions parsed from the mesh's POSITION source."""
        return self._vertices

    @vertices.setter
    def vertices(self, new_vertices):
        self._vertices = new_vertices
class DaeAnimation(object):
    """Placeholder for animation channels; every accessor yields None until
    animation parsing is implemented."""

    @property
    def positions(self):
        # Position key frames (not implemented).
        return None

    @property
    def rotations(self):
        # Rotation key frames (not implemented).
        return None

    @property
    def scalings(self):
        # Scaling key frames (not implemented).
        return None
class KeyFrame(object):
    """Single animation key frame (stub).

    NOTE(review): _value and _time are never assigned anywhere in this file,
    so the value/time properties raise AttributeError unless some caller
    sets those attributes first — confirm intended usage.
    """
    @property
    def value(self):
        # The keyed value at this frame.
        return self._value
    @property
    def time(self):
        # Time of this key frame.
        return self._time
    @property
    def frame_type(self):
        # Only linear interpolation is represented for now.
        return "LINEAR"
| grungypolygon/posertools-daesupplement | GrungyPolygon/collada.py | Python | apache-2.0 | 7,974 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A TFGAN-backed GAN Estimator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import enum
from tensorflow.contrib.framework.python.ops import variables as variable_lib
from tensorflow.contrib.gan.python import namedtuples as tfgan_tuples
from tensorflow.contrib.gan.python import train as tfgan_train
from tensorflow.contrib.gan.python.eval.python import summaries as tfgan_summaries
from tensorflow.python.estimator import estimator
from tensorflow.python.estimator import model_fn as model_fn_lib
from tensorflow.python.framework import ops
from tensorflow.python.ops import metrics as metrics_lib
from tensorflow.python.ops import variable_scope
from tensorflow.python.util import tf_inspect as inspect
__all__ = [
'GANEstimator',
'SummaryType'
]
class SummaryType(enum.IntEnum):
  """Kinds of TFGAN summaries that can be attached to the model graph.

  Every value except NONE indexes into `_summary_type_map` below, which maps
  it to the `tfgan_summaries` helper that adds the summaries.
  """
  NONE = 0
  VARIABLES = 1
  IMAGES = 2
  IMAGE_COMPARISON = 3
# Maps each SummaryType (except NONE) to the TFGAN helper that attaches the
# corresponding summaries to a GANModel.
_summary_type_map = {
    SummaryType.VARIABLES: tfgan_summaries.add_gan_model_summaries,
    SummaryType.IMAGES: tfgan_summaries.add_gan_model_image_summaries,
    SummaryType.IMAGE_COMPARISON: tfgan_summaries.add_image_comparison_summaries,  # pylint:disable=line-too-long
}
# TODO(joelshor): For now, this only supports 1:1 generator:discriminator
# training sequentially. Find a nice way to expose options to the user without
# exposing internals.
class GANEstimator(estimator.Estimator):
  """An estimator for Generative Adversarial Networks (GANs).

  This Estimator is backed by TFGAN. The network functions follow the TFGAN API
  except for one exception: if either `generator_fn` or `discriminator_fn` have
  an argument called `mode`, then the tf.Estimator mode is passed in for that
  argument. This helps with operations like batch normalization, which have
  different train and evaluation behavior.

  Example:

  ```python
      import tensorflow as tf
      tfgan = tf.contrib.gan

      # See TFGAN's `train.py` for a description of the generator and
      # discriminator API.
      def generator_fn(generator_inputs):
        ...
        return generated_data

      def discriminator_fn(data, conditioning):
        ...
        return logits

      # Create GAN estimator.
      gan_estimator = tfgan.estimator.GANEstimator(
          model_dir,
          generator_fn=generator_fn,
          discriminator_fn=discriminator_fn,
          generator_loss_fn=tfgan.losses.wasserstein_generator_loss,
          discriminator_loss_fn=tfgan.losses.wasserstein_discriminator_loss,
          generator_optimizer=tf.train.AdamOptimizer(0.1, 0.5),
          discriminator_optimizer=tf.train.AdamOptimizer(0.1, 0.5))

      # Train estimator.
      gan_estimator.train(train_input_fn, steps)

      # Evaluate resulting estimator.
      gan_estimator.evaluate(eval_input_fn)

      # Generate samples from generator.
      predictions = np.array([
          x for x in gan_estimator.predict(predict_input_fn)])
  ```
  """

  def __init__(self,
               model_dir=None,
               generator_fn=None,
               discriminator_fn=None,
               generator_loss_fn=None,
               discriminator_loss_fn=None,
               generator_optimizer=None,
               discriminator_optimizer=None,
               get_hooks_fn=None,
               get_eval_metric_ops_fn=None,
               add_summaries=None,
               use_loss_summaries=True,
               config=None):
    """Initializes a GANEstimator instance.

    Args:
      model_dir: Directory to save model parameters, graph and etc. This can
        also be used to load checkpoints from the directory into a estimator
        to continue training a previously saved model.
      generator_fn: A python function that takes a Tensor, Tensor list, or
        Tensor dictionary as inputs and returns the outputs of the GAN
        generator. See `TFGAN` for more details and examples. Additionally, if
        it has an argument called `mode`, the Estimator's `mode` will be passed
        in (ex TRAIN, EVAL, PREDICT). This is useful for things like batch
        normalization.
      discriminator_fn: A python function that takes the output of
        `generator_fn` or real data in the GAN setup, and `generator_inputs`.
        Outputs a Tensor in the range [-inf, inf]. See `TFGAN` for more details
        and examples.
      generator_loss_fn: The loss function on the generator. Takes a `GANModel`
        tuple.
      discriminator_loss_fn: The loss function on the discriminator. Takes a
        `GANModel` tuple.
      generator_optimizer: The optimizer for generator updates, or a function
        that takes no arguments and returns an optimizer. This function will
        be called when the default graph is the `GANEstimator`'s graph, so
        utilities like `tf.contrib.framework.get_or_create_global_step` will
        work.
      discriminator_optimizer: Same as `generator_optimizer`, but for the
        discriminator updates.
      get_hooks_fn: A function that takes a `GANTrainOps` tuple and returns a
        list of hooks. These hooks are run on the generator and discriminator
        train ops, and can be used to implement the GAN training scheme.
        Defaults to `train.get_sequential_train_hooks()`.
      get_eval_metric_ops_fn: A function that takes a `GANModel`, and returns a
        dict of metric results keyed by name. The output of this function is
        passed into `tf.estimator.EstimatorSpec` during evaluation.
      add_summaries: `None`, a single `SummaryType`, or a list of `SummaryType`.
      use_loss_summaries: If `True`, add loss summaries. If `False`, does not.
        If `None`, uses defaults.
      config: `RunConfig` object to configure the runtime settings.

    Raises:
      ValueError: If loss functions aren't callable.
      ValueError: If `use_loss_summaries` isn't boolean or `None`.
      ValueError: If `get_hooks_fn` isn't callable or `None`.
    """
    # Fail fast on obviously invalid configuration before building any graph.
    if not callable(generator_loss_fn):
      raise ValueError('generator_loss_fn must be callable.')
    if not callable(discriminator_loss_fn):
      raise ValueError('discriminator_loss_fn must be callable.')
    if use_loss_summaries not in [True, False, None]:
      raise ValueError('use_loss_summaries must be True, False or None.')
    if get_hooks_fn is not None and not callable(get_hooks_fn):
      raise TypeError('get_hooks_fn must be callable.')
    # NOTE(review): `use_loss_summaries` is validated above but not referenced
    # again in this constructor body - confirm whether it should be forwarded
    # to the train-op/loss construction.

    def _model_fn(features, labels, mode):
      """GANEstimator model function."""
      if mode not in [model_fn_lib.ModeKeys.TRAIN, model_fn_lib.ModeKeys.EVAL,
                      model_fn_lib.ModeKeys.PREDICT]:
        raise ValueError('Mode not recognized: %s' % mode)
      real_data = labels  # rename inputs for clarity
      generator_inputs = features  # rename inputs for clarity

      # Make GANModel, which encapsulates the GAN model architectures.
      gan_model = _get_gan_model(
          mode, generator_fn, discriminator_fn, real_data, generator_inputs,
          add_summaries)

      # Make the EstimatorSpec, which incorporates the GANModel, losses, eval
      # metrics, and optimizers (if required).
      return _get_estimator_spec(
          mode, gan_model, generator_loss_fn, discriminator_loss_fn,
          get_eval_metric_ops_fn, generator_optimizer, discriminator_optimizer,
          get_hooks_fn)

    super(GANEstimator, self).__init__(
        model_fn=_model_fn, model_dir=model_dir, config=config)
def _get_gan_model(
    mode, generator_fn, discriminator_fn, real_data, generator_inputs,
    add_summaries, generator_scope='Generator'):
  """Makes the GANModel tuple, which encapsulates the GAN model architecture."""
  if mode != model_fn_lib.ModeKeys.PREDICT:
    # TRAIN / EVAL need the full generator + discriminator pair.
    return _make_gan_model(
        generator_fn, discriminator_fn, real_data, generator_inputs,
        generator_scope, add_summaries, mode)
  # PREDICT only builds the generator, so real data must be absent.
  if real_data is not None:
    raise ValueError('`labels` must be `None` when mode is `predict`. '
                     'Instead, found %s' % real_data)
  return _make_prediction_gan_model(
      generator_inputs, generator_fn, generator_scope)
def _get_estimator_spec(
    mode, gan_model, generator_loss_fn, discriminator_loss_fn,
    get_eval_metric_ops_fn, generator_optimizer, discriminator_optimizer,
    get_hooks_fn=None):
  """Get the EstimatorSpec for the current mode."""
  if mode == model_fn_lib.ModeKeys.PREDICT:
    return model_fn_lib.EstimatorSpec(
        mode=mode, predictions=gan_model.generated_data)

  # TRAIN and EVAL both need the generator/discriminator losses.
  gan_loss = tfgan_tuples.GANLoss(
      generator_loss=generator_loss_fn(gan_model),
      discriminator_loss=discriminator_loss_fn(gan_model))
  if mode == model_fn_lib.ModeKeys.EVAL:
    return _get_eval_estimator_spec(gan_model, gan_loss, get_eval_metric_ops_fn)

  # TRAIN: optimizers may be zero-arg callables so they are constructed in
  # the Estimator's graph; resolve them here.
  gopt = (generator_optimizer() if callable(generator_optimizer)
          else generator_optimizer)
  dopt = (discriminator_optimizer() if callable(discriminator_optimizer)
          else discriminator_optimizer)
  hooks_fn = get_hooks_fn or tfgan_train.get_sequential_train_hooks()
  return _get_train_estimator_spec(gan_model, gan_loss, gopt, dopt, hooks_fn)
def _make_gan_model(generator_fn, discriminator_fn, real_data,
                    generator_inputs, generator_scope, add_summaries, mode):
  """Construct a `GANModel`, and optionally pass in `mode`."""
  def _maybe_bind_mode(network_fn):
    # Networks that declare a `mode` argument receive the Estimator mode
    # (useful for batch norm and similar train/eval-dependent ops).
    if 'mode' in inspect.getargspec(network_fn).args:
      return functools.partial(network_fn, mode=mode)
    return network_fn

  gan_model = tfgan_train.gan_model(
      _maybe_bind_mode(generator_fn),
      _maybe_bind_mode(discriminator_fn),
      real_data,
      generator_inputs,
      generator_scope=generator_scope,
      check_shapes=False)
  if add_summaries:
    if isinstance(add_summaries, (tuple, list)):
      summary_types = add_summaries
    else:
      summary_types = [add_summaries]
    with ops.name_scope(None):
      for summary_type in summary_types:
        _summary_type_map[summary_type](gan_model)
  return gan_model
def _make_prediction_gan_model(generator_inputs, generator_fn, generator_scope):
  """Make a `GANModel` from just the generator."""
  # If `generator_fn` declares a `mode` argument, bind PREDICT to it.
  if 'mode' in inspect.getargspec(generator_fn).args:
    generator_fn = functools.partial(generator_fn,
                                     mode=model_fn_lib.ModeKeys.PREDICT)
  with variable_scope.variable_scope(generator_scope) as gen_scope:
    inputs = tfgan_train._convert_tensor_or_l_or_d(generator_inputs)  # pylint:disable=protected-access
    generated_data = generator_fn(inputs)
  generator_variables = variable_lib.get_trainable_variables(gen_scope)

  # Discriminator-side fields are unused at prediction time.
  return tfgan_tuples.GANModel(
      inputs,
      generated_data,
      generator_variables,
      gen_scope,
      generator_fn,
      real_data=None,
      discriminator_real_outputs=None,
      discriminator_gen_outputs=None,
      discriminator_variables=None,
      discriminator_scope=None,
      discriminator_fn=None)
def _get_eval_estimator_spec(gan_model, gan_loss, get_eval_metric_ops_fn=None,
                             name=None):
  """Return an EstimatorSpec for the eval case."""
  def _summary_key(head_name, val):
    # Head-scoped metric names, e.g. 'generator_loss/head1'.
    return '%s/%s' % (val, head_name) if head_name else val

  scalar_loss = gan_loss.generator_loss + gan_loss.discriminator_loss
  with ops.name_scope(None, 'metrics',
                      [gan_loss.generator_loss,
                       gan_loss.discriminator_loss]):
    eval_metric_ops = {
        _summary_key(name, 'generator_loss'):
            metrics_lib.mean(gan_loss.generator_loss),
        _summary_key(name, 'discriminator_loss'):
            metrics_lib.mean(gan_loss.discriminator_loss)
    }
    if get_eval_metric_ops_fn is not None:
      custom_metric_ops = get_eval_metric_ops_fn(gan_model)
      if not isinstance(custom_metric_ops, dict):
        raise TypeError('get_eval_metric_ops_fn must return a dict, '
                        'received: {}'.format(custom_metric_ops))
      eval_metric_ops.update(custom_metric_ops)
  return model_fn_lib.EstimatorSpec(
      mode=model_fn_lib.ModeKeys.EVAL,
      predictions=gan_model.generated_data,
      loss=scalar_loss,
      eval_metric_ops=eval_metric_ops)
def _get_train_estimator_spec(
    gan_model, gan_loss, generator_optimizer, discriminator_optimizer,
    get_hooks_fn, train_op_fn=tfgan_train.gan_train_ops):
  """Return an EstimatorSpec for the train case."""
  scalar_loss = gan_loss.generator_loss + gan_loss.discriminator_loss
  train_op_tuple = train_op_fn(gan_model, gan_loss, generator_optimizer,
                               discriminator_optimizer)
  # The hooks drive the actual generator/discriminator update sequencing.
  hooks = get_hooks_fn(train_op_tuple)
  return model_fn_lib.EstimatorSpec(
      mode=model_fn_lib.ModeKeys.TRAIN,
      loss=scalar_loss,
      train_op=train_op_tuple.global_step_inc_op,
      training_hooks=hooks)
| aselle/tensorflow | tensorflow/contrib/gan/python/estimator/python/gan_estimator_impl.py | Python | apache-2.0 | 13,972 |
import unittest
from troposphere import Ref
from troposphere.cloudformation import WaitCondition, WaitConditionHandle
from troposphere.policies import CreationPolicy, ResourceSignal
class TestWaitCondition(unittest.TestCase):
    """Validation behaviour of cloudformation.WaitCondition."""

    def test_CreationPolicy(self):
        # A CreationPolicy alone is a valid configuration.
        cond = WaitCondition(
            "mycondition",
            CreationPolicy=CreationPolicy(
                ResourceSignal=ResourceSignal(Timeout='PT15M')),
        )
        cond.validate()

    def test_CreationPolicyWithProps(self):
        # Mixing a CreationPolicy with regular properties must be rejected.
        cond = WaitCondition(
            "mycondition",
            Count=10,
            CreationPolicy=CreationPolicy(
                ResourceSignal=ResourceSignal(Timeout='PT15M')),
        )
        self.assertRaises(ValueError, cond.validate)

    def test_RequiredProps(self):
        # Handle + Timeout satisfy the classic required-property form.
        handle = WaitConditionHandle("myWaitHandle")
        cond = WaitCondition(
            "mycondition",
            Handle=Ref(handle),
            Timeout="300",
        )
        cond.validate()
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| pas256/troposphere | tests/test_cloudformation.py | Python | bsd-2-clause | 1,087 |
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from django.conf import settings
from memcached_clients import RestclientPymemcacheClient
class IDCardPhotoCache(RestclientPymemcacheClient):
    """Cache policy for restclient responses used by the ID card photo view."""

    def get_cache_expiration_time(self, service, url, status=None):
        # Only PWS responses get a TTL (default one hour); other services
        # fall through and return None, as before.
        if service == 'pws':
            return getattr(settings, 'IDCARD_PHOTO_EXPIRES', 60 * 60)
| uw-it-aca/course-roster-lti | course_roster/cache.py | Python | apache-2.0 | 399 |
# -*- coding: utf-8 -*-
class Constraint(object):
    """
    A Constraint checks whether the variables in list_vars satisfy a predicate.

    The base class delegates to a user-supplied function(state, list_vars);
    when no function is given, every state is accepted.
    """

    def __init__(self, list_vars, function=None):
        self.list_vars = list_vars
        self.coeff_count = 1
        self.function = function

    def check(self, state):
        if self.function is None:
            return True
        return self.function(state, self.list_vars)
class MustBeDifferentConstraint(Constraint):
    """
    Satisfied when every variable in list_vars has a distinct value.
    """

    def __init__(self, list_vars):
        super(MustBeDifferentConstraint, self).__init__(list_vars)

    def check(self, state):
        """Return True iff no two variables share a value in `state`.

        Uses a set for O(n) membership instead of the previous O(n^2)
        list scan. NOTE: assumes the values in `state` are hashable
        (ints in typical CSP assignments).
        """
        seen = set()
        for var in self.list_vars:
            value = state[var]
            if value in seen:
                return False
            seen.add(value)
        return True
class SumEqualsConstraint(Constraint):
    """
    Satisfied when the values of list_vars add up to _sum.

    coeff_count is updated on every check() to the absolute distance
    between the target and the actual sum.
    """

    def __init__(self, _sum, list_vars):
        super(SumEqualsConstraint, self).__init__(list_vars)
        self.sum = _sum

    def check(self, state):
        actual = sum(state[var] for var in self.list_vars)
        self.coeff_count = abs(self.sum - actual)
        return self.coeff_count == 0
| marienfressinaud/AI_Puzzle-Solver | PuzzleSolver/LocalSearchGames/constraints.py | Python | mit | 1,430 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class WebsiteConfigSettings(models.TransientModel):
    """Website settings wizard: mostly `related` proxies onto the website record."""
    _name = 'website.config.settings'
    _inherit = 'res.config.settings'

    def _default_website(self):
        # First website found; presumably single-website deployments - confirm.
        return self.env['website'].search([], limit=1)

    website_id = fields.Many2one('website', string="website", default=_default_website, required=True)
    website_name = fields.Char('Website Name', related='website_id.name')

    # Localization settings, proxied from the website record.
    language_ids = fields.Many2many(related='website_id.language_ids', relation='res.lang')
    default_lang_id = fields.Many2one(related='website_id.default_lang_id', relation='res.lang')
    default_lang_code = fields.Char('Default language code', related='website_id.default_lang_code')

    # Analytics and social links.
    google_analytics_key = fields.Char('Google Analytics Key', related='website_id.google_analytics_key')
    social_twitter = fields.Char(related='website_id.social_twitter')
    social_facebook = fields.Char(related='website_id.social_facebook')
    social_github = fields.Char(related='website_id.social_github')
    social_linkedin = fields.Char(related='website_id.social_linkedin')
    social_youtube = fields.Char(related='website_id.social_youtube')
    social_googleplus = fields.Char(related='website_id.social_googleplus')

    # Performance / delivery settings.
    compress_html = fields.Boolean('Compress rendered HTML for a better Google PageSpeed result', related='website_id.compress_html')
    cdn_activated = fields.Boolean('Use a Content Delivery Network (CDN)', related='website_id.cdn_activated')
    cdn_url = fields.Char(related='website_id.cdn_url')
    cdn_filters = fields.Text(related='website_id.cdn_filters')

    # `module_*` selections toggle optional module installation via the
    # res.config.settings machinery.
    module_website_form_editor = fields.Selection([
        (0, 'Use standard forms'),
        (1, 'Create and customize forms to generate emails, leads, issues and extra information in the checkout process (new snippet available)')
    ], "Form Builder")
    module_website_version = fields.Selection([
        (0, 'No version management and A/B testing (easy)'),
        (1, 'Allow multiple versions of the same page (advanced)')
    ], "A/B Testing")
    favicon = fields.Binary('Favicon', related='website_id.favicon')

    # Set as global config parameter since methods using it are not website-aware. To be changed
    # when multi-website is implemented
    google_maps_api_key = fields.Char(string='Google Maps API Key')

    def set_google_maps_api_key(self):
        # Stored trimmed; readable only by system administrators.
        self.env['ir.config_parameter'].set_param(
            'google_maps_api_key', (self.google_maps_api_key or '').strip(), groups=['base.group_system'])

    def get_default_google_maps_api_key(self, fields):
        # `fields` parameter shadows the odoo.fields module inside this method.
        google_maps_api_key = self.env['ir.config_parameter'].get_param('google_maps_api_key', default='')
        return dict(google_maps_api_key=google_maps_api_key)
| kosgroup/odoo | addons/website/models/res_config.py | Python | gpl-3.0 | 2,870 |
import legume
import time
import random
from openps.shared.message import ServerMessage
from openps.shared.message import ServerCommand
from openps.shared.message import PlayerMessage
class ServerRoom:
    """A waiting room: simply the list of peers that have joined it."""

    def __init__(self):
        # Connection objects of the peers currently in this room.
        self.peers = []
class Server:
    """Matchmaking/relay server: tracks peers, waiting rooms and running games."""

    # UDP port the legume server listens on.
    PORT = 29050

    def __init__(self):
        # Last ServerMessage sent to each peer, keyed by peer address.
        self.peersState = {}
        # Waiting rooms by id; a room moves to `games` when its game starts.
        self.rooms = {}
        self.nextRoomId = 1
        # Rooms whose game has started, by room id.
        self.games = {}

    def update_peer(self, peer):
        # Push the peer's current state message to it (reliable delivery).
        peer.send_reliable_message(self.peersState[peer.address])

    def hello(self, peer):
        # Greet a newly seen connection and ask what it wants to do.
        m = ServerMessage()
        m.state.value = ServerMessage.WHAT_DO_YOU_WANT
        self.peersState[peer.address] = m
        self.update_peer(peer)

    def create_room(self, peer):
        # Open a fresh room with the requesting peer as player 0.
        r = ServerRoom()
        r.peers.append(peer)
        self.peersState[peer.address].room_id.value = self.nextRoomId
        self.peersState[peer.address].state.value = ServerMessage.IN_ROOM
        self.peersState[peer.address].player_id.value = 0
        self.rooms[self.nextRoomId] = r
        self.nextRoomId += 1
        self.update_peer(peer)

    def find_room(self, peer):
        # Join an existing room, or answer NOPE when none exist.
        if len(self.rooms) == 0:
            self.peersState[peer.address].state.value = ServerMessage.NOPE
        else:
            # NOTE(review): joins the first room in dict order with no
            # capacity check - confirm rooms may hold any number of peers.
            for r in self.rooms:
                self.peersState[peer.address].state.value = ServerMessage.IN_ROOM
                self.peersState[peer.address].room_id.value = r
                self.peersState[peer.address].player_id.value = len(self.rooms[r].peers)
                self.rooms[r].peers.append(peer)
                break
        self.update_peer(peer)

    def start_game(self, peer):
        # Move the requester's room into the running games and notify members.
        print(str(peer.address)+" start game")
        print("room id: "+str(self.peersState[peer.address].room_id.value))
        r = self.peersState[peer.address].room_id.value
        if not r in self.rooms:
            print("Room "+str(r)+" does not exist")
            return
        self.games[r] = self.rooms[r]
        del self.rooms[r]
        # One shared RNG seed is sent to all players.
        seed = random.random()
        for p in self.games[r].peers:
            self.peersState[p.address].state.value = ServerMessage.IN_GAME
            self.peersState[p.address].seed.value = seed
            print(str(p.address)+" in game")
            self.update_peer(p)

    def message_handler(self, sender, message):
        # Dispatch incoming legume messages by their type id.
        if message.MessageTypeID == ServerMessage.MessageTypeID:
            print("ServerMessage from "+str(sender.address))
        elif message.MessageTypeID == ServerCommand.MessageTypeID:
            print("command "+str(message.command.value)+" from "+str(sender.address))
            if message.command.value == ServerCommand.CREATE_ROOM:
                self.create_room(sender)
            elif message.command.value == ServerCommand.FIND_ROOM:
                self.find_room(sender)
            elif message.command.value == ServerCommand.START_GAME:
                self.start_game(sender)
        elif message.MessageTypeID == PlayerMessage.MessageTypeID:
            print("player command from "+str(sender.address)+" player id="+str(message.player_id.value))
            # Relay in-game player messages to every peer in the sender's game
            # (including the sender itself).
            if self.peersState[sender.address] and self.peersState[sender.address].state.value == ServerMessage.IN_GAME:
                if self.peersState[sender.address].room_id.value in self.games:
                    for p in self.games[self.peersState[sender.address].room_id.value].peers:
                        p.send_reliable_message(message)

    def run(self):
        """Main loop: pump legume, prune dead peers once a second, greet new ones."""
        s = legume.Server()
        s.OnMessage += self.message_handler
        s.listen(('', Server.PORT))
        t = time.time()
        player_counter = 0  # NOTE(review): never used
        while True:
            s.update()
            if time.time() > t + 1.0:
                t = time.time()
                # Drop state for peers legume no longer knows about.
                for peerAdr in list(self.peersState.keys()):
                    found = False
                    for peer in s.peers:
                        if peer.address == peerAdr:
                            found = True
                            break
                    if not found:
                        print("Lost Connexion: "+str(peerAdr)+" player_id: "+str(self.peersState[peerAdr].player_id.value))
                        del(self.peersState[peerAdr])
            # Greet peers we have not seen before.
            for peer in s.peers:
                #print(str(peer.address)+" "+str(peer.latency))
                if not peer.address in self.peersState:
                    print("New Connexion: "+str(peer.address))
                    self.hello(peer)
            time.sleep(0.001)
if __name__ == '__main__':
    # Run the matchmaking server until interrupted.
    s = Server()
    s.run()
| Bramas/OpenPS | Server.py | Python | gpl-3.0 | 3,789 |
import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
from django.db.utils import IntegrityError
class Command(BaseCommand):
    """Management command: import talkgroup definitions from a CSV file."""
    help = 'Import talkgroup info'

    def add_arguments(self, parser):
        # Positional path to the CSV file.
        parser.add_argument('file')
        parser.add_argument(
            '--system',
            type=int,
            help='System that this import is for',
            required=True,
        )
        # NOTE(review): action='store_true' with default=True means truncation
        # is always on and the flag cannot disable it - confirm intent.
        # (Help text typo "Truncat" is user-facing and left unchanged here.)
        parser.add_argument(
            '--truncate',
            dest='truncate',
            action='store_true',
            help='Truncat any data that would not fit into the DB',
            default=True,
        )
        # Switch to the Radio Reference CSV column layout.
        parser.add_argument(
            '--rr',
            dest='rr',
            action='store_true',
            help='Import in Radio Refrence Format',
            default=False,
        )

    def handle(self, *args, **options):
        # Delegate to the module-level import routine.
        import_tg_file(self, options)
def import_tg_file(self, options):
    """Import talkgroups from a CSV file into the given System.

    `self` is the calling management Command (used for stdout writes).
    Two column layouts are supported:
      * default (trunk-recorder): col0 dec_id, col2 mode, col3 alpha_tag,
        col4 description, col5 service_type, col7 priority (optional)
      * --rr: col0 dec_id, col2 alpha_tag, col3 mode, col4 description,
        col5 service_type (mode/alpha_tag columns are swapped)
    Rows that fail (integrity errors, short rows, bad values) are skipped
    silently.
    """
    file_name = options['file']
    system_id = options['system']
    truncate = options['truncate']
    rrFormat = options['rr']
    try:
        system = System.objects.get(pk=system_id)
    except System.DoesNotExist:
        # Help the operator by listing valid system ids before aborting.
        self.stdout.write("Valid systems")
        for system in System.objects.all():
            self.stdout.write("#{} - {}".format(system.pk, system.name))
        raise CommandError('System #{} was not a valid system'.format(system_id))
    self.stdout.write("Importing talkgroups for system #{} - {}".format(system.pk, system.name))
    if truncate:
        # Fetch DB column limits once, so over-long values can be clipped.
        mode_max_length = TalkGroup._meta.get_field('mode').max_length
        alpha_tag_max_length = TalkGroup._meta.get_field('alpha_tag').max_length
        description_max_length = TalkGroup._meta.get_field('description').max_length
    with open(file_name) as tg_file:
        tg_info = csv.reader(tg_file, delimiter=',', quotechar='"')
        line_number = 0
        # NOTE(review): the two branches below duplicate most of their logic
        # and differ only in column order and swallowed exception types -
        # candidates for consolidation.
        if not rrFormat:
            for row in tg_info:
                line_number+=1
                try:
                    if truncate:
                        if len(row[2]) > mode_max_length:
                            row[2] = row[2][:mode_max_length]
                            self.stdout.write("Truncating mode from line ({}) TG {}".format(line_number, row[3]))
                        if len(row[3]) > alpha_tag_max_length:
                            row[3] = row[3][:alpha_tag_max_length]
                            self.stdout.write("Truncating alpha_tag from line ({}) TG {}".format(line_number, row[3]))
                        if len(row[4]) > description_max_length:
                            row[4] = row[4][:description_max_length]
                            self.stdout.write("Truncating description from line ({}) TG {}".format(line_number, row[3]))
                    #print('LEN ' + str(len(row)))
                    # Priority defaults to 3 when column 7 is absent or invalid.
                    priority = 3
                    try:
                        priority = row[7]
                    except IndexError:
                        pass
                    try:
                        priority = int(priority)
                    except ValueError:
                        priority = 3
                    obj, create = TalkGroup.objects.update_or_create(dec_id=row[0], system=system, defaults={'mode': row[2], 'alpha_tag': row[3], 'description': row[4], 'priority': priority})
                    obj.service_type = row[5][:20]
                    obj.save()
                except (IntegrityError, IndexError):
                    # Malformed or duplicate rows are skipped silently.
                    pass
                    #print("Skipping {}".format(row[3]))
        else:
            for row in tg_info:
                line_number+=1
                try:
                    if truncate:
                        # Radio Reference layout: alpha_tag in col2, mode in col3.
                        if len(row[3]) > mode_max_length:
                            row[3] = row[3][:mode_max_length]
                            self.stdout.write("Truncating mode from line ({}) TG {}".format(line_number, row[2]))
                        if len(row[2]) > alpha_tag_max_length:
                            row[2] = row[2][:alpha_tag_max_length]
                            self.stdout.write("Truncating alpha_tag from line ({}) TG {}".format(line_number, row[2]))
                        if len(row[4]) > description_max_length:
                            row[4] = row[4][:description_max_length]
                            self.stdout.write("Truncating description from line ({}) TG {}".format(line_number, row[2]))
                    #print('LEN ' + str(len(row)))
                    priority = 3
                    obj, create = TalkGroup.objects.update_or_create(dec_id=row[0], system=system, defaults={'mode': row[3], 'alpha_tag': row[2], 'description': row[4], 'priority': priority})
                    obj.service_type = row[5][:20]
                    obj.save()
                except (IntegrityError, IndexError, ValueError):
                    pass
                    #print("Skipping {}".format(row[3]))
| ScanOC/trunk-player | radio/management/commands/import_talkgroups.py | Python | mit | 5,097 |
from math import *
from simplesvg import *
from copy import copy
################## A #####################
class Turtle:
    """Minimal turtle that records its path and writes it out as an SVG polyline."""

    def __init__(self, filename, size=(200, 200), startx=0, starty=0):
        self.x = startx
        self.y = starty
        self.alfa = 0            # heading in degrees; right() increases it
        self.up = False          # pen state: True means moves are not recorded
        self.pts = [(self.x, self.y)]
        self.dwg = make_drawing(filename=filename, size=size)

    def forward(self, step):
        """Advance along the current heading; record the endpoint if the pen is down."""
        heading = self.alfa * pi / 180
        self.x += step * cos(heading)
        self.y += step * sin(heading)
        if not self.up:
            self.pts.append((self.x, self.y))

    def back(self, step):
        """Move backwards without changing the heading."""
        self.forward(-step)

    def left(self, alfa):
        """Turn counter-clockwise by alfa degrees."""
        self.alfa -= alfa

    def right(self, alfa):
        """Turn clockwise by alfa degrees."""
        self.left(-alfa)

    def penup(self):
        self.up = True

    def pendown(self):
        self.up = False

    def save(self):
        """Emit the recorded path into the SVG drawing and write it out."""
        polyline(self.dwg, self.pts)
        save_drawing(self.dwg)
def drawPolygon(turtle, n, d):
    """Draw a regular n-gon with side length d, then save the drawing."""
    exterior = 180 - (n - 2) * 180 / n  # exterior angle of a regular n-gon
    for _ in range(n):
        turtle.forward(d)
        turtle.right(exterior)
    turtle.save()
def drawStar(turtle, n, d):
    """Draw an n-pointed star with edge length d, then save the drawing."""
    turn = 180 - 180 / n  # sharp turn that creates the star's points
    for _ in range(n):
        turtle.forward(d)
        turtle.right(turn)
    turtle.save()
################## B #####################
def drawRelA(turtle, n, d):
    """Figure A, relative version: an n-star overlaid with a regular n-gon."""
    exterior = 180 - (n - 2) * 180 / n
    # Star edges are scaled by 2*cos(exterior/2 in radians) relative to d.
    drawStar(turtle, n, 2 * d * cos(exterior / 2 * pi / 180))
    turtle.left(exterior / 2)
    for _ in range(n):
        turtle.forward(d)
        turtle.right(exterior)
    turtle.save()
def drawAbsA(fname, n, r):
    """Figure A, absolute version: all chords between n points on a circle."""
    dwg = make_drawing(fname, size=(2*r, 2*r))
    degree = 180 - (n - 2) * 180 / n
    points = [(r + r*sin(i * degree * pi/180), r + r*cos(i * degree * pi/180))
              for i in range(n)]
    # Connect every unordered pair of points exactly once.
    for i in range(n):
        start = points[i]
        for j in range(i + 1, n):
            line(dwg, start, points[j])
    save_drawing(dwg)
def drawRelB(turtle, s, r, c):
    """Figure B: c nested squares, each shrunk by factor r and slightly rotated."""
    right_angle = 90
    # Inward tilt between consecutive squares, fixed from the initial side.
    tilt = atan(s / (r * (s - s / r))) * 180 / pi
    side = s
    for _ in range(c):
        for _ in range(4):
            turtle.forward(side)
            turtle.right(right_angle)
        inset = side / r
        turtle.forward(inset)
        turtle.right(tilt)
        side = sqrt((side - inset) ** 2 + inset ** 2)
    turtle.save()
def drawAbsC(fname, r, n):
    """Figure C, absolute version: chords between 2n points on a circle of radius r."""
    dwg = make_drawing(fname, size=(2*r, 2*r))
    step = 360 / (n * 2)
    pts = [(r + r*sin(i * step * pi/180), r + r*cos(i * step * pi/180))
           for i in range(n * 2)]
    for i in range(n * 2):
        # Negative / wrapped indices are intentional: they mirror the point
        # list and produce the crossing-chord pattern.
        line(dwg, pts[i], pts[n - i])
        line(dwg, pts[i], pts[-i])
    save_drawing(dwg)
################## C #####################
def drawShrub(t, s, d):
    """Recursively draw a binary shrub: a trunk of length s, then two
    half-length branches at +-45 degrees, down to recursion depth d."""
    if d == 0:
        return
    t.forward(s)
    print(d, t.x, t.y, t.pts)  # debug trace of the current branch tip
    t.left(45)
    drawShrub(copy(t), s/2, d-1)
    t.right(90)
    drawShrub(copy(t), s/2, d-1)
if __name__=='__main__':
    # Demo entry point: draw a depth-2 shrub starting at the mid-left edge.
    t = Turtle('C_ker', startx=0, starty=100)
    drawShrub(t, 80, 2)
    t.save()
    # NOTE(review): the string literal below is disabled demo code for the
    # other exercises; it is never executed.
    """
    drawAbsC('C_abs', 40, 20)
    t4 = Turtle('B_rel', size=(300,300), startx=10, starty=10)
    drawRelB(t4, 380, 4, 12)
    drawAbsA('A_abs', 5, 40)
    t3 = Turtle('A_rel', size=(300,300), startx=30, starty=60)
    drawRelA(t3, 5, 90)
    t1 = Turtle('polygon', size=(300,300), startx=30, starty=10)
    drawPolygon(t1, 7, 40)
    t2 = Turtle('star', size=(300,300), startx=10, starty=40)
    drawStar(t2, 9, 80)
    """
| j3rgus/assignments | math toys/hw3.py | Python | gpl-3.0 | 2,896 |
import json
import logging
from functools import lru_cache
import pyvat
from django.conf import settings
from django.contrib import auth
from django.core import exceptions as django_exceptions
from django.core.validators import RegexValidator
from django.db import models as django_models
from django.db import transaction
from django.db.models import Q
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from rest_framework import exceptions, serializers
from waldur_core.core import fields as core_fields
from waldur_core.core import models as core_models
from waldur_core.core import serializers as core_serializers
from waldur_core.core.clean_html import clean_html
from waldur_core.core.fields import MappedChoiceField
from waldur_core.media.serializers import ProtectedMediaSerializerMixin
from waldur_core.structure import models
from waldur_core.structure import permissions as structure_permissions
from waldur_core.structure.exceptions import (
ServiceBackendError,
ServiceBackendNotImplemented,
)
from waldur_core.structure.filters import filter_visible_users
from waldur_core.structure.managers import filter_queryset_for_user
from waldur_core.structure.models import CUSTOMER_DETAILS_FIELDS
from waldur_core.structure.registry import get_resource_type, get_service_type
User = auth.get_user_model()
logger = logging.getLogger(__name__)
def get_options_serializer_class(service_type):
    """Return the ServiceOptionsSerializer subclass registered for service_type.

    Raises StopIteration (as the original `next()` form did) when no
    subclass matches.
    """
    for candidate in ServiceOptionsSerializer.get_subclasses():
        if get_service_type(candidate) == service_type:
            return candidate
    raise StopIteration
@lru_cache
def get_resource_serializer_class(resource_type):
    """Return the resource serializer registered for resource_type, or None.

    Results are memoized because the subclass registry does not change at
    runtime.
    """
    for candidate in BaseResourceSerializer.get_subclasses():
        if get_resource_type(candidate.Meta.model) != resource_type:
            continue
        if get_service_type(candidate) is None:
            continue
        return candidate
    return None
class PermissionFieldFilteringMixin:
    """
    Mixin that narrows related-field querysets to what the request user may see.

    In order to constrain the list of entities usable as a field value:

    1. Make sure the entity in question has a corresponding Permission class.
    2. Override `get_filtered_field_names()` in the mixing class and return
       the names of the fields to filter.
    """

    def get_fields(self):
        fields = super().get_fields()
        try:
            user = self.context['request'].user
        except (KeyError, AttributeError):
            # No request in the serializer context - leave fields untouched.
            return fields
        for name in self.get_filtered_field_names():
            # A field may be absent when it is not required by the user.
            if name not in fields:
                continue
            fields[name].queryset = filter_queryset_for_user(
                fields[name].queryset, user
            )
        return fields

    def get_filtered_field_names(self):
        raise NotImplementedError(
            'Implement get_filtered_field_names() to return list of filtered fields'
        )
class PermissionListSerializer(serializers.ListSerializer):
    """
    Filters a related queryset down to what the request user is allowed to see.

    Counterpart of PermissionFieldFilteringMixin. Activate it via
    Meta.list_serializer_class:

    >>> class PermissionProjectSerializer(BasicProjectSerializer):
    >>>     class Meta(BasicProjectSerializer.Meta):
    >>>         list_serializer_class = PermissionListSerializer
    >>>
    >>> class CustomerSerializer(serializers.HyperlinkedModelSerializer):
    >>>     projects = PermissionProjectSerializer(many=True, read_only=True)
    """

    def to_representation(self, data):
        try:
            user = self.context['request'].user
        except (KeyError, AttributeError):
            # No request in context (e.g. nested usage) - render unfiltered.
            return super(PermissionListSerializer, self).to_representation(data)
        if isinstance(data, (django_models.Manager, django_models.query.QuerySet)):
            data = filter_queryset_for_user(data.all(), user)
        return super(PermissionListSerializer, self).to_representation(data)
class BasicUserSerializer(serializers.HyperlinkedModelSerializer):
    """Compact user representation for embedding in related payloads."""
    class Meta:
        model = User
        fields = (
            'url',
            'uuid',
            'username',
            'full_name',
            'native_name',
        )
        extra_kwargs = {
            # Users are addressed by uuid, not numeric pk, in URLs.
            'url': {'lookup_field': 'uuid'},
        }
class BasicProjectSerializer(core_serializers.BasicInfoSerializer):
    """Minimal project info (fields defined by BasicInfoSerializer)."""
    class Meta(core_serializers.BasicInfoSerializer.Meta):
        model = models.Project
class PermissionProjectSerializer(BasicProjectSerializer):
    """Basic project serializer whose list output is filtered per request user."""
    class Meta(BasicProjectSerializer.Meta):
        list_serializer_class = PermissionListSerializer
class ProjectTypeSerializer(serializers.HyperlinkedModelSerializer):
    """Read serializer for project types."""
    class Meta:
        model = models.ProjectType
        fields = ('uuid', 'url', 'name', 'description')
        extra_kwargs = {
            'url': {'lookup_field': 'uuid', 'view_name': 'project_type-detail'},
        }
class ProjectDetailsSerializerMixin(serializers.Serializer):
    """Shared field validation for project create/update serializers."""

    def validate_description(self, value):
        # Sanitized via waldur_core.core.clean_html before storage.
        return clean_html(value.strip())

    def validate_end_date(self, end_date):
        # Empty / None end dates pass through unchanged.
        # NOTE(review): timezone.datetime.today().date() is the server-local
        # naive date - confirm whether timezone.now().date() was intended.
        if end_date and end_date < timezone.datetime.today().date():
            raise serializers.ValidationError(
                {'end_date': _('Cannot be earlier than the current date.')}
            )
        return end_date
class ProjectSerializer(
    ProjectDetailsSerializerMixin,
    core_serializers.RestrictedSerializerMixin,
    PermissionFieldFilteringMixin,
    core_serializers.AugmentedSerializerMixin,
    serializers.HyperlinkedModelSerializer,
):
    """Full project representation with customer and type details flattened in."""

    class Meta:
        model = models.Project
        fields = (
            'url',
            'uuid',
            'name',
            'customer',
            'customer_uuid',
            'customer_name',
            'customer_native_name',
            'customer_abbreviation',
            'description',
            'created',
            'type',
            'type_name',
            'type_uuid',
            'backend_id',
            'end_date',
            'oecd_fos_2007_code',
        )
        extra_kwargs = {
            'url': {'lookup_field': 'uuid'},
            'customer': {'lookup_field': 'uuid'},
            'type': {'lookup_field': 'uuid', 'view_name': 'project_type-detail'},
        }
        # Flattened read-only attributes pulled from the related objects
        # (handled by AugmentedSerializerMixin).
        related_paths = {
            'customer': ('uuid', 'name', 'native_name', 'abbreviation'),
            'type': ('name', 'uuid'),
        }

    @staticmethod
    def eager_load(queryset, request=None):
        # Select only the columns this serializer renders to avoid N+1 queries.
        related_fields = (
            'uuid',
            'name',
            'created',
            'description',
            'customer__uuid',
            'customer__name',
            'customer__native_name',
            'customer__abbreviation',
        )
        return queryset.select_related('customer').only(*related_fields)

    def get_filtered_field_names(self):
        # Limit customer choices to customers visible to the request user.
        return ('customer',)

    def validate(self, attrs):
        # On update, the customer comes from the existing instance.
        customer = (
            attrs.get('customer') if not self.instance else self.instance.customer
        )
        end_date = attrs.get('end_date')
        if end_date:
            # Presumably raises unless the request user owns the customer -
            # see structure_permissions.is_owner.
            structure_permissions.is_owner(self.context['request'], None, customer)
        return attrs
class CountrySerializerMixin(serializers.Serializer):
    """Adds an optional `country` choice field plus its display name.

    The choice list is optionally narrowed at import time to the countries
    whitelisted in the WALDUR_CORE['COUNTRIES'] setting.
    """

    COUNTRIES = core_fields.COUNTRIES
    if settings.WALDUR_CORE.get('COUNTRIES'):
        # Keep only (code, label) pairs whose code is whitelisted.
        COUNTRIES = [
            item for item in COUNTRIES if item[0] in settings.WALDUR_CORE['COUNTRIES']
        ]
    country = serializers.ChoiceField(
        required=False, choices=COUNTRIES, allow_blank=True
    )
    country_name = serializers.ReadOnlyField(source='get_country_display')
class CustomerSerializer(
    ProtectedMediaSerializerMixin,
    CountrySerializerMixin,
    core_serializers.RestrictedSerializerMixin,
    core_serializers.AugmentedSerializerMixin,
    serializers.HyperlinkedModelSerializer,
):
    """Full customer (organization) serializer.

    Includes nested projects and permission holders, flattened division
    attributes, staff-only billing fields, and EU VAT number validation.
    """

    projects = PermissionProjectSerializer(many=True, read_only=True)
    owners = BasicUserSerializer(source='get_owners', many=True, read_only=True)
    support_users = BasicUserSerializer(
        source='get_support_users', many=True, read_only=True
    )
    service_managers = BasicUserSerializer(
        source='get_service_managers', many=True, read_only=True
    )

    display_name = serializers.ReadOnlyField(source='get_display_name')
    division_name = serializers.ReadOnlyField(source='division.name')
    division_uuid = serializers.ReadOnlyField(source='division.uuid')
    division_parent_name = serializers.ReadOnlyField(source='division.parent.name')
    division_parent_uuid = serializers.ReadOnlyField(source='division.parent.uuid')
    division_type_name = serializers.ReadOnlyField(source='division.type.name')
    division_type_uuid = serializers.ReadOnlyField(source='division.type.uuid')

    class Meta:
        model = models.Customer
        fields = (
            'url',
            'uuid',
            'created',
            'division',
            'division_name',
            'division_uuid',
            'division_parent_name',
            'division_parent_uuid',
            'division_type_name',
            'division_type_uuid',
            'display_name',
            'projects',
            'owners',
            'support_users',
            'service_managers',
            'backend_id',
            'image',
            'default_tax_percent',
            'accounting_start_date',
        ) + CUSTOMER_DETAILS_FIELDS
        # Writable only by staff; rendered read-only for everyone else
        # (see get_fields below).
        staff_only_fields = (
            'access_subnets',
            'accounting_start_date',
            'default_tax_percent',
            'agreement_number',
            'domain',
            'division',
        )
        extra_kwargs = {
            'url': {'lookup_field': 'uuid'},
            'division': {'lookup_field': 'uuid'},
        }

    def get_fields(self):
        """Mark staff-only fields read-only for non-staff requesters."""
        fields = super(CustomerSerializer, self).get_fields()

        try:
            request = self.context['view'].request
            user = request.user
        except (KeyError, AttributeError):
            # No request in context (e.g. schema generation) - leave as-is.
            return fields

        if not user.is_staff:
            for field_name in set(CustomerSerializer.Meta.staff_only_fields) & set(
                fields.keys()
            ):
                fields[field_name].read_only = True

        return fields

    def create(self, validated_data):
        user = self.context['request'].user
        if 'domain' not in validated_data:
            # Staff can specify domain name on organization creation
            validated_data['domain'] = user.organization
        return super(CustomerSerializer, self).create(validated_data)

    @staticmethod
    def eager_load(queryset, request=None):
        return queryset.prefetch_related('projects')

    def validate(self, attrs):
        """Validate VAT number format and check it against the EU VIES service."""
        country = attrs.get('country')
        vat_code = attrs.get('vat_code')
        if vat_code:
            # Check VAT format
            if not pyvat.is_vat_number_format_valid(vat_code, country):
                raise serializers.ValidationError(
                    {'vat_code': _('VAT number has invalid format.')}
                )

            # Check VAT number in EU VAT Information Exchange System
            # if customer is new or either VAT number or country of the customer has changed
            if (
                not self.instance
                or self.instance.vat_code != vat_code
                or self.instance.country != country
            ):
                check_result = pyvat.check_vat_number(vat_code, country)
                if check_result.is_valid:
                    attrs['vat_name'] = check_result.business_name
                    attrs['vat_address'] = check_result.business_address
                    if not attrs.get('contact_details'):
                        attrs['contact_details'] = attrs['vat_address']
                elif check_result.is_valid is False:
                    raise serializers.ValidationError(
                        {'vat_code': _('VAT number is invalid.')}
                    )
                else:
                    # is_valid is None: the remote check was inconclusive.
                    logger.debug(
                        'Unable to check VAT number %s for country %s. Error message: %s',
                        vat_code,
                        country,
                        check_result.log_lines,
                    )
                    raise serializers.ValidationError(
                        {'vat_code': _('Unable to check VAT number.')}
                    )
        return attrs
class NestedCustomerSerializer(
    core_serializers.AugmentedSerializerMixin,
    core_serializers.HyperlinkedRelatedModelSerializer,
):
    """Customer reference (uuid + url) for embedding inside other payloads."""

    class Meta:
        model = models.Customer
        fields = ('uuid', 'url')
        extra_kwargs = {
            'url': {'lookup_field': 'uuid'},
        }
class NestedProjectSerializer(
    core_serializers.AugmentedSerializerMixin,
    core_serializers.HyperlinkedRelatedModelSerializer,
):
    """Project reference (uuid + url) for embedding inside other payloads."""

    class Meta:
        model = models.Project
        fields = ('uuid', 'url')
        extra_kwargs = {
            'url': {'lookup_field': 'uuid'},
        }
class NestedProjectPermissionSerializer(serializers.ModelSerializer):
    """Project permission entry rendered from the project's point of view.

    `url`/`uuid`/`name` describe the project; `permission` links to the
    permission object itself.
    """

    url = serializers.HyperlinkedRelatedField(
        source='project',
        lookup_field='uuid',
        view_name='project-detail',
        queryset=models.Project.objects.all(),
    )
    uuid = serializers.ReadOnlyField(source='project.uuid')
    name = serializers.ReadOnlyField(source='project.name')
    permission = serializers.HyperlinkedRelatedField(
        source='pk',
        view_name='project_permission-detail',
        queryset=models.ProjectPermission.objects.all(),
    )

    class Meta:
        model = models.ProjectPermission
        fields = ['url', 'uuid', 'name', 'role', 'permission', 'expiration_time']
class CustomerUserSerializer(serializers.ModelSerializer):
    """User as seen in the context of one customer.

    Expects `customer` in the serializer context; annotates each user
    instance with its customer role, project permissions and service
    manager flag before rendering.
    """

    role = serializers.ReadOnlyField()
    is_service_manager = serializers.ReadOnlyField()
    expiration_time = serializers.ReadOnlyField(source='perm.expiration_time')
    permission = serializers.HyperlinkedRelatedField(
        source='perm.pk', view_name='customer_permission-detail', read_only=True,
    )
    projects = NestedProjectPermissionSerializer(many=True, read_only=True)

    class Meta:
        model = User
        fields = [
            'url',
            'uuid',
            'username',
            'full_name',
            'email',
            'role',
            'permission',
            'projects',
            'is_service_manager',
            'expiration_time',
        ]
        extra_kwargs = {
            'url': {'lookup_field': 'uuid'},
        }

    def to_representation(self, user):
        customer = self.context['customer']
        permission = models.CustomerPermission.objects.filter(
            customer=customer, user=user, is_active=True
        ).first()
        projects = models.ProjectPermission.objects.filter(
            project__customer=customer, user=user, is_active=True
        )
        is_service_manager = customer.has_user(
            user, role=models.CustomerRole.SERVICE_MANAGER
        )
        # Attach the computed values so the declared fields above can read
        # them via their `source` paths.
        setattr(user, 'perm', permission)
        setattr(user, 'role', permission and permission.role)
        setattr(user, 'projects', projects)
        setattr(user, 'is_service_manager', is_service_manager)
        return super(CustomerUserSerializer, self).to_representation(user)
class ProjectUserSerializer(serializers.ModelSerializer):
    """User as seen in the context of one project.

    Expects `project` in the serializer context; annotates each user with
    its active project permission before rendering.
    """

    role = serializers.ReadOnlyField()
    expiration_time = serializers.ReadOnlyField(source='perm.expiration_time')
    permission = serializers.HyperlinkedRelatedField(
        source='perm.pk',
        view_name='project_permission-detail',
        queryset=models.ProjectPermission.objects.all(),
    )

    class Meta:
        model = User
        fields = [
            'url',
            'uuid',
            'username',
            'full_name',
            'email',
            'role',
            'permission',
            'expiration_time',
        ]
        extra_kwargs = {
            'url': {'lookup_field': 'uuid'},
        }

    def to_representation(self, user):
        project = self.context['project']
        active_perm = models.ProjectPermission.objects.filter(
            project=project, user=user, is_active=True
        ).first()
        # Attach the permission so the declared fields can resolve their
        # `source` paths (`perm.pk`, `perm.expiration_time`, `role`).
        user.perm = active_perm
        user.role = active_perm and active_perm.role
        return super(ProjectUserSerializer, self).to_representation(user)
class BasePermissionSerializer(
    core_serializers.AugmentedSerializerMixin, serializers.HyperlinkedModelSerializer
):
    """Common base for permission serializers: flattened user attributes."""

    class Meta:
        fields = (
            'user',
            'user_full_name',
            'user_native_name',
            'user_username',
            'user_uuid',
            'user_email',
        )
        related_paths = {
            'user': ('username', 'full_name', 'native_name', 'uuid', 'email'),
        }
class BasicCustomerPermissionSerializer(BasePermissionSerializer):
    """Customer permission with flattened customer attributes (no user info)."""

    class Meta(BasePermissionSerializer.Meta):
        model = models.CustomerPermission
        fields = (
            'url',
            'pk',
            'role',
            'customer_uuid',
            'customer_name',
            'customer_native_name',
            'customer_abbreviation',
        )
        related_paths = dict(
            customer=('name', 'native_name', 'abbreviation', 'uuid'),
            **BasePermissionSerializer.Meta.related_paths
        )
        extra_kwargs = {
            'customer': {
                'view_name': 'customer-detail',
                'lookup_field': 'uuid',
                'queryset': models.Customer.objects.all(),
            }
        }
class CustomerPermissionSerializer(
    PermissionFieldFilteringMixin, BasePermissionSerializer
):
    """Full customer permission serializer: creation, uniqueness and
    expiration validation."""

    class Meta(BasePermissionSerializer.Meta):
        model = models.CustomerPermission
        fields = (
            'url',
            'pk',
            'role',
            'created',
            'expiration_time',
            'created_by',
            'customer',
            'customer_uuid',
            'customer_name',
            'customer_native_name',
            'customer_abbreviation',
        ) + BasePermissionSerializer.Meta.fields
        related_paths = dict(
            customer=('name', 'native_name', 'abbreviation', 'uuid'),
            **BasePermissionSerializer.Meta.related_paths
        )
        # These fields are immutable once the permission is granted.
        protected_fields = ('customer', 'role', 'user', 'created_by', 'created')
        extra_kwargs = {
            'user': {
                'view_name': 'user-detail',
                'lookup_field': 'uuid',
                'queryset': User.objects.all(),
            },
            'created_by': {
                'view_name': 'user-detail',
                'lookup_field': 'uuid',
                'read_only': True,
            },
            'customer': {
                'view_name': 'customer-detail',
                'lookup_field': 'uuid',
                'queryset': models.Customer.objects.all(),
            },
        }

    def validate(self, data):
        """Reject duplicate (customer, user) permission pairs on creation."""
        if not self.instance:
            customer = data['customer']
            user = data['user']

            if customer.has_user(user):
                raise serializers.ValidationError(
                    _('The fields customer and user must make a unique set.')
                )

        return data

    def create(self, validated_data):
        customer = validated_data['customer']
        user = validated_data['user']
        role = validated_data['role']
        expiration_time = validated_data.get('expiration_time')

        created_by = self.context['request'].user

        # Renamed the throwaway unpack target from `_` to `_created`:
        # `_` is the module-level gettext alias and must not be shadowed.
        permission, _created = customer.add_user(
            user, role, created_by, expiration_time
        )
        return permission

    def validate_expiration_time(self, value):
        """Expiration, when given, must be in the future."""
        if value is not None and value < timezone.now():
            raise serializers.ValidationError(
                _('Expiration time should be greater than current time.')
            )

        return value

    def get_filtered_field_names(self):
        # Limit the `customer` choice list to customers visible to the user.
        return ('customer',)
class CustomerPermissionLogSerializer(CustomerPermissionSerializer):
    """Same payload as CustomerPermissionSerializer, routed to the log view."""

    class Meta(CustomerPermissionSerializer.Meta):
        view_name = 'customer_permission_log-detail'
class CustomerPermissionReviewSerializer(
    core_serializers.AugmentedSerializerMixin, serializers.HyperlinkedModelSerializer
):
    """Read-mostly serializer for periodic customer permission reviews."""

    class Meta:
        model = models.CustomerPermissionReview
        view_name = 'customer_permission_review-detail'
        fields = (
            'url',
            'uuid',
            'reviewer_full_name',
            'reviewer_uuid',
            'customer_uuid',
            'customer_name',
            'is_pending',
            'created',
            'closed',
        )
        read_only_fields = (
            'is_pending',
            'closed',
        )
        related_paths = {
            'reviewer': ('full_name', 'uuid'),
            'customer': ('name', 'uuid'),
        }
        extra_kwargs = {
            'url': {'lookup_field': 'uuid'},
        }
class ProjectPermissionSerializer(
    PermissionFieldFilteringMixin, BasePermissionSerializer
):
    """Full project permission serializer: creation, uniqueness and
    expiration validation."""

    customer_name = serializers.ReadOnlyField(source='project.customer.name')

    class Meta(BasePermissionSerializer.Meta):
        model = models.ProjectPermission
        fields = (
            'url',
            'pk',
            'role',
            'created',
            'expiration_time',
            'created_by',
            'project',
            'project_uuid',
            'project_name',
            'customer_name',
        ) + BasePermissionSerializer.Meta.fields
        related_paths = dict(
            project=('name', 'uuid'), **BasePermissionSerializer.Meta.related_paths
        )
        # These fields are immutable once the permission is granted.
        protected_fields = ('project', 'role', 'user', 'created_by', 'created')
        extra_kwargs = {
            'user': {
                'view_name': 'user-detail',
                'lookup_field': 'uuid',
                'queryset': User.objects.all(),
            },
            'created_by': {
                'view_name': 'user-detail',
                'lookup_field': 'uuid',
                'read_only': True,
            },
            'project': {
                'view_name': 'project-detail',
                'lookup_field': 'uuid',
                'queryset': models.Project.objects.all(),
            },
        }

    def validate(self, data):
        """Reject duplicate (project, user) permission pairs on creation."""
        if not self.instance:
            project = data['project']
            user = data['user']

            if project.has_user(user):
                raise serializers.ValidationError(
                    _('The fields project and user must make a unique set.')
                )

        return data

    def create(self, validated_data):
        project = validated_data['project']
        user = validated_data['user']
        role = validated_data['role']
        expiration_time = validated_data.get('expiration_time')

        created_by = self.context['request'].user

        # Renamed the throwaway unpack target from `_` to `_created`:
        # `_` is the module-level gettext alias and must not be shadowed.
        permission, _created = project.add_user(
            user, role, created_by, expiration_time
        )
        return permission

    def validate_expiration_time(self, value):
        """Expiration, when given, must be in the future."""
        if value is not None and value < timezone.now():
            raise serializers.ValidationError(
                _('Expiration time should be greater than current time.')
            )

        return value

    def get_filtered_field_names(self):
        # Limit the `project` choice list to projects visible to the user.
        return ('project',)
class BasicProjectPermissionSerializer(BasePermissionSerializer):
    """Project permission with flattened project attributes (no user info)."""

    customer_name = serializers.ReadOnlyField(source='project.customer.name')

    class Meta(BasePermissionSerializer.Meta):
        model = models.ProjectPermission
        fields = (
            'url',
            'pk',
            'role',
            'project_uuid',
            'project_name',
            'customer_name',
        )
        related_paths = dict(
            project=('name', 'uuid'), **BasePermissionSerializer.Meta.related_paths
        )
        extra_kwargs = {
            'project': {
                'view_name': 'project-detail',
                'lookup_field': 'uuid',
                'queryset': models.Project.objects.all(),
            }
        }
class ProjectPermissionLogSerializer(ProjectPermissionSerializer):
    """Same payload as ProjectPermissionSerializer, routed to the log view."""

    class Meta(ProjectPermissionSerializer.Meta):
        view_name = 'project_permission_log-detail'
class UserSerializer(
    core_serializers.AugmentedSerializerMixin, serializers.HyperlinkedModelSerializer
):
    """Full user serializer with role lists, policy agreement handling and
    per-requester field visibility (staff / support / self)."""

    email = serializers.EmailField()
    agree_with_policy = serializers.BooleanField(
        write_only=True,
        required=False,
        help_text=_('User must agree with the policy to register.'),
    )
    competence = serializers.ChoiceField(
        choices=settings.WALDUR_CORE.get('USER_COMPETENCE_LIST', []),
        allow_blank=True,
        required=False,
    )
    token = serializers.ReadOnlyField(source='auth_token.key')
    customer_permissions = serializers.SerializerMethodField()
    project_permissions = serializers.SerializerMethodField()
    requested_email = serializers.SerializerMethodField()
    full_name = serializers.CharField(max_length=200, required=False)

    def get_customer_permissions(self, user):
        """Active customer permissions of the user, in basic form."""
        permissions = models.CustomerPermission.objects.filter(
            user=user, is_active=True
        ).select_related('customer')
        serializer = BasicCustomerPermissionSerializer(
            instance=permissions, many=True, context=self.context
        )
        return serializer.data

    def get_project_permissions(self, user):
        """Active project permissions of the user, in basic form."""
        permissions = models.ProjectPermission.objects.filter(
            user=user, is_active=True
        ).select_related('project')
        serializer = BasicProjectPermissionSerializer(
            instance=permissions, many=True, context=self.context
        )
        return serializer.data

    def get_requested_email(self, user):
        """Pending email-change request, or None when there is none."""
        try:
            requested_email = core_models.ChangeEmailRequest.objects.get(user=user)
            return requested_email.email
        except core_models.ChangeEmailRequest.DoesNotExist:
            pass

    class Meta:
        model = User
        fields = (
            'url',
            'uuid',
            'username',
            'full_name',
            'native_name',
            'job_title',
            'email',
            'phone_number',
            'organization',
            'civil_number',
            'description',
            'is_staff',
            'is_active',
            'is_support',
            'token',
            'token_lifetime',
            'registration_method',
            'date_joined',
            'agree_with_policy',
            'agreement_date',
            'preferred_language',
            'competence',
            'customer_permissions',
            'project_permissions',
            'requested_email',
            'affiliations',
            'first_name',
            'last_name',
        )
        read_only_fields = (
            'uuid',
            'civil_number',
            'registration_method',
            'date_joined',
            'agreement_date',
            'customer_permissions',
            'project_permissions',
            'affiliations',
            'first_name',
            'last_name',
        )
        extra_kwargs = {
            'url': {'lookup_field': 'uuid'},
        }
        # Email changes go through ChangeEmailRequest, not direct update.
        protected_fields = ('email',)

    def get_fields(self):
        """Adjust field visibility/mutability based on the requester."""
        fields = super(UserSerializer, self).get_fields()

        try:
            request = self.context['view'].request
            user = request.user
        except (KeyError, AttributeError):
            return fields

        if user.is_anonymous:
            return fields

        if not user.is_staff:
            # Support users see these fields read-only; everyone else
            # (regular users) doesn't see them at all.
            protected_fields = ('is_active', 'is_staff', 'is_support', 'description')
            if user.is_support:
                for field in protected_fields:
                    if field in fields:
                        fields[field].read_only = True
            else:
                for field in protected_fields:
                    if field in fields:
                        del fields[field]

        if not self._can_see_token(user):
            del fields['token']
            del fields['token_lifetime']

        if request.method in ('PUT', 'PATCH'):
            fields['username'].read_only = True
            # Identity details are locked for users registered via certain
            # external methods (configured per deployment).
            protected_methods = settings.WALDUR_CORE[
                'PROTECT_USER_DETAILS_FOR_REGISTRATION_METHODS'
            ]
            if (
                user.registration_method
                and user.registration_method in protected_methods
            ):
                detail_fields = (
                    'full_name',
                    'native_name',
                    'job_title',
                    'email',
                    'phone_number',
                    'organization',
                )
                for field in detail_fields:
                    fields[field].read_only = True

        return fields

    def _can_see_token(self, user):
        # Nobody apart from the user herself can see her token.
        # User can see the token either via details view or /api/users/me
        if isinstance(self.instance, list) and len(self.instance) == 1:
            return self.instance[0] == user
        else:
            return self.instance == user

    def validate(self, attrs):
        """Enforce one-time policy agreement and run model-level clean()."""
        agree_with_policy = attrs.pop('agree_with_policy', False)
        if self.instance and not self.instance.agreement_date:
            if not agree_with_policy:
                raise serializers.ValidationError(
                    {'agree_with_policy': _('User must agree with the policy.')}
                )
            else:
                attrs['agreement_date'] = timezone.now()

        # Convert validation error from Django to DRF
        # https://github.com/tomchristie/django-rest-framework/issues/2145
        try:
            user = User(id=getattr(self.instance, 'id', None), **attrs)
            user.clean()
        except django_exceptions.ValidationError as error:
            raise exceptions.ValidationError(error.message_dict)
        return attrs
class UserEmailChangeSerializer(serializers.Serializer):
    """Input serializer for requesting an email change."""

    email = serializers.EmailField()
class PasswordSerializer(serializers.Serializer):
    """Input serializer for setting a password.

    Requires at least 7 characters with at least one digit and one latin
    letter.
    """

    password = serializers.CharField(
        min_length=7,
        validators=[
            RegexValidator(
                regex=r'\d', message=_('Ensure this field has at least one digit.'),
            ),
            RegexValidator(
                regex='[a-zA-Z]',
                message=_('Ensure this field has at least one latin letter.'),
            ),
        ],
    )
class SshKeySerializer(serializers.HyperlinkedModelSerializer):
    """SSH public key serializer with single-line and fingerprint validation."""

    user_uuid = serializers.ReadOnlyField(source='user.uuid')

    class Meta:
        model = core_models.SshPublicKey
        fields = (
            'url',
            'uuid',
            'name',
            'public_key',
            'fingerprint',
            'user_uuid',
            'is_shared',
            'type',
        )
        read_only_fields = ('fingerprint', 'is_shared')
        extra_kwargs = {
            'url': {'lookup_field': 'uuid'},
        }

    def validate_name(self, value):
        # Names are stored without surrounding whitespace.
        return value.strip()

    def validate_public_key(self, value):
        """Ensure the key is a single line, parseable, and not a duplicate."""
        value = value.strip()

        if len(value.splitlines()) > 1:
            raise serializers.ValidationError(
                _('Key is not valid: it should be single line.')
            )

        try:
            fingerprint = core_models.get_ssh_key_fingerprint(value)
        except (IndexError, TypeError):
            raise serializers.ValidationError(
                _('Key is not valid: cannot generate fingerprint from it.')
            )

        duplicate = core_models.SshPublicKey.objects.filter(
            fingerprint=fingerprint
        ).exists()
        if duplicate:
            raise serializers.ValidationError(
                _('Key with same fingerprint already exists.')
            )

        return value
class MoveProjectSerializer(serializers.Serializer):
    """Input serializer for moving a project to another customer."""

    customer = NestedCustomerSerializer(
        queryset=models.Customer.objects.all(), required=True, many=False
    )
class ServiceOptionsSerializer(serializers.Serializer):
    """Base class for per-service-type settings options serializers.

    Subclasses declare `Meta.secret_fields` for options hidden from
    non-privileged users.
    """

    class Meta:
        secret_fields = ()

    @classmethod
    def get_subclasses(cls):
        # Depth-first traversal: descendants are yielded before their parent.
        for subclass in cls.__subclasses__():
            yield from subclass.get_subclasses()
            yield subclass
class ServiceSettingsSerializer(
    PermissionFieldFilteringMixin,
    core_serializers.RestrictedSerializerMixin,
    core_serializers.AugmentedSerializerMixin,
    serializers.HyperlinkedModelSerializer,
):
    """Service settings serializer.

    On read, credential-bearing options are hidden from users who are not
    staff or owners of the settings' customer. On write, the `options`
    payload is validated by the service-type-specific options serializer
    and the resulting settings are checked against the backend.
    """

    customer_native_name = serializers.ReadOnlyField(source='customer.native_name')
    state = MappedChoiceField(
        choices=[(v, k) for k, v in core_models.StateMixin.States.CHOICES],
        choice_mappings={v: k for k, v in core_models.StateMixin.States.CHOICES},
        read_only=True,
    )
    scope = core_serializers.GenericRelatedField(
        related_models=models.BaseResource.get_all_models(),
        required=False,
        allow_null=True,
    )
    options = serializers.DictField()

    class Meta:
        model = models.ServiceSettings
        fields = (
            'url',
            'uuid',
            'name',
            'type',
            'state',
            'error_message',
            'shared',
            'customer',
            'customer_name',
            'customer_native_name',
            'terms_of_services',
            'scope',
            'options',
        )
        protected_fields = ('type', 'customer')
        read_only_fields = ('shared', 'state', 'error_message')
        related_paths = ('customer',)
        extra_kwargs = {
            'url': {'lookup_field': 'uuid'},
            'customer': {'lookup_field': 'uuid'},
        }

    def get_filtered_field_names(self):
        return ('customer',)

    @staticmethod
    def eager_load(queryset, request=None):
        return queryset.select_related('customer')

    def get_fields(self):
        fields = super(ServiceSettingsSerializer, self).get_fields()
        # On reads, replace the plain DictField with a method field so the
        # secret-filtering logic in get_options applies.
        method = self.context['view'].request.method
        if method == 'GET' and 'options' in fields:
            fields['options'] = serializers.SerializerMethodField('get_options')
        return fields

    def get_options(self, service):
        """Return options, stripping secret fields for non-privileged users."""
        options = {
            'backend_url': service.backend_url,
            'username': service.username,
            'password': service.password,
            'domain': service.domain,
            'token': service.token,
            **service.options,
        }
        request = self.context['request']

        if request.user.is_staff:
            return options

        if service.customer and service.customer.has_user(
            request.user, models.CustomerRole.OWNER
        ):
            return options

        options_serializer_class = get_options_serializer_class(service.type)
        secret_fields = options_serializer_class.Meta.secret_fields
        return {k: v for (k, v) in options.items() if k not in secret_fields}

    def validate(self, attrs):
        if 'options' not in attrs:
            return attrs

        # On update the service type is fixed; on create it comes from input.
        service_type = self.instance and self.instance.type or attrs['type']
        options_serializer_class = get_options_serializer_class(service_type)
        options_serializer = options_serializer_class(
            instance=self.instance, data=attrs['options'], context=self.context
        )
        options_serializer.is_valid(raise_exception=True)
        service_options = options_serializer.validated_data
        attrs.update(service_options)
        self._validate_settings(models.ServiceSettings(**attrs))
        return attrs

    def _validate_settings(self, service_settings):
        # Probe the backend with the candidate settings; backends without a
        # validation implementation are accepted as-is.
        try:
            backend = service_settings.get_backend()
            backend.validate_settings()
        except ServiceBackendError as e:
            raise serializers.ValidationError(_('Wrong settings: %s.') % e)
        except ServiceBackendNotImplemented:
            pass
class BasicResourceSerializer(serializers.Serializer):
    """Minimal resource representation: uuid, name and resource type."""

    uuid = serializers.ReadOnlyField()
    name = serializers.ReadOnlyField()
    resource_type = serializers.SerializerMethodField()

    def get_resource_type(self, resource):
        return get_resource_type(resource)
class ManagedResourceSerializer(BasicResourceSerializer):
    """Basic resource representation extended with project/customer context."""

    project_name = serializers.ReadOnlyField(source='project.name')
    project_uuid = serializers.ReadOnlyField(source='project.uuid')

    customer_uuid = serializers.ReadOnlyField(source='project.customer.uuid')
    customer_name = serializers.ReadOnlyField(source='project.customer.name')
class TagList(list):
    """List subclass whose string form is a JSON array.

    This class serializes tags as JSON list as the last step of
    serialization process.
    """

    def __str__(self):
        # Render the contained tags as a JSON document instead of the
        # default Python list repr.
        return json.dumps(self)
class TagSerializer(serializers.Serializer):
    """Persists the `tags` field through the django-taggit API.

    Tags are not a regular model field, so they are popped from the
    validated data and applied after the instance is saved.
    """

    def create(self, validated_data):
        has_tags = 'tags' in validated_data
        tags = validated_data.pop('tags') if has_tags else None
        instance = super(TagSerializer, self).create(validated_data)
        if has_tags:
            instance.tags.set(*tags)
        return instance

    def update(self, instance, validated_data):
        has_tags = 'tags' in validated_data
        tags = validated_data.pop('tags') if has_tags else None
        instance = super(TagSerializer, self).update(instance, validated_data)
        if has_tags:
            instance.tags.set(*tags)
        return instance
class TagListSerializerField(serializers.Field):
    """DRF field that accepts tags as a JSON list (or a list of strings)
    and renders them as a TagList."""

    child = serializers.CharField()
    default_error_messages = {
        'not_a_list': _('Expected a list of items but got type "{input_type}".'),
        'invalid_json': _(
            'Invalid json list. A tag list submitted in string form must be valid json.'
        ),
        'not_a_str': _('All list items must be of string type.'),
    }

    def to_internal_value(self, value):
        if isinstance(value, str):
            # Treat empty string as an empty tag list; otherwise the string
            # must be a JSON-encoded list.
            if not value:
                value = '[]'
            try:
                value = json.loads(value)
            except ValueError:
                self.fail('invalid_json')

        if not isinstance(value, list):
            self.fail('not_a_list', input_type=type(value).__name__)

        for s in value:
            if not isinstance(s, str):
                self.fail('not_a_str')

            # Apply the child CharField's own validation to each tag.
            self.child.run_validation(s)

        return value

    def get_attribute(self, instance):
        """
        Fetch tags from cache defined in TagMixin.
        """
        return instance.get_tags()

    def to_representation(self, value):
        # Wrap in TagList so str() of the representation is valid JSON.
        if not isinstance(value, TagList):
            value = TagList(value)
        return value
class BaseResourceSerializer(
    core_serializers.RestrictedSerializerMixin,
    PermissionFieldFilteringMixin,
    core_serializers.AugmentedSerializerMixin,
    TagSerializer,
    serializers.HyperlinkedModelSerializer,
):
    """Abstract base serializer for all backend resources.

    Subclasses set Meta.model to a concrete resource model; this base
    wires up project/customer/service-settings context, tags and state.
    """

    state = serializers.ReadOnlyField(source='get_state_display')

    project = serializers.HyperlinkedRelatedField(
        queryset=models.Project.objects.all(),
        view_name='project-detail',
        lookup_field='uuid',
    )

    project_name = serializers.ReadOnlyField(source='project.name')
    project_uuid = serializers.ReadOnlyField(source='project.uuid')

    service_name = serializers.ReadOnlyField(source='service_settings.name')

    service_settings = serializers.HyperlinkedRelatedField(
        queryset=models.ServiceSettings.objects.all(),
        view_name='servicesettings-detail',
        lookup_field='uuid',
    )
    service_settings_uuid = serializers.ReadOnlyField(source='service_settings.uuid')
    service_settings_state = serializers.ReadOnlyField(
        source='service_settings.human_readable_state'
    )
    service_settings_error_message = serializers.ReadOnlyField(
        source='service_settings.error_message'
    )

    customer = serializers.HyperlinkedRelatedField(
        source='project.customer',
        view_name='customer-detail',
        read_only=True,
        lookup_field='uuid',
    )

    customer_name = serializers.ReadOnlyField(source='project.customer.name')
    customer_abbreviation = serializers.ReadOnlyField(
        source='project.customer.abbreviation'
    )
    customer_native_name = serializers.ReadOnlyField(
        source='project.customer.native_name'
    )

    created = serializers.DateTimeField(read_only=True)
    resource_type = serializers.SerializerMethodField()

    tags = TagListSerializerField(required=False)
    access_url = serializers.SerializerMethodField()

    class Meta:
        model = NotImplemented
        fields = (
            'url',
            'uuid',
            'name',
            'description',
            'service_name',
            'service_settings',
            'service_settings_uuid',
            'service_settings_state',
            'service_settings_error_message',
            'project',
            'project_name',
            'project_uuid',
            'customer',
            'customer_name',
            'customer_native_name',
            'customer_abbreviation',
            'tags',
            'error_message',
            'error_traceback',
            'resource_type',
            'state',
            'created',
            'modified',
            'backend_id',
            'access_url',
        )
        protected_fields = (
            'project',
            'service_settings',
        )
        read_only_fields = ('error_message', 'error_traceback', 'backend_id')
        extra_kwargs = {
            'url': {'lookup_field': 'uuid'},
        }

    def get_filtered_field_names(self):
        return ('project', 'service_settings')

    def get_resource_type(self, obj):
        return get_resource_type(obj)

    def get_resource_fields(self):
        # Names of actual model fields - used to drop serializer-only keys
        # before instantiating the model in create().
        return [f.name for f in self.Meta.model._meta.get_fields()]

    # an optional generic URL for accessing a resource
    def get_access_url(self, obj):
        return obj.get_access_url()

    def get_fields(self):
        fields = super(BaseResourceSerializer, self).get_fields()
        # skip validation on object update
        if not self.instance:
            service_type = get_service_type(self.Meta.model)
            if (
                'service_settings' in fields
                and not fields['service_settings'].read_only
            ):
                # Only settings of the matching service type may be chosen.
                queryset = fields['service_settings'].queryset.filter(type=service_type)
                fields['service_settings'].queryset = queryset
        return fields

    @transaction.atomic
    def create(self, validated_data):
        data = validated_data.copy()
        fields = self.get_resource_fields()

        # Remove `virtual` properties which ain't actually belong to the model
        data = {key: value for key, value in data.items() if key in fields}

        resource = super(BaseResourceSerializer, self).create(data)
        # Quota bump is part of the same transaction as resource creation.
        resource.increase_backend_quotas_usage()
        return resource

    @classmethod
    def get_subclasses(cls):
        # Yields only concrete subclasses (those that set a real model).
        for subclass in cls.__subclasses__():
            yield from subclass.get_subclasses()
            if subclass.Meta.model != NotImplemented:
                yield subclass
class BaseResourceActionSerializer(BaseResourceSerializer):
    """Resource serializer for action endpoints: project and service
    settings are rendered but never writable."""

    project = serializers.HyperlinkedRelatedField(
        view_name='project-detail', lookup_field='uuid', read_only=True,
    )
    service_settings = serializers.HyperlinkedRelatedField(
        view_name='servicesettings-detail', lookup_field='uuid', read_only=True,
    )

    class Meta(BaseResourceSerializer.Meta):
        pass
class SshPublicKeySerializerMixin(serializers.HyperlinkedModelSerializer):
    """Adds a write-only `ssh_public_key` field whose choices are limited
    to keys the requesting user is allowed to see."""

    ssh_public_key = serializers.HyperlinkedRelatedField(
        view_name='sshpublickey-detail',
        lookup_field='uuid',
        queryset=core_models.SshPublicKey.objects.all(),
        required=False,
        write_only=True,
    )

    def get_fields(self):
        fields = super(SshPublicKeySerializerMixin, self).get_fields()
        if 'request' not in self.context:
            return fields

        user = self.context['request'].user
        key_field = fields.get('ssh_public_key')
        if key_field and not user.is_staff:
            # Non-staff users may pick only shared keys or keys that belong
            # to users visible to them.
            visible = list(filter_visible_users(User.objects.all(), user))
            condition = Q(user__in=visible) | Q(is_shared=True)
            key_field.queryset = key_field.queryset.filter(condition)
        return fields
class VirtualMachineSerializer(SshPublicKeySerializerMixin, BaseResourceSerializer):
    """Base serializer for VM-like resources: hardware specs, IPs and
    SSH key provisioning."""

    external_ips = serializers.ListField(
        child=serializers.IPAddressField(protocol='ipv4'), read_only=True,
    )
    internal_ips = serializers.ListField(
        child=serializers.IPAddressField(protocol='ipv4'), read_only=True,
    )

    class Meta(BaseResourceSerializer.Meta):
        fields = BaseResourceSerializer.Meta.fields + (
            'start_time',
            'cores',
            'ram',
            'disk',
            'min_ram',
            'min_disk',
            'ssh_public_key',
            'user_data',
            'external_ips',
            'internal_ips',
            'latitude',
            'longitude',
            'key_name',
            'key_fingerprint',
            'image_name',
        )
        read_only_fields = BaseResourceSerializer.Meta.read_only_fields + (
            'start_time',
            'cores',
            'ram',
            'disk',
            'min_ram',
            'min_disk',
            'external_ips',
            'internal_ips',
            'latitude',
            'longitude',
            'key_name',
            'key_fingerprint',
            'image_name',
        )
        # Provisioning-time inputs; cannot be changed after creation.
        protected_fields = BaseResourceSerializer.Meta.protected_fields + (
            'user_data',
            'ssh_public_key',
        )

    def create(self, validated_data):
        # Snapshot the image name so it survives image deletion on backend.
        if 'image' in validated_data:
            validated_data['image_name'] = validated_data['image'].name
        return super(VirtualMachineSerializer, self).create(validated_data)
class BasePropertySerializer(
    core_serializers.AugmentedSerializerMixin, serializers.HyperlinkedModelSerializer,
):
    """Abstract base for service property serializers; subclasses set Meta.model."""

    class Meta:
        model = NotImplemented
class DivisionSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for divisions (organizational units) with flattened
    type and parent attributes."""

    type = serializers.ReadOnlyField(source='type.name')
    parent_uuid = serializers.ReadOnlyField(source='parent.uuid')
    # Fixed: previously sourced from 'parent.type.name', which exposed the
    # parent's *type* name under a field called parent_name. The parent
    # division's own name is 'parent.name'.
    parent_name = serializers.ReadOnlyField(source='parent.name')

    class Meta:
        model = models.Division
        fields = ('uuid', 'url', 'name', 'type', 'parent_uuid', 'parent_name', 'parent')
        extra_kwargs = {
            'url': {'lookup_field': 'uuid'},
            'parent': {'lookup_field': 'uuid'},
        }
class DivisionTypesSerializer(serializers.HyperlinkedModelSerializer):
    """Read-only dictionary serializer for division types."""

    class Meta:
        model = models.DivisionType
        fields = (
            'uuid',
            'url',
            'name',
        )
        extra_kwargs = {
            # Division types use a non-default route name for detail views.
            'url': {'lookup_field': 'uuid', 'view_name': 'division-type-detail'},
        }
| opennode/waldur-mastermind | src/waldur_core/structure/serializers.py | Python | mit | 47,633 |
import click
from alembic import command as alembic_command
from alembic.util import CommandError
from boiler.cli.colors import *
from boiler.feature.orm import db
from boiler import bootstrap
def get_config():
    """Prepare and return an alembic migrations config.

    Reads the migrations path, database URL and SQLAlchemy metadata from
    the bootstrapped app and wraps them in a MigrationsConfig.

    :raises Exception: when any required configuration value is missing.

    These configurations used to live in alembic config initialiser, but
    that's just tight coupling. Ideally we should move that to userspace
    and find a way to pass these into alembic commands.
    @todo: think about it
    """
    from boiler.migrations.config import MigrationsConfig

    # Human-readable setting names, used in error messages.
    # (Renamed from `map`, which shadowed the builtin.)
    setting_names = dict(
        path='MIGRATIONS_PATH',
        db_url='SQLALCHEMY_DATABASE_URI',
        metadata='SQLAlchemy metadata'
    )

    app = bootstrap.get_app()
    params = dict()
    params['path'] = app.config.get(setting_names['path'], 'migrations')
    params['db_url'] = app.config.get(setting_names['db_url'])
    params['metadata'] = db.metadata

    # Fail fast with the name of the first missing setting.
    for param, value in params.items():
        if not value:
            msg = 'Configuration error: [{}] is undefined'
            raise Exception(msg.format(setting_names[param]))

    config = MigrationsConfig(**params)
    return config
# -----------------------------------------------------------------------------
# Group setup
# -----------------------------------------------------------------------------
# Root command group for all database/migration subcommands below.
@click.group(help=yellow('Database management commands'))
def cli():
    pass
# -----------------------------------------------------------------------------
# Commands
# -----------------------------------------------------------------------------
@cli.command(name='init')
def init():
    """ Initialize new migrations directory """
    try:
        config = get_config()
        # Scaffold the migrations directory using the bundled 'project' template.
        alembic_command.init(config, config.dir, 'project')
    except CommandError as e:
        # Alembic refuses to init (e.g. directory already exists): print in red.
        click.echo(red(str(e)))
@cli.command(name='revision')
@click.option('--revision', type=str, default=None, help='Specify a hardcoded revision id instead of generating one')
@click.option('--path', type=str, default=None, help='Specify the version path in which to place the revision file')
@click.option('--branch-label', type=str, default=None, help='Specify a branch label to apply to the new revision')
@click.option('--splice', type=bool, is_flag=True, default=False, help='Allow a non-head revision as the "head" to splice onto')
@click.option('--head', type=str, default=None, help='Specify head revision or <branch>@head to base new revision on')
@click.option('--sql', type=bool, is_flag=True, default=False, help='Do not execute SQL - dump to standard output instead')
@click.option('--autogenerate', type=bool, is_flag=True, default=False, help='Populate revision with autoganerated diff')
@click.option('--message', '-m', type=str, default=None, help='Migration title')
def revision(revision, path, branch_label, splice, head, sql, autogenerate, message):
    """ Create new revision file """
    # Fixed: the --path help text previously duplicated --revision's help.
    alembic_command.revision(
        config=get_config(),
        rev_id=revision,
        version_path=path,
        branch_label=branch_label,
        splice=splice,
        head=head,
        sql=sql,
        autogenerate=autogenerate,
        message=message
    )
@cli.command('generate')
@click.option('--revision', type=str, default=None, help='Specify a hardcoded revision id instead of generating one')
@click.option('--path', type=str, default=None, help='Specify the version path in which to place the revision file')
@click.option('--branch-label', type=str, default=None, help='Specify a branch label to apply to the new revision')
@click.option('--splice', type=bool, is_flag=True, default=False, help='Allow a non-head revision as the "head" to splice onto')
@click.option('--head', type=str, default=None, help='Specify head revision or <branch>@head to base new revision on')
@click.option('--sql', type=bool, is_flag=True, default=False, help='Do not execute SQL - dump to standard output instead')
@click.option('--message', '-m', type=str, default=None, help='Migration title')
def generate(revision, path, branch_label, splice, head, sql, message):
    """ Autogenerate new revision file """
    # Fixed: the --path help text previously duplicated --revision's help.
    # Identical to `revision` except that autogenerate is forced on.
    alembic_command.revision(
        config=get_config(),
        rev_id=revision,
        version_path=path,
        branch_label=branch_label,
        splice=splice,
        head=head,
        sql=sql,
        autogenerate=True,
        message=message
    )
@cli.command(name='merge')
@click.option('--revision', type=str, default=None, help='Specify a hardcoded revision id instead of generating one')
@click.option('--branch-label', type=str, default=None, help='Specify a branch label to apply to the new revision')
@click.option('--message', '-m', type=str, default=None, help='Migration title')
@click.option('--list-revisions', type=str, default=None, help='One or more revisions, or "heads" for all heads')
def merge(revision, branch_label, message, list_revisions=''):
    """ Merge two revision together, create new revision file """
    # NOTE(review): click always supplies --list-revisions (default None), so
    # the '' default in the signature is effectively dead — confirm intent.
    alembic_command.merge(
        config=get_config(),
        revisions=list_revisions,
        message=message,
        branch_label=branch_label,
        rev_id=revision
    )
@cli.command(name='up')
@click.option('--tag', type=str, default=None, help='Arbitrary tag name (used by custom env.py)')
@click.option('--sql', type=bool, is_flag=True, default=False, help='Do not execute SQL - dump to standard output instead')
@click.option('--revision', type=str, default='head', help='Revision id')
def up(tag, sql, revision):
    """ Upgrade to revision """
    # Thin wrapper: resolve project config, then delegate to alembic.
    cfg = get_config()
    alembic_command.upgrade(config=cfg, revision=revision, sql=sql, tag=tag)
@cli.command(name='down')
@click.option('--tag', type=str, default=None, help='Arbitrary tag name (used by custom env.py)')
@click.option('--sql', type=bool, is_flag=True, default=False, help='Do not execute SQL - dump to standard output instead')
@click.option('--revision', type=str, default='-1', help='Revision id')
def down(tag, sql, revision):
    """ Downgrade to revision """
    # Thin wrapper: resolve project config, then delegate to alembic.
    cfg = get_config()
    alembic_command.downgrade(config=cfg, revision=revision, sql=sql, tag=tag)
@cli.command(name='show')
@click.option('--revision', type=str, default='head', help='Revision id')
def show(revision):
    """ Show the revisions """
    # Delegate straight to alembic's `show` with the project config.
    cfg = get_config()
    alembic_command.show(config=cfg, rev=revision)
@cli.command(name='history')
@click.option('--verbose', '-v', type=bool, is_flag=True, default=False, help='Use more verbose output')
@click.option('--range', '-r', type=str, default=None, help='Specify a revision range; format is [start]:[end]')
def history(verbose, range):
    """ List revision changesets chronologically """
    # Delegate to alembic's `history` with the project config.
    cfg = get_config()
    alembic_command.history(config=cfg, rev_range=range, verbose=verbose)
@cli.command(name='heads')
@click.option('--resolve', '-r', type=bool, is_flag=True, default=False, help='Treat dependency versions as down revisions')
@click.option('--verbose', '-v', type=bool, is_flag=True, default=False, help='Use more verbose output')
def heads(resolve, verbose):
    """ Show available heads """
    # Delegate to alembic's `heads` with the project config.
    cfg = get_config()
    alembic_command.heads(config=cfg, verbose=verbose, resolve_dependencies=resolve)
@cli.command(name='branches')
@click.option('--verbose', '-v', type=bool, is_flag=True, default=False, help='Use more verbose output')
def branches(verbose):
    """ Show current branch points """
    # Delegate to alembic's `branches` with the project config.
    cfg = get_config()
    alembic_command.branches(config=cfg, verbose=verbose)
@cli.command(name='current')
@click.option('--verbose', '-v', type=bool, is_flag=True, default=False, help='Use more verbose output')
def current(verbose):
    """ Display current revision """
    # Delegate to alembic's `current` with the project config.
    cfg = get_config()
    alembic_command.current(config=cfg, verbose=verbose)
@cli.command()
@click.option('--tag', type=str, default=None, help='Arbitrary tag name (used by custom env.py)')
@click.option('--sql', type=bool, is_flag=True, default=False, help='Do not execute SQL - dump to standard output instead')
@click.option('--revision', type=str, default='head', help='Revision id')
def stamp(revision, sql, tag):
    """ Stamp db to given revision without migrating """
    # Delegate to alembic's `stamp` with the project config.
    cfg = get_config()
    alembic_command.stamp(config=cfg, revision=revision, sql=sql, tag=tag)
| projectshift/shift-boiler | boiler/cli/db.py | Python | mit | 8,383 |
__author__ = 'JunSong<songjun54cm@gmail.com>'
from BasicEvaluator import BasicEvaluator
import numpy as np
class CategoricalEvaluator(BasicEvaluator):
    """Evaluator for categorical (one-hot encoded) predictions."""

    def __init__(self):
        super(CategoricalEvaluator, self).__init__()

    def categorical_accuracy(self, gth, pred):
        """Fraction of samples whose predicted argmax matches the ground truth."""
        gth_labels = np.argmax(gth, axis=-1)
        pred_labels = np.argmax(pred, axis=-1)
        return np.mean(np.equal(gth_labels, pred_labels))
# $Id$
#
# Copyright (C) 2004-2008 greg Landrum and Rational Discovery LLC
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
from __future__ import print_function
from rdkit import RDConfig
import unittest,sys,os
import io
from rdkit.six import PY3
from rdkit.six.moves import cPickle
from rdkit import Chem
from rdkit.Chem import ChemicalFeatures,rdDistGeom
import EmbedLib
import gzip
from rdkit import DistanceGeometry as DG
from rdkit import Geometry
import Pharmacophore
import numpy
def feq(n1, n2, tol=1e-5):
    """Return True when n1 and n2 differ by no more than tol (float-equality)."""
    delta = abs(n1 - n2)
    return delta <= tol
class TestCase(unittest.TestCase):
  """Regression tests for 3D pharmacophore matching/embedding via EmbedLib.

  Several tests stream a gzipped pickle of (name, mol-pickle, bounds-matrix)
  tuples from test_data/cdk2-syn-clip100.pkl.gz and count matches/hits against
  hard-coded expected totals.
  """
  def setUp(self):
    self.dataDir = os.path.join(RDConfig.RDCodeDir,'Chem/Pharm3D/test_data')
    # Inline FDef block defining three feature families used by the factory.
    self.fdefBlock = \
      """DefineFeature HAcceptor1 [N,O;H0]
                  Family HBondAcceptor
                  Weights 1.0
                  EndFeature
                  DefineFeature HDonor1 [N,O;!H0]
                  Family HBondDonor
                  Weights 1.0
                  EndFeature
                  DefineFeature Aromatic1 c1ccccc1
                  Family Aromatic
                  Weights 1.,1.,1.,1.,1.,1.
                  EndFeature\n"""
    self.featFactory = ChemicalFeatures.BuildFeatureFactoryFromString(self.fdefBlock)
    # A three-point pharmacophore: acceptor, donor, aromatic centre.
    self.feats = [ChemicalFeatures.FreeChemicalFeature('HBondAcceptor', 'HAcceptor1',
                                                       Geometry.Point3D(0.0, 0.0, 0.0)),
                  ChemicalFeatures.FreeChemicalFeature('HBondDonor', 'HDonor1',
                                                       Geometry.Point3D(2.65, 0.0, 0.0)),
                  ChemicalFeatures.FreeChemicalFeature('Aromatic', 'Aromatic1',
                                                       Geometry.Point3D(5.12, 0.908, 0.0)),
                  ]
    self.pcophore=Pharmacophore.Pharmacophore(self.feats)
    # Pairwise distance bounds (in Angstroms) between the three features.
    self.pcophore.setLowerBound(0,1, 2.0)
    self.pcophore.setUpperBound(0,1, 3.3)
    self.pcophore.setLowerBound(0,2, 5.0)
    self.pcophore.setUpperBound(0,2, 5.4)
    self.pcophore.setLowerBound(1,2, 2.6)
    self.pcophore.setUpperBound(1,2, 3.0)
  def _matchMol(self,tpl,pcophore,featFactory,downSample):
    # Returns 1 when the molecule's features can satisfy the pharmacophore's
    # bounds matrix (r[0] is the 'failed' flag), 0 otherwise.
    name,molPkl,boundsMat = tpl
    mol = Chem.Mol(molPkl)
    matched,matches = EmbedLib.MatchPharmacophoreToMol(mol,featFactory,pcophore)
    if matched:
      r = EmbedLib.MatchPharmacophore(matches,boundsMat,pcophore,
                                      useDownsampling=downSample)
      if r[0]:
        return 0
      else:
        return 1
    else:
      return 0
  def test1SearchFullMat(self):
    # Full bounds matrix, no downsampling: expect 47 hits out of 100 mols.
    inF = gzip.open(os.path.join(self.dataDir,'cdk2-syn-clip100.pkl.gz'),'rb')
    #outF = gzip.open(os.path.join(self.dataDir,'cdk2-syn-clip100.pkl.new.gz'),'wb+')
    nDone = 0
    nHits = 0
    while 1:
      try:
        tpl = cPickle.load(inF, encoding='latin1')
        if PY3:
          tpl = tpl[0], tpl[1].encode('latin1'), tpl[2]
        #tpl=tpl[0],tpl[1],numpy.array(tpl[2])
        #cPickle.dump(tpl,outF)
      except Exception:
        break
      if self._matchMol(tpl,self.pcophore,self.featFactory,0):
        nHits+=1
      nDone += 1
    self.assertEqual(nDone,100)
    #print 'nHits:',nHits
    self.assertEqual(nHits,47)
  def test2SearchDownsample(self):
    # Same data with downsampling enabled; hit count must be unchanged (47).
    inF = gzip.open(os.path.join(self.dataDir,'cdk2-syn-clip100.pkl.gz'),'rb')
    nDone = 0
    nHits = 0
    hits = []
    while 1:
      try:
        tpl = cPickle.load(inF, encoding='latin1')
        if PY3:
          tpl = tpl[0], tpl[1].encode('latin1'), tpl[2]
      except Exception:
        break
      if self._matchMol(tpl,self.pcophore, self.featFactory,1):
        nHits+=1
      nDone += 1
    self.assertEqual(nDone,100)
    #print 'nHits:',nHits
    self.assertEqual(nHits,47)
  def test3Embed(self):
    # Expected EmbedOne statistics for three reference molecules.
    testResults={
      'mol_197':(218.80,35.75,110.33,11.58,109.66,11.09,90.35,2.95,0.00),
      'mol_223':(259.19,6.27,134.13,1.12,134.06,1.12,85.74,0.61,0.00),
      'mol_269':(204.51,7.89,103.89,1.20,102.66,1.20,88.07,1.21,6.00),
      }
    inF = gzip.open(os.path.join(self.dataDir,'cdk2-syn-clip100.pkl.gz'),'rb')
    nDone = 0
    nHits = 0
    while 1:
      try:
        name,molPkl,boundsMat = cPickle.load(inF, encoding='latin1')
        if PY3:
          molPkl = bytes(molPkl, encoding='latin1')
      except Exception:
        break
      nDone += 1
      mol = Chem.Mol(molPkl)
      # Bounds are recomputed from the molecule and triangle-smoothed here,
      # so the pickled boundsMat is ignored in this test.
      nboundsMat = rdDistGeom.GetMoleculeBoundsMatrix(mol)
      DG.DoTriangleSmoothing(nboundsMat)
      matched,matches = EmbedLib.MatchPharmacophoreToMol(mol,self.featFactory,
                                                         self.pcophore)
      if matched:
        failed,bm,match,stats = EmbedLib.MatchPharmacophore(matches,nboundsMat,
                                                            self.pcophore,
                                                            useDownsampling=1)
        if not failed:
          nHits += 1
          if name in testResults:
            stats = EmbedLib.EmbedOne(mol,name,match,self.pcophore,count=10,
                                      silent=1,randomSeed=23)
            tgt = testResults[name]
            self.assertEqual(len(tgt),len(stats))
            print(name)
            print(','.join(['%.2f'%x for x in stats]))
            # we'll use different tolerances for the different values:
            self.assertTrue(feq(tgt[0],stats[0],5.0),(tgt[0],stats[0]))
            for i in range(2,len(tgt)):
              self.assertTrue(feq(tgt[i],stats[i],5.0),(tgt[i],stats[i]))
    self.assertEqual(nDone,100)
    #print 'nHits:',nHits
    self.assertEqual(nHits,50)
  def test4Search(self):
    # Uses the full BaseFeatures.fdef factory and 2D (topological) limits.
    featFactory = ChemicalFeatures.BuildFeatureFactory(os.path.join(self.dataDir,
                                                                    'BaseFeatures.fdef'))
    activeFeats = [ChemicalFeatures.FreeChemicalFeature('Acceptor',
                                                        Geometry.Point3D(0.0, 0.0, 0.0)),
                   ChemicalFeatures.FreeChemicalFeature('Donor',
                                                        Geometry.Point3D(0.0, 0.0, 0.0)),
                   ChemicalFeatures.FreeChemicalFeature('Aromatic',
                                                        Geometry.Point3D(0.0, 0.0, 0.0))]
    pcophore= Pharmacophore.Pharmacophore(activeFeats)
    pcophore.setLowerBound(0,1,2.251)
    pcophore.setUpperBound(0,1,2.451)
    pcophore.setUpperBound2D(0,1,3)
    pcophore.setLowerBound(0,2,4.970)
    pcophore.setUpperBound(0,2,5.170)
    pcophore.setUpperBound2D(0,2,6)
    pcophore.setLowerBound(1,2,2.681)
    pcophore.setUpperBound(1,2,2.881)
    pcophore.setUpperBound2D(1,2,6)
    inF = gzip.open(os.path.join(self.dataDir,'cdk2-syn-clip100.pkl.gz'),'rb')
    nDone = 0
    nMatches = 0
    nHits = 0
    while 1:
      try:
        name,molPkl,boundsMat = cPickle.load(inF, encoding='latin1')
        if PY3:
          molPkl = bytes(molPkl, encoding='latin1')
      except Exception:
        break
      nDone += 1
      mol = Chem.Mol(molPkl)
      boundsMat = rdDistGeom.GetMoleculeBoundsMatrix(mol)
      DG.DoTriangleSmoothing(boundsMat)
      canMatch,matches = EmbedLib.MatchPharmacophoreToMol(mol,featFactory,
                                                          pcophore)
      if canMatch:
        nMatches+=1
        r = EmbedLib.MatchPharmacophore(matches,boundsMat,pcophore,
                                        useDownsampling=True,use2DLimits=True,
                                        mol=mol)
        failed,bm,match,details = r
        if not failed:
          nHits+=1
    self.assertEqual(nDone,100)
    self.assertEqual(nMatches,93)
    #print 'nhits:',nHits
    self.assertEqual(nHits,67)
  def testIssue268(self):
    # Regression: matching the Issue268 pharmacophore must yield 4-feature
    # matches before and after triangle smoothing, with and without 2D limits.
    from rdkit import RDLogger
    #RDLogger.EnableLog('rdApp.debug')
    featFactory = ChemicalFeatures.BuildFeatureFactory(os.path.join(self.dataDir,
                                                                    'Issue268.fdef'))
    m1 = Chem.MolFromMolFile(os.path.join(self.dataDir,
                                          'Issue268_Mol1.mol'))
    m2 = Chem.MolFromMolFile(os.path.join(self.dataDir,
                                          'Issue268_Mol2.mol'))
    # Normalize line endings before unpickling the pharmacophore.
    with open(os.path.join(self.dataDir,
                           'Issue268_Pcop.pkl'),'r') as inTF:
      buf = inTF.read().replace('\r\n', '\n').encode('utf-8')
      inTF.close()
    with io.BytesIO(buf) as inF:
      pcop = cPickle.load(inF, encoding='latin1')
    #pcop._boundsMat=numpy.array(pcop._boundsMat)
    #pcop._boundsMat2D=numpy.array(pcop._boundsMat2D)
    #cPickle.dump(pcop,file(os.path.join(self.dataDir,
    #                       'Issue268_Pcop.new.pkl'),'wb+'))
    match,mList1 = EmbedLib.MatchFeatsToMol(m1,featFactory,pcop.getFeatures())
    match,mList2 = EmbedLib.MatchFeatsToMol(m2,featFactory,pcop.getFeatures())
    b1 = rdDistGeom.GetMoleculeBoundsMatrix(m1)
    b2 = rdDistGeom.GetMoleculeBoundsMatrix(m2)
    self.assertEqual(len(EmbedLib.MatchPharmacophore(mList1,b1,pcop)[2]),4)
    self.assertEqual(len(EmbedLib.MatchPharmacophore(mList2,b2,pcop)[2]),4)
    self.assertEqual(len(EmbedLib.MatchPharmacophore(mList1,b1,pcop,
                                                     mol=m1,use2DLimits=True)[2]),4)
    self.assertEqual(len(EmbedLib.MatchPharmacophore(mList2,b2,pcop,
                                                     mol=m2,use2DLimits=True)[2]),4)
    from rdkit import DistanceGeometry as DG
    self.assertTrue(DG.DoTriangleSmoothing(b1))
    self.assertTrue(DG.DoTriangleSmoothing(b2))
    self.assertEqual(len(EmbedLib.MatchPharmacophore(mList1,b1,pcop)[2]),4)
    self.assertEqual(len(EmbedLib.MatchPharmacophore(mList2,b2,pcop)[2]),4)
    self.assertEqual(len(EmbedLib.MatchPharmacophore(mList1,b1,pcop,
                                                     mol=m1,use2DLimits=True)[2]),4)
    self.assertEqual(len(EmbedLib.MatchPharmacophore(mList2,b2,pcop,
                                                     mol=m2,use2DLimits=True)[2]),4)
# Allow the test module to be run directly as a script.
if __name__ == '__main__':
  unittest.main()
| adalke/rdkit | rdkit/Chem/Pharm3D/UnitTestEmbed.py | Python | bsd-3-clause | 10,229 |
from __future__ import absolute_import
# Problem definition consumed by the TestInput machinery and the VTK file
# the solver run is expected to produce.
input_name = '../examples/linear_elasticity/linear_elastic_up.py'
output_name = 'test_linear_elastic_up.vtk'
from tests_basic import TestInput
# Presumably TestInput drives the solver using the module-level input_name /
# output_name above — confirm in tests_basic; no extra assertions are needed.
class Test( TestInput ):
    pass
| rc/sfepy | tests/test_input_linear_elastic_up.py | Python | bsd-3-clause | 217 |
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """
    def setUp(self):
        self.maxDiff = None
        # NOTE(review): this compares against the chartarea01 reference file;
        # the '_test_2_' prefix keeps the generated output distinct from the
        # first chartarea test. Confirm a separate chartarea02 baseline is
        # not intended.
        filename = 'chart_chartarea01.xlsx'
        test_dir = 'xlsxwriter/test/comparison/'
        self.got_filename = test_dir + '_test_2_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename
        self.ignore_files = []
        self.ignore_elements = {}
    def test_create_file(self):
        """Test XlsxWriter chartarea properties."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()
        chart = workbook.add_chart({'type': 'column'})
        # Fixed axis ids so the generated XML matches the reference file.
        chart.axis_ids = [82933248, 82952960]
        data = [
            [1, 2, 3, 4, 5],
            [2, 4, 6, 8, 10],
            [3, 6, 9, 12, 15],
        ]
        worksheet.write_column('A1', data[0])
        worksheet.write_column('B1', data[1])
        worksheet.write_column('C1', data[2])
        chart.add_series({'values': '=Sheet1!$A$1:$A$5'})
        chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
        chart.add_series({'values': '=Sheet1!$C$1:$C$5'})
        # Chart area: no border, solid red fill.
        chart.set_chartarea({
            'border': {'none': 1},
            'fill': {'color': 'red'}
        })
        # Plot area: dashed yellow border, green fill.
        chart.set_plotarea({
            'border': {'color': 'yellow', 'width': 1, 'dash_type': 'dash'},
            'fill': {'color': '#92D050'}
        })
        worksheet.insert_chart('E9', chart)
        workbook.close()
        self.assertExcelEqual()
| jvrsantacruz/XlsxWriter | xlsxwriter/test/comparison/test_chart_chartarea02.py | Python | bsd-2-clause | 1,825 |
import os
def import_value(var_name):
    """Return the value of the environment variable *var_name*.

    :param var_name: name of the environment variable to read
    :raises KeyError: if the variable is not set
    """
    # Fixed: the original used Perl's `sub` keyword, which is a Python
    # syntax error; the function must be declared with `def`.
    return os.environ[var_name]
| Pcolar/OCR-Parser | _scripts/import_shell_values.py | Python | mit | 70 |
# Requires sci-kit learn and matplotlib
import matplotlib.pyplot as pyplot
from sklearn import datasets
from sklearn import svm
# Load the 8x8 handwritten-digit images bundled with scikit-learn.
digits = datasets.load_digits()
#Gamma breaks when greater than 0.01. Maintains high accuracy at 0.001
clf = svm.SVC(gamma=0.001, C=100)
# Train on all samples except the last one, which is held out for prediction.
x,y = digits.data[:-1], digits.target[:-1]
clf.fit(x,y)
#Will return a prediction and display the last digit in dataset
# NOTE(review): passing a 1-D sample to predict() was deprecated/removed in
# later scikit-learn releases; clf.predict([digits.data[-1]]) may be
# required on modern versions — confirm the pinned sklearn version.
print('Prediction:',clf.predict(digits.data[-1]))
pyplot.imshow(digits.images[-1], cmap=pyplot.cm.gray_r, interpolation="nearest")
pyplot.show()
| FrizzBolt/machine-learning-sandbox | num-prediction.py | Python | mit | 537 |
# -*- coding: utf-8 -*-
#
# SelfTest/Hash/common.py: Common code for Crypto.SelfTest.Hash
#
# Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-testing for PyCrypto hash modules"""
__revision__ = "$Id$"
import sys
import unittest
from binascii import a2b_hex, b2a_hex
from Crypto.Util.py3compat import *
# For compatibility with Python 2.1 and Python 2.2
# Compatibility shim: deliberately shadows the builtin `dict` on very old
# interpreters so that dict(key='value') keyword construction works there.
if sys.hexversion < 0x02030000:
    # Python 2.1 doesn't have a dict() function
    # Python 2.2 dict() function raises TypeError if you do dict(MD5='blah')
    def dict(**kwargs):
        return kwargs.copy()
else:
    # Modern interpreters: keep the builtin as-is.
    dict = dict
class _NoDefault: pass  # sentinel: distinguishes "no default given" from default=None

def _extract(d, k, default=_NoDefault):
    """Remove key *k* from dict *d* and return its value.

    When the key is absent, return *default* if one was supplied; otherwise
    raise KeyError, matching a plain d[k] lookup.
    """
    if k in d:
        value = d[k]
        del d[k]
        return value
    if default is _NoDefault:
        raise KeyError(k)
    return default
# Generic cipher test case
class CipherSelfTest(unittest.TestCase):
    """Known-answer test for one cipher/key/plaintext/ciphertext vector.

    Python-2-era code: parameters arrive hex-encoded and are converted with
    a2b_hex/b2a_hex; `params` is consumed destructively via _extract.
    """
    def __init__(self, module, params):
        unittest.TestCase.__init__(self)
        self.module = module
        # Extract the parameters
        params = params.copy()
        self.description = _extract(params, 'description')
        self.key = b(_extract(params, 'key'))
        self.plaintext = b(_extract(params, 'plaintext'))
        self.ciphertext = b(_extract(params, 'ciphertext'))
        self.module_name = _extract(params, 'module_name', None)
        mode = _extract(params, 'mode', None)
        self.mode_name = str(mode)
        if mode is not None:
            # Block cipher
            self.mode = getattr(self.module, "MODE_" + mode)
            self.iv = _extract(params, 'iv', None)
            if self.iv is not None: self.iv = b(self.iv)
            # Only relevant for OPENPGP mode
            self.encrypted_iv = _extract(params, 'encrypted_iv', None)
            if self.encrypted_iv is not None:
                self.encrypted_iv = b(self.encrypted_iv)
        else:
            # Stream cipher
            self.mode = None
            self.iv = None
        # Whatever is left over (e.g. counter settings) is passed to new().
        self.extra_params = params
    def shortDescription(self):
        return self.description
    def _new(self, do_decryption=0):
        # Build a fresh cipher object for this test's parameters.
        params = self.extra_params.copy()
        # Handle CTR mode parameters. By default, we use Counter.new(self.module.block_size)
        if hasattr(self.module, "MODE_CTR") and self.mode == self.module.MODE_CTR:
            from Crypto.Util import Counter
            ctr_class = _extract(params, 'ctr_class', Counter.new)
            ctr_params = _extract(params, 'ctr_params', {}).copy()
            if ctr_params.has_key('prefix'): ctr_params['prefix'] = a2b_hex(b(ctr_params['prefix']))
            if ctr_params.has_key('suffix'): ctr_params['suffix'] = a2b_hex(b(ctr_params['suffix']))
            if not ctr_params.has_key('nbits'):
                ctr_params['nbits'] = 8*(self.module.block_size - len(ctr_params.get('prefix', '')) - len(ctr_params.get('suffix', '')))
            params['counter'] = ctr_class(**ctr_params)
        if self.mode is None:
            # Stream cipher
            return self.module.new(a2b_hex(self.key), **params)
        elif self.iv is None:
            # Block cipher without iv
            return self.module.new(a2b_hex(self.key), self.mode, **params)
        else:
            # Block cipher with iv
            if do_decryption and self.mode == self.module.MODE_OPENPGP:
                # In PGP mode, the IV to feed for decryption is the *encrypted* one
                return self.module.new(a2b_hex(self.key), self.mode, a2b_hex(self.encrypted_iv), **params)
            else:
                return self.module.new(a2b_hex(self.key), self.mode, a2b_hex(self.iv), **params)
    def runTest(self):
        plaintext = a2b_hex(self.plaintext)
        ciphertext = a2b_hex(self.ciphertext)
        # Each operation gets a fresh cipher object; repeat to catch state leaks.
        ct1 = b2a_hex(self._new().encrypt(plaintext))
        pt1 = b2a_hex(self._new(1).decrypt(ciphertext))
        ct2 = b2a_hex(self._new().encrypt(plaintext))
        pt2 = b2a_hex(self._new(1).decrypt(ciphertext))
        if hasattr(self.module, "MODE_OPENPGP") and self.mode == self.module.MODE_OPENPGP:
            # In PGP mode, data returned by the first encrypt()
            # is prefixed with the encrypted IV.
            # Here we check it and then remove it from the ciphertexts.
            eilen = len(self.encrypted_iv)
            self.assertEqual(self.encrypted_iv, ct1[:eilen])
            self.assertEqual(self.encrypted_iv, ct2[:eilen])
            ct1 = ct1[eilen:]
            ct2 = ct2[eilen:]
        self.assertEqual(self.ciphertext, ct1) # encrypt
        self.assertEqual(self.ciphertext, ct2) # encrypt (second time)
        self.assertEqual(self.plaintext, pt1) # decrypt
        self.assertEqual(self.plaintext, pt2) # decrypt (second time)
class CipherStreamingSelfTest(CipherSelfTest):
    """Checks that feeding data 3 bytes at a time gives the same result."""
    def shortDescription(self):
        desc = self.module_name
        if self.mode is not None:
            desc += " in %s mode" % (self.mode_name,)
        return "%s should behave like a stream cipher" % (desc,)
    def runTest(self):
        plaintext = a2b_hex(self.plaintext)
        ciphertext = a2b_hex(self.ciphertext)
        # The cipher should work like a stream cipher
        # Test counter mode encryption, 3 bytes at a time
        ct3 = []
        cipher = self._new()
        for i in range(0, len(plaintext), 3):
            ct3.append(cipher.encrypt(plaintext[i:i+3]))
        ct3 = b2a_hex(b("").join(ct3))
        self.assertEqual(self.ciphertext, ct3) # encryption (3 bytes at a time)
        # Test counter mode decryption, 3 bytes at a time
        # NOTE(review): encrypt() is used on the ciphertext here; for
        # stream/CTR ciphers encryption and decryption are the same
        # operation, so this appears deliberate — confirm.
        pt3 = []
        cipher = self._new()
        for i in range(0, len(ciphertext), 3):
            pt3.append(cipher.encrypt(ciphertext[i:i+3]))
        # PY3K: This is meant to be text, do not change to bytes (data)
        pt3 = b2a_hex(b("").join(pt3))
        self.assertEqual(self.plaintext, pt3) # decryption (3 bytes at a time)
class CTRSegfaultTest(unittest.TestCase):
    """Regression: MODE_CTR without a counter must raise, not crash."""
    def __init__(self, module, params):
        unittest.TestCase.__init__(self)
        self.module = module
        self.key = b(params['key'])
        self.module_name = params.get('module_name', None)
    def shortDescription(self):
        return """Regression test: %s.new(key, %s.MODE_CTR) should raise TypeError, not segfault""" % (self.module_name, self.module_name)
    def runTest(self):
        self.assertRaises(TypeError, self.module.new, a2b_hex(self.key), self.module.MODE_CTR)
class CTRWraparoundTest(unittest.TestCase):
    """Regression: counter overflow must raise OverflowError (Python-2 code:
    note the `2L` long literal below)."""
    def __init__(self, module, params):
        unittest.TestCase.__init__(self)
        self.module = module
        self.key = b(params['key'])
        self.module_name = params.get('module_name', None)
    def shortDescription(self):
        return """Regression test: %s with MODE_CTR should raise OverflowError on wraparound when shortcut used""" % (self.module_name,)
    def runTest(self):
        from Crypto.Util import Counter
        # Start the counter at its maximum value so the second encrypt wraps.
        for disable_shortcut in (0, 1): # (False, True) Test CTR-mode shortcut and PyObject_CallObject code paths
            for little_endian in (0, 1): # (False, True) Test both endiannesses
                ctr = Counter.new(8*self.module.block_size, initial_value=2L**(8*self.module.block_size)-1, little_endian=little_endian, disable_shortcut=disable_shortcut)
                cipher = self.module.new(a2b_hex(self.key), self.module.MODE_CTR, counter=ctr)
                block = b("\x00") * self.module.block_size
                cipher.encrypt(block)
                self.assertRaises(OverflowError, cipher.encrypt, block)
class CFBSegmentSizeTest(unittest.TestCase):
    """CFB segment_size must be a multiple of 8 bits."""
    def __init__(self, module, params):
        unittest.TestCase.__init__(self)
        self.module = module
        self.key = b(params['key'])
        self.description = params['description']
    def shortDescription(self):
        return self.description
    def runTest(self):
        """Regression test: m.new(key, m.MODE_CFB, segment_size=N) should require segment_size to be a multiple of 8 bits"""
        for i in range(1, 8):
            self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), self.module.MODE_CFB, segment_size=i)
        self.module.new(a2b_hex(self.key), self.module.MODE_CFB, "\0"*self.module.block_size, segment_size=8) # should succeed
class RoundtripTest(unittest.TestCase):
    """decrypt(encrypt(p)) == p across all chaining modes, with a random IV."""
    def __init__(self, module, params):
        from Crypto import Random
        unittest.TestCase.__init__(self)
        self.module = module
        self.iv = Random.get_random_bytes(module.block_size)
        self.key = b(params['key'])
        # 100 copies keeps the plaintext a multiple of the block size.
        self.plaintext = 100 * b(params['plaintext'])
        self.module_name = params.get('module_name', None)
    def shortDescription(self):
        return """%s .decrypt() output of .encrypt() should not be garbled""" % (self.module_name,)
    def runTest(self):
        for mode in (self.module.MODE_ECB, self.module.MODE_CBC, self.module.MODE_CFB, self.module.MODE_OFB, self.module.MODE_OPENPGP):
            encryption_cipher = self.module.new(a2b_hex(self.key), mode, self.iv)
            ciphertext = encryption_cipher.encrypt(self.plaintext)
            if mode != self.module.MODE_OPENPGP:
                decryption_cipher = self.module.new(a2b_hex(self.key), mode, self.iv)
            else:
                # OPENPGP prefixes the ciphertext with an encrypted IV
                # (block_size + 2 bytes); strip it and decrypt with it.
                eiv = ciphertext[:self.module.block_size+2]
                ciphertext = ciphertext[self.module.block_size+2:]
                decryption_cipher = self.module.new(a2b_hex(self.key), mode, eiv)
            decrypted_plaintext = decryption_cipher.decrypt(ciphertext)
            self.assertEqual(self.plaintext, decrypted_plaintext)
class PGPTest(unittest.TestCase):
    """The removed MODE_PGP must be rejected with ValueError."""
    def __init__(self, module, params):
        unittest.TestCase.__init__(self)
        self.module = module
        self.key = b(params['key'])
    def shortDescription(self):
        return "MODE_PGP was implemented incorrectly and insecurely. It's completely banished now."
    def runTest(self):
        self.assertRaises(ValueError, self.module.new, a2b_hex(self.key),
                self.module.MODE_PGP)
class IVLengthTest(unittest.TestCase):
    """Modes that need an IV must reject an empty one; ECB/CTR must not."""
    def __init__(self, module, params):
        unittest.TestCase.__init__(self)
        self.module = module
        self.key = b(params['key'])
    def shortDescription(self):
        return "Check that all modes except MODE_ECB and MODE_CTR require an IV of the proper length"
    def runTest(self):
        self.assertRaises(ValueError, self.module.new, a2b_hex(self.key),
                self.module.MODE_CBC, "")
        self.assertRaises(ValueError, self.module.new, a2b_hex(self.key),
                self.module.MODE_CFB, "")
        self.assertRaises(ValueError, self.module.new, a2b_hex(self.key),
                self.module.MODE_OFB, "")
        self.assertRaises(ValueError, self.module.new, a2b_hex(self.key),
                self.module.MODE_OPENPGP, "")
        # These two should accept the empty IV.
        self.module.new(a2b_hex(self.key), self.module.MODE_ECB, "")
        self.module.new(a2b_hex(self.key), self.module.MODE_CTR, "", counter=self._dummy_counter)
    def _dummy_counter(self):
        # Minimal counter callable: one all-zero block per call.
        return "\0" * self.module.block_size
def make_block_tests(module, module_name, test_data):
    """Build a list of TestCase instances for a block cipher module.

    Each row of test_data is a tuple of 3-5 elements:
    (plaintext, ciphertext, key[, description[, extra_params]]), hex-encoded.
    The regression tests (CTR/CFB/roundtrip/PGP/IV) are added once, before
    the first known-answer test.  Python-2-era code (uses dict.has_key).
    """
    tests = []
    extra_tests_added = 0
    for i in range(len(test_data)):
        row = test_data[i]
        # Build the "params" dictionary
        params = {'mode': 'ECB'}
        if len(row) == 3:
            (params['plaintext'], params['ciphertext'], params['key']) = row
        elif len(row) == 4:
            (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row
        elif len(row) == 5:
            (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row
            params.update(extra_params)
        else:
            raise AssertionError("Unsupported tuple size %d" % (len(row),))
        # Build the display-name for the test
        p2 = params.copy()
        p_key = _extract(p2, 'key')
        p_plaintext = _extract(p2, 'plaintext')
        p_ciphertext = _extract(p2, 'ciphertext')
        p_description = _extract(p2, 'description', None)
        p_mode = p2.get('mode', 'ECB')
        if p_mode == 'ECB':
            _extract(p2, 'mode', 'ECB')
        if p_description is not None:
            description = p_description
        elif p_mode == 'ECB' and not p2:
            description = "p=%s, k=%s" % (p_plaintext, p_key)
        else:
            description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2)
        name = "%s #%d: %s" % (module_name, i+1, description)
        params['description'] = name
        params['module_name'] = module_name
        # Add extra test(s) to the test suite before the current test
        if not extra_tests_added:
            tests += [
                CTRSegfaultTest(module, params),
                CTRWraparoundTest(module, params),
                CFBSegmentSizeTest(module, params),
                RoundtripTest(module, params),
                PGPTest(module, params),
                IVLengthTest(module, params),
            ]
            extra_tests_added = 1
        # Add the current test to the test suite
        tests.append(CipherSelfTest(module, params))
        # When using CTR mode, test that the interface behaves like a stream cipher
        if p_mode == 'CTR':
            tests.append(CipherStreamingSelfTest(module, params))
        # When using CTR mode, test the non-shortcut code path.
        if p_mode == 'CTR' and not params.has_key('ctr_class'):
            params2 = params.copy()
            params2['description'] += " (shortcut disabled)"
            ctr_params2 = params.get('ctr_params', {}).copy()
            params2['ctr_params'] = ctr_params2
            if not params2['ctr_params'].has_key('disable_shortcut'):
                params2['ctr_params']['disable_shortcut'] = 1
            tests.append(CipherSelfTest(module, params2))
    return tests
def make_stream_tests(module, module_name, test_data):
    """Build a list of TestCase instances for a stream cipher module.

    Rows follow the same 3-5 element tuple layout as make_block_tests, but
    no mode is set and each vector also gets a streaming (chunked) test.
    """
    tests = []
    for i in range(len(test_data)):
        row = test_data[i]
        # Build the "params" dictionary
        params = {}
        if len(row) == 3:
            (params['plaintext'], params['ciphertext'], params['key']) = row
        elif len(row) == 4:
            (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row
        elif len(row) == 5:
            (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row
            params.update(extra_params)
        else:
            raise AssertionError("Unsupported tuple size %d" % (len(row),))
        # Build the display-name for the test
        p2 = params.copy()
        p_key = _extract(p2, 'key')
        p_plaintext = _extract(p2, 'plaintext')
        p_ciphertext = _extract(p2, 'ciphertext')
        p_description = _extract(p2, 'description', None)
        if p_description is not None:
            description = p_description
        elif not p2:
            description = "p=%s, k=%s" % (p_plaintext, p_key)
        else:
            description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2)
        name = "%s #%d: %s" % (module_name, i+1, description)
        params['description'] = name
        params['module_name'] = module_name
        # Add the test to the test suite
        tests.append(CipherSelfTest(module, params))
        tests.append(CipherStreamingSelfTest(module, params))
    return tests
# vim:set ts=4 sw=4 sts=4 expandtab:
| ktan2020/legacy-automation | win/Lib/site-packages/Crypto/SelfTest/Cipher/common.py | Python | mit | 16,599 |
"""Russian-specific forms helpers."""
from __future__ import unicode_literals
import re
from django.forms.fields import RegexField, Select
from django.utils.translation import ugettext_lazy as _
from .ru_regions import RU_COUNTY_CHOICES, RU_REGIONS_CHOICES
phone_digits_re = re.compile(r'^(?:[78]-?)?(\d{3})[-\.]?(\d{3})[-\.]?(\d{4})$')
class RUCountySelect(Select):
    """A Select widget that uses a list of Russian Counties as its choices."""

    def __init__(self, attrs=None):
        # Choices come from the static RU_COUNTY_CHOICES list in ru_regions.
        super(RUCountySelect, self).__init__(attrs, choices=RU_COUNTY_CHOICES)
class RURegionSelect(Select):
    """A Select widget that uses a list of Russian Regions as its choices."""

    def __init__(self, attrs=None):
        # Choices come from the static RU_REGIONS_CHOICES list in ru_regions.
        super(RURegionSelect, self).__init__(attrs, choices=RU_REGIONS_CHOICES)
class RUPostalCodeField(RegexField):
    """
    Russian Postal code field.

    Format: XXXXXX, where X is any digit, and first digit is not zero.

    NOTE(review): the regex ``^\\d{6}$`` accepts a leading zero, so the
    "first digit is not zero" claim above is not actually enforced —
    confirm which behavior is intended.
    """
    default_error_messages = {
        'invalid': _('Enter a postal code in the format XXXXXX.'),
    }

    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        # Fixed six-digit pattern; remaining arguments pass through to
        # RegexField unchanged.
        super(RUPostalCodeField, self).__init__(r'^\d{6}$',
            max_length, min_length, *args, **kwargs)
class RUPassportNumberField(RegexField):
    """
    Russian internal passport number format.

    XXXX XXXXXX where X - any digit (series and number separated by a
    single space).
    """
    default_error_messages = {
        'invalid': _('Enter a passport number in the format XXXX XXXXXX.'),
    }

    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        # Fixed "4 digits, space, 6 digits" pattern; remaining arguments
        # pass through to RegexField unchanged.
        super(RUPassportNumberField, self).__init__(r'^\d{4} \d{6}$',
            max_length, min_length, *args, **kwargs)
class RUAlienPassportNumberField(RegexField):
    """
    Russian alien's passport number format.

    XX XXXXXXX where X - any digit (series and number separated by a
    single space).
    """
    default_error_messages = {
        'invalid': _('Enter a passport number in the format XX XXXXXXX.'),
    }

    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        # Fixed "2 digits, space, 7 digits" pattern; remaining arguments
        # pass through to RegexField unchanged.
        super(RUAlienPassportNumberField, self).__init__(r'^\d{2} \d{7}$',
            max_length, min_length, *args, **kwargs)
| thor/django-localflavor | localflavor/ru/forms.py | Python | bsd-3-clause | 2,285 |
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
pipe2py.twisted.collections
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides methods for creating asynchronous pipe2py pipes
"""
from pipe2py.modules.pipeforever import asyncPipeForever
from pipe2py.lib.collections import PyPipe
class AsyncPipe(PyPipe):
    """An asynchronous PyPipe object.

    Wraps one ``asyncPipe<Name>`` function of the corresponding pipe2py
    module and lets pipelines be chained fluently via :meth:`pipe` and
    :meth:`loop`.  ``self.module``/``self.name``/``self.output`` are
    provided by the PyPipe base class — presumably set up by
    ``PyPipe.__init__`` from *name*; confirm against lib.collections.
    """
    def __init__(self, name=None, context=None, **kwargs):
        """Build the pipe.

        :param name: pipe module name (e.g. ``'fetch'``)
        :param context: shared pipe2py context object
        :param kwargs: remaining options for the pipe; the special key
            ``input`` supplies the upstream pipe, defaulting to the
            infinite ``forever`` source.
        """
        super(AsyncPipe, self).__init__(name, context)
        # BUGFIX: the original `kwargs.pop('input', asyncPipeForever())`
        # evaluated the default eagerly, constructing (and discarding) a
        # forever-source pipeline on every call even when an explicit
        # input was supplied.  Only build the default when it is needed.
        if 'input' in kwargs:
            self.pipe_input = kwargs.pop('input')
        else:
            self.pipe_input = asyncPipeForever()
        self.pipeline = getattr(self.module, 'asyncPipe%s' % self.name.title())
        self.kwargs = kwargs

    def pipe(self, name, **kwargs):
        """Chain pipe *name* downstream of this pipe's output."""
        return AsyncPipe(name, self.context, input=self.output, **kwargs)

    def loop(self, name, **kwargs):
        """Chain a ``loop`` pipe that embeds pipe *name* as its sub-pipeline."""
        embed = AsyncPipe(name, self.context).pipeline
        return self.pipe('loop', embed=embed, **kwargs)
| klyap/pipe2py | pipe2py/twisted/collections.py | Python | gpl-2.0 | 901 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2012-2015 Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
HTTPStream is an HTTP client library for Python that is designed to allow
incremental receipt and handling of web content.
"""
# Prefer setuptools; fall back to plain distutils on installs without it.
# NOTE(review): Extension is imported in both branches for symmetry but is
# not used below — confirm before removing.
try:
    from setuptools import setup
    from setuptools.extension import Extension
except ImportError:
    from distutils.core import setup
    from distutils.extension import Extension
# Author/email/license/version metadata lives in the package itself so it
# is defined in exactly one place.
from httpstream import __author__, __email__, __license__, __version__
setup(
    name="httpstream",
    version=__version__,
    # Short description reuses this setup module's docstring.
    description=__doc__,
    long_description=open("README.rst").read(),
    author=__author__,
    author_email=__email__,
    url="http://nigelsmall.com/httpstream",
    packages=[
        "httpstream",
        "httpstream.packages",
        "httpstream.packages.urimagic",
    ],
    install_requires=[
        "jsonstream>=1.0.0",
    ],
    license=__license__,
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.3",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Software Development",
    ],
    zip_safe=False,
)
| nigelsmall/httpstream | setup.py | Python | apache-2.0 | 1,865 |
# System built-in modules
import time
from datetime import datetime
import sys
import os
from multiprocessing import Pool
# Project dependency modules
import pandas as pd
pd.set_option('mode.chained_assignment', None) # block warnings due to DataFrame value assignment
import lasagne
# Project modules
sys.path.append('../')
from sleep_control.traffic_emulator import TrafficEmulator
from sleep_control.traffic_server import TrafficServer
from sleep_control.controller import QController, DummyController, NController
from sleep_control.integration import Emulation
from sleep_control.env_models import SJTUModel
from rl.qtable import QAgent
from rl.qnn_theano import QAgentNN
from rl.mixin import PhiMixin, DynaMixin
sys_stdout = sys.stdout
log_file_name = 'message_2016-6-12_G5_BUF2_AR1_b35.log'
# Composite classes
class Phi_QAgentNN(PhiMixin, QAgentNN):
    """Neural-network Q-agent composed with the Phi mixin.

    MRO is (PhiMixin, QAgentNN), so PhiMixin's methods run first —
    presumably stacking the last `phi_length` observations into the agent's
    state; confirm against rl.mixin.PhiMixin.
    """
    def __init__(self, **kwargs):
        # All configuration is forwarded through the cooperative
        # super() chain; see the keyword arguments built below.
        super(Phi_QAgentNN, self).__init__(**kwargs)
# Parameters
# |- Agent
# |- QAgent
actions = [(True, None), (False, 'serve_all')]  # (sleep?, control) action pair
gamma, alpha = 0.5, 0.9  # discount factor, learning rate
explore_strategy, epsilon = 'epsilon', 0.02 # epsilon-greedy exploration
# |- QAgentNN
# | - Phi
phi_length = 5  # number of stacked past observations forming the NN state
dim_state = (1, phi_length, 3+2)  # (channels, history, features)
range_state_slice = [(0, 10), (0, 10), (0, 10), (0, 1), (0, 1)]  # per-feature value ranges
range_state = [[range_state_slice]*phi_length]
# | - Other params
momentum, learning_rate = 0.9, 0.01 # SGD hyper-parameters
num_buffer, memory_size = 2, 200  # replay-memory configuration
reward_scaling, reward_scaling_update = 1, 'adaptive'
batch_size, update_period, freeze_period, rs_period = 100, 4, 16, 32
# |- Env model
# Reward constants: serve/wait/fail rewards, operation/wake costs.
Rs, Rw, Rf, Co, Cw = 1.0, -1.0, -10.0, -5.0, 0.0
beta = 0.35  # mixing weight between throughput reward and operation cost
reward_params = (Rs, Rw, Rf, Co, Cw, beta)
# |- Env
# |- Time
start_time = pd.to_datetime('2014-11-05 09:20:00')
total_time = pd.Timedelta(days=7)
time_step = pd.Timedelta(seconds=2)
# Shift the emulation start back so the replay memory and phi history are
# warmed up before the nominal start_time.
backoff_epochs = num_buffer*memory_size+phi_length
head_datetime = start_time - time_step*backoff_epochs
tail_datetime = head_datetime + total_time
TOTAL_EPOCHS = int(total_time/time_step)
# |- Reward
rewarding = {'serve': Rs, 'wait': Rw, 'fail': Rf}
# load session trace from processed data
session_df =pd.read_csv(
    filepath_or_buffer='../data/trace_dh3.dat',
    parse_dates=['startTime_datetime', 'endTime_datetime']
)
te = TrafficEmulator(
    session_df=session_df, time_step=time_step,
    head_datetime=head_datetime, tail_datetime=tail_datetime,
    rewarding=rewarding,
    verbose=2)
ts = TrafficServer(cost=(Co, Cw), verbose=2)
agent = Phi_QAgentNN(
    phi_length=phi_length,
    dim_state=dim_state, range_state=range_state,
    f_build_net = None,
    batch_size=batch_size, learning_rate=learning_rate, momentum=momentum,
    reward_scaling=reward_scaling, reward_scaling_update=reward_scaling_update, rs_period=rs_period,
    update_period=update_period, freeze_period=freeze_period,
    memory_size=memory_size, num_buffer=num_buffer,
    # Below is QAgent params
    actions=actions, alpha=alpha, gamma=gamma,
    explore_strategy=explore_strategy, epsilon=epsilon,
    verbose=2)
c = QController(agent=agent)
emu = Emulation(te=te, ts=ts, c=c, beta=beta)
# Heavyliftings: run the emulation loop, redirecting stdout into the log
# file, and echo a progress line to the real stdout every 5% of epochs.
t = time.time()
sys.stdout = sys_stdout
log_path = './log/'
# Refuse to overwrite an existing log: each log file is one experiment run.
if os.path.isfile(log_path+log_file_name):
    print "Log file {} already exist. Experiment cancelled.".format(log_file_name)
else:
    log_file = open(log_path+log_file_name,"w")
    print datetime.now().strftime('[%Y-%m-%d %H:%M:%S]'),
    print '{}%'.format(int(100.0*emu.epoch/TOTAL_EPOCHS)),
    print log_file_name
    time.sleep(1)
    # All emulation output from here on goes to the log file.
    sys.stdout = log_file
    while emu.epoch is not None and emu.epoch<TOTAL_EPOCHS:
        # log the wall-clock interval covered by this emulation epoch
        print "Epoch {},".format(emu.epoch),
        left = emu.te.head_datetime + emu.te.epoch*emu.te.time_step
        right = left + emu.te.time_step
        print "{} - {}".format(left.strftime("%Y-%m-%d %H:%M:%S"), right.strftime("%Y-%m-%d %H:%M:%S"))
        emu.step()
        print
        # every 5% of total epochs, print progress to the real stdout
        if emu.epoch%(0.05*TOTAL_EPOCHS)==0:
            sys.stdout = sys_stdout
            print datetime.now().strftime('[%Y-%m-%d %H:%M:%S]'),
            print '{}%'.format(int(100.0*emu.epoch/TOTAL_EPOCHS)),
            print log_file_name
            time.sleep(1)
            sys.stdout = log_file
    # restore stdout and close the log
    sys.stdout = sys_stdout
    log_file.close()
print
print log_file_name,
print '{:.3f} sec,'.format(time.time()-t),
print '{:.3f} min'.format((time.time()-t)/60)
| zaxliu/deepnap | experiments/kdd-exps/experiment_message_2016-6-12_G5_BUF2_AR1_b35_legacy.py | Python | bsd-3-clause | 4,374 |
# -*- coding: utf8 -*-
__version__ = "$Revision$ $Date$"
__author__ = "Guillaume Bour <guillaume@bour.cc>"
__license__ = """
Copyright (C) 2010-2011, Guillaume Bour <guillaume@bour.cc>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, version 3.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from twisted.web import server
from mother.callable import Callable, callback, LoopbackSelf as self
from mother import routing
from tentacles import Object
from tentacles.fields import *
from tentacles.queryset import filter, map, len
class Tag(Object, Callable):
    """REST-exposed tag entity backed by the tentacles ORM.

    Note: ``filter``/``map``/``len`` used below are the tentacles queryset
    helpers imported at module level, not the Python builtins.
    """
    __stor_name__ = 'strawberry__tag'

    id = Integer(pk=True, autoincrement=True)
    name = String(unique=True, allow_none=False)
    description = String()

    def GET(self, id, **kwargs):
        """Return the tag with primary key *id* as a plain dict.

        Responds (404, None) when no such tag exists.  Reference fields
        are skipped so the payload stays serializable.
        """
        tag = list(filter(lambda x: x.id == id, Tag))
        if len(tag) == 0:
            return (404, None)

        tag = tag[0]
        res = {}
        for name, fld in tag.__fields__.iteritems():
            if isinstance(fld, Reference):
                continue
            res[name] = getattr(tag, name)

        return res

    def PUT(self, content):
        """Create a new tag from the *content* dict and return its id.

        Responds 400 on a missing name, duplicate id or duplicate name,
        and 409 on an unknown field.
        """
        if 'name' not in content:
            return (400, "*name* key is mandatory")
        if 'id' in content and len(filter(lambda x: x.id == content['id'], Tag)) > 0:
            return (400, "id already exists")
        # BUGFIX: the uniqueness check must compare Tag.name; the original
        # compared a nonexistent `x.link` attribute (copied from the Link
        # entity), so duplicate names were only caught by the database
        # unique constraint instead of returning a clean 400.
        if len(filter(lambda x: x.name == content['name'], Tag)) > 0:
            return (400, "name must be unique")

        tag = Tag()
        for key, value in content.iteritems():
            if not key in tag.__fields__:
                return (409, "unknown field '%s'" % key)
            setattr(tag, key, value)
        tag.save()

        return tag.id

    def DELETE(self, id):
        """Delete the tag with primary key *id*.

        NOTE(review): links that reference this tag are not touched —
        presumably their association rows simply lose this tag; confirm
        against the Link entity.
        """
        tags = list(filter(lambda x: x.id == id, Tag))
        if len(tags) == 0:
            return (404, "not found")
        elif len(tags) > 1:
            return (500, "return several tags for the same id")

        tags[0].delete()
        return (200, True)

    @callback
    def all(self):
        """Return the list of all tag ids."""
        return list(map(lambda x: x.id, Tag))

    """
    bytag is not one of GET,POST,PUT,DELETE method, it does not take default class
    ctype
    """

    """
    #@callback(url='/{tag}', content_type='internal/python', modifiers={'text/html': self.html_bytag})
    def bytag(self, tag, **kwargs):
        #return "search tag by name= %s" % tagQ
        errors = {}
        if len(tag) == 0:
            errors['tag'] = (10, 'field required')
            return routing.HTTP_400(errors) # bad request

        _tag = list(filter(lambda t: t.name == tag, Tag))
        if len(_tag) == 0:
            return routing.HTTP_404({'tag': (04, 'not found')})

        return routing.HTTP_200(_tag[0])

    def html_bytag(self, tag, __callback__, **kwargs):
        ret = __callback__(tag)
        if not isinstance(ret, routing.HTTP_200):
            #TODO: how to reference root app module ?
            #strawberry = sys.modules['strawberry']
            #print strawberry.http401
            #return routing.Redirect(strawberry.http401)

            # We return an error page: HTTP code == 404, routed to strawberry.404
            return routing.HTTP_404 #(content=Template('404.html', title='404'))

        tag = ret.msg
        print dir(tag), tag.__fields__
        #TODO: tentacles workaround
        links = list(tag.Link__tags)
        tid = tag.id
        related = list(filter(lambda t: not t.Link__tags.isdisjoint(links) and t.id != tid, Tag))

        from mother.template import Template
        return Template('tag.html',
            title="Tag ☄ %s" % tag.name,
            query=tag.name,
            tagname=tag.name,
            links=tag.Link__tags,
            searchtags=[],
            related=related,
        )
    """
| gbour/Strawberry | strawberry/tag.py | Python | agpl-3.0 | 3,892 |
import random
import threading
import uuid
import socket
import pexpect
from pandaharvester.harvestercore import core_utils
# logger
baseLogger = core_utils.setup_logger('ssh_tunnel_pool')
# Pool of SSH tunnels
class SshTunnelPool(object):
    """Pool of persistent SSH port-forwarding tunnels.

    Tunnels are grouped per remote "<host>:<port>" endpoint; each entry is
    a (local_bind_port, pexpect child) pair.  The creation parameters are
    remembered so dead tunnels can be re-established transparently by
    get_tunnel().  All pool mutation is guarded by a single lock.
    """

    # constructor
    def __init__(self):
        self.lock = threading.Lock()
        # dict_key -> list of (local_bind_port, pexpect child) tuples
        self.pool = dict()
        # dict_key -> kwargs needed to rebuild tunnels on reconnect
        self.params = dict()

    # make a dict key
    def make_dict_key(self, host, port):
        """Return the pool key '<host>:<port>'."""
        return '{0}:{1}'.format(host, port)

    # make a tunnel server
    def make_tunnel_server(self, remote_host, remote_port, remote_bind_port=None, num_tunnels=1,
                           ssh_username=None, ssh_password=None, private_key=None, pass_phrase=None,
                           jump_host=None, jump_port=None, login_timeout=60, reconnect=False,
                           with_lock=True):
        """Open SSH tunnels to remote_host:remote_port until num_tunnels exist.

        With reconnect=True the parameters stored from the original call are
        reused.  with_lock=False is for internal callers that already hold
        self.lock.
        """
        dict_key = self.make_dict_key(remote_host, remote_port)
        if with_lock:
            self.lock.acquire()
        # make dicts
        if dict_key not in self.pool:
            self.pool[dict_key] = []
        # preserve parameters on first call; restore them on reconnect
        if not reconnect:
            self.params[dict_key] = {'remote_bind_port': remote_bind_port,
                                     'num_tunnels': num_tunnels,
                                     'ssh_username': ssh_username,
                                     'ssh_password': ssh_password,
                                     'private_key': private_key,
                                     'pass_phrase': pass_phrase,
                                     'jump_host': jump_host,
                                     'jump_port': jump_port,
                                     'login_timeout': login_timeout
                                     }
        else:
            remote_bind_port = self.params[dict_key]['remote_bind_port']
            num_tunnels = self.params[dict_key]['num_tunnels']
            ssh_username = self.params[dict_key]['ssh_username']
            ssh_password = self.params[dict_key]['ssh_password']
            private_key = self.params[dict_key]['private_key']
            pass_phrase = self.params[dict_key]['pass_phrase']
            jump_host = self.params[dict_key]['jump_host']
            jump_port = self.params[dict_key]['jump_port']
            login_timeout = self.params[dict_key]['login_timeout']
        # spawn as many tunnels as are missing from the pool
        for i in range(num_tunnels - len(self.pool[dict_key])):
            # get a free local port by binding to port 0
            # NOTE(review): the port is released before ssh binds it, so a
            # small race window exists — confirm this is acceptable here.
            s = socket.socket()
            s.bind(('', 0))
            com = "ssh -L {local_bind_port}:127.0.0.1:{remote_bind_port} "
            com += "-p {remote_port} {ssh_username}@{remote_host} "
            com += "-o ServerAliveInterval=120 -o ServerAliveCountMax=2 "
            if private_key is not None:
                com += "-i {private_key} "
            if jump_port is not None:
                com += '-o ProxyCommand="ssh -p {jump_port} {ssh_username}@{jump_host} -W %h:%p" '
            local_bind_port = s.getsockname()[1]
            com = com.format(remote_host=remote_host, remote_port=remote_port, remote_bind_port=remote_bind_port,
                             ssh_username=ssh_username, private_key=private_key, jump_host=jump_host,
                             jump_port=jump_port, local_bind_port=local_bind_port)
            s.close()
            # marker echoed after login to confirm the session is usable.
            # BUGFIX: uuid4().hex works on both Python 2 and 3;
            # UUID.get_hex() was removed in Python 3.
            loginString = 'login_to_be_confirmed_with ' + uuid.uuid4().hex
            expected_list = [
                pexpect.EOF,
                pexpect.TIMEOUT,
                "(?i)are you sure you want to continue connecting",
                '(?i)password:',
                '(?i)enter passphrase for key.*',
                loginString,
            ]
            c = pexpect.spawn(com, echo=False)
            # mirror the ssh session into the logger's stream
            # NOTE(review): assumes baseLogger has at least one stream handler.
            c.logfile_read = baseLogger.handlers[0].stream
            isOK = False
            for iTry in range(3):
                idx = c.expect(expected_list, timeout=login_timeout)
                if idx == expected_list.index(loginString):
                    # succeeded
                    isOK = True
                    break
                if idx == 1:
                    # timeout
                    baseLogger.error('timeout when making a tunnel with com={0} out={1}'.format(com,
                                                                                               c.buffer))
                    c.close()
                    break
                if idx == 2:
                    # accept the new host certificate, then keep going
                    c.sendline("yes")
                    idx = c.expect(expected_list, timeout=login_timeout)
                    if idx == 1:
                        # timeout
                        baseLogger.error('timeout after accepting new cert with com={0} out={1}'.format(com,
                                                                                                        c.buffer))
                        c.close()
                        break
                if idx == 3:
                    # password prompt
                    c.sendline(ssh_password)
                elif idx == 4:
                    # passphrase prompt
                    c.sendline(pass_phrase)
                elif idx == 0:
                    # EOF: ssh exited unexpectedly
                    baseLogger.error('something weired with com={0} out={1}'.format(com,
                                                                                    c.buffer))
                    c.close()
                    break
                # exec to confirm login
                c.sendline('echo {0}'.format(loginString))
            if isOK:
                self.pool[dict_key].append((local_bind_port, c))
        if with_lock:
            self.lock.release()

    # get a tunnel
    def get_tunnel(self, remote_host, remote_port):
        """Return ("127.0.0.1", port, child) for a random live tunnel.

        Dead tunnels are closed and the pool is re-established from the
        stored parameters.  port/child are None when no live tunnel is
        available.
        """
        dict_key = self.make_dict_key(remote_host, remote_port)
        self.lock.acquire()
        active_tunnels = []
        someClosed = False
        for port, child in self.pool[dict_key]:
            if child.isalive():
                active_tunnels.append([port, child])
            else:
                child.close()
                someClosed = True
        if someClosed:
            self.make_tunnel_server(remote_host, remote_port, reconnect=True, with_lock=False)
            # BUGFIX: pexpect's liveness check is isalive(); the original
            # called a nonexistent islive(), raising AttributeError on this
            # reconnect path.
            # NOTE(review): closed children are not removed from
            # self.pool[dict_key], so the reconnect may add fewer tunnels
            # than expected — confirm intended behavior upstream.
            active_tunnels = [item for item in self.pool[dict_key] if item[1].isalive()]
        if len(active_tunnels) > 0:
            port, child = random.choice(active_tunnels)
        else:
            port, child = None, None
        self.lock.release()
        return ("127.0.0.1", port, child)
# singleton: instantiate once at import time, then delete the class from the
# module namespace so every importer shares this single pool instance.
sshTunnelPool = SshTunnelPool()
del SshTunnelPool
| dougbenjamin/panda-harvester | pandaharvester/harvestermiddleware/ssh_tunnel_pool.py | Python | apache-2.0 | 6,619 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from thepaper.util import judge_news_crawl
__author__ = 'yinzishao'
import re
import scrapy
from bs4 import BeautifulSoup
import logging
from thepaper.items import NewsItem
import json
logger = logging.getLogger("CyzoneSpider")
from thepaper.settings import *
class CyzoneSpider(scrapy.spiders.Spider):
    """Scrapy spider for cyzone.cn covering two sections: the front-page
    "middle recommendation" article feed (paged via an API) and the
    quick-news ("kuaixun") feed (paged via the last item's timestamp).
    """
    domain = "http://www.cyzone.cn/"
    name = "cyzone"
    allowed_domains = ["cyzone.cn",]
    end_day = END_DAY  # number of days back at which crawling stops (from settings)
    end_now = END_NOW
    mid_flag = 0  # stop flag for the middle-recommendation section (holds the page index once stale items are seen)
    quick_flag = 0  # stop flag for the quick-news section
    quick_page = 1  # quick-news crawl starts from this page (the front page)
    # middle-recommendation section
    # TODO: articles recommended on the front page do not all appear in the API
    middle_next_url = "http://api.cyzone.cn/index.php?m=content&c=index&a=init&tpl=index_page&page=%s"
    quick_url = "http://www.cyzone.cn/category/8/"
    # paging URL built from the timestamp of the last quick-news item on the current page
    quick_json_url = "http://www.cyzone.cn/index.php?m=content&c=index&a=init&tpl=page_kuaixun&inputtime=%s"
    # NOTE(review): "strat" is a typo for "start", but the attribute is
    # referenced by name below, so it is kept as-is.
    strat_middle_next_url =middle_next_url % 1
    # start_urls = [
    #     strat_middle_next_url,
    # ]
    # entry point: schedule the first request of each section
    def start_requests(self):
        # middle-recommendation template
        mid_request = scrapy.Request(self.strat_middle_next_url,callback=self.parse)
        # quick news
        qic_request = scrapy.Request(self.quick_url,callback=self.parse_quick)
        return [mid_request,qic_request]
    def parse(self, response):
        """Parse one page of the middle-recommendation API feed and follow
        each article; requests the next page until stale items are seen."""
        origin_url = response.url
        result = re.search(r"page=(\d+)",origin_url)
        # import pdb;pdb.set_trace()
        pageindex = result.group(1) if result else None
        soup = BeautifulSoup(response.body)
        news_list = soup.find_all("div",class_="article-item clearfix")
        for news in news_list:
            info = news.find("div",class_="item-push-info")
            # author text carries a 3-character suffix that is stripped here
            author = info.text[:-3] if info else None
            news_date = info.span.get("data-time") if info.span else None  # unix timestamp
            # NOTE(review): int(news_date) raises when data-time is missing — confirm the attribute is always present
            struct_date = datetime.datetime.fromtimestamp(int(news_date))
            news_date = struct_date.strftime("%Y-%m-%d %H:%M:%S")
            title =news.find("a",class_="item-title").text if news.find("a",class_="item-title") else None
            news_url =news.find("a",class_="item-title").get("href",None) if news.find("a",class_="item-title") else None
            abstract =news.find("p",class_="item-desc").text if news.find("p",class_="item-desc") else None
            pic = news.find("img").get("src",None) if news.find("img") else None
            # article id is the numeric part of ".../<id>.html"
            id_result = re.search(r"/(\d+)\.html",news_url)
            news_no = id_result.group(1) if id_result else None
            item = NewsItem(abstract=abstract,
                            news_url=news_url,
                            pic=pic,
                            title=title,
                            author=author,
                            news_no=news_no,
                            news_date=news_date,
                            catalogue=u"中间推荐模板")
            # judge_news_crawl returns None once items are older than the cutoff
            item = judge_news_crawl(item)
            if item:
                request = scrapy.Request(news_url,meta={"item":item},callback=self.parse_news)
                yield request
            else:
                self.mid_flag =int(pageindex)
        # keep paging only while no stale item has been seen
        if not self.mid_flag:
            pageindex = int(pageindex)+1
            next_url = self.middle_next_url % pageindex
            yield scrapy.Request(next_url)
    def parse_news(self,response):
        """Fill in body text and tags for a middle-recommendation article."""
        item = response.meta.get("item",NewsItem())
        soup = BeautifulSoup(response.body)
        content = soup.find("div",class_="article-content").text
        tag_list = soup.find_all("a","tag-link")
        tags = [i.text for i in tag_list] if tag_list else None
        item["content"] = content
        item["tags"] = tags
        item["crawl_date"]=NOW
        yield item
    def parse_quick(self,response):
        """Parse one page of the quick-news feed; pages forward using the
        timestamp of the page's last item."""
        soup = BeautifulSoup(response.body)
        news_list_inner = soup.find("div",class_="list-inner")
        next_timestamp=None
        news_list = news_list_inner.find_all("div",class_=re.compile(r"bulletin-item.*")) if news_list_inner else None
        # fallback for the JSON page variant, which has no list-inner wrapper
        if not news_list:
            news_list = soup.find_all("div",class_=re.compile(r"bulletin-item.*"))
        for index,news in enumerate(news_list):
            origin_date = news.find("div",class_="news-time").get("data-time",None) if news.find("div",class_="news-time") else None
            next_timestamp = origin_date if index == len(news_list)-1 else None  # keep the last item's timestamp for the next page
            struct_date = datetime.datetime.fromtimestamp(int(origin_date))
            news_date = struct_date.strftime("%Y-%m-%d %H:%M:%S")
            title =news.find("a",class_="item-title").text if news.find("a",class_="item-title") else None
            news_url =news.find("a",class_="item-title").get("href",None) if news.find("a",class_="item-title") else None
            pic = news.find("img").get("src",None) if news.find("img") else None
            content =news.find("div",class_="item-desc").text if news.find("div",class_="item-desc") else None
            id_result = re.search(r"/(\d+)\.html",news_url)
            news_no = id_result.group(1) if id_result else None
            item = NewsItem(content=content,
                            news_url=news_url,
                            pic=pic,
                            title=title,
                            news_no=news_no,
                            news_date=news_date,
                            catalogue=u"快报")
            item = judge_news_crawl(item)
            if item:
                request = scrapy.Request(news_url,meta={"item":item},callback=self.parse_quick_news)
                yield request
            else:
                self.quick_flag =int(self.quick_page)
        # keep paging only while no stale item has been seen
        if not self.quick_flag:
            if next_timestamp:
                next_quick_url = self.quick_json_url % next_timestamp
                yield scrapy.Request(next_quick_url,callback=self.parse_quick)
            else:
                logger.warning("can't find next_timestamp,url is %s " % response)
    def parse_quick_news(self,response):
        """Fill in source site and tags for a quick-news item."""
        item = response.meta.get("item",NewsItem())
        soup = BeautifulSoup(response.body)
        referer_web = soup.find("span",class_="name").text if soup.find("span",class_="name") else None
        tag_list = soup.find_all("a","tag-link")
        tags = [i.text for i in tag_list] if tag_list else None
        item["tags"] = tags
        item['referer_web'] = referer_web
        item['crawl_date'] = NOW
        yield item
| kll334477/NewsScrapy | thepaper/thepaper/spiders/cyzone_spider.py | Python | lgpl-3.0 | 6,723 |
"""Initial revision
Revision ID: d322f7ece4cd
Revises:
Create Date: 2017-09-25 14:27:15.990400
"""
# revision identifiers, used by Alembic.
revision = 'd322f7ece4cd'
down_revision = None
branch_labels = ('tickets',)
depends_on = '16be1c0cddd0'
from alembic import op
import sqlalchemy as sa
from sqlalchemy import FetchedValue
from netprofile.db import ddl as npd
from netprofile.db import fields as npf
def upgrade():
    """Forward migration: create every table, index and trigger of the
    ticketing module.

    Statement order matters: lookup/configuration tables are created
    first, then the main ``tickets_def`` table, then the tables holding
    foreign keys into it.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- lookup / configuration tables -----------------------------------
    op.create_table('tickets_changes_fields',
    sa.Column('tcfid', npf.UInt32(), npd.Comment('Ticket change field ID'), nullable=False, default=sa.Sequence('tickets_changes_fields_tcfid_seq', start=101, increment=1)),
    sa.Column('name', sa.Unicode(length=255), npd.Comment('Ticket change field name'), nullable=False),
    sa.PrimaryKeyConstraint('tcfid', name=op.f('tickets_changes_fields_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_changes_fields', 'Ticket change fields')
    op.create_index('tickets_changes_fields_u_name', 'tickets_changes_fields', ['name'], unique=True)
    op.create_table('tickets_flags_types',
    sa.Column('tftid', npf.UInt32(), npd.Comment('Ticket flag type ID'), nullable=False, default=sa.Sequence('tickets_flags_types_tftid_seq')),
    sa.Column('name', sa.Unicode(length=255), npd.Comment('Ticket flag type name'), nullable=False),
    sa.Column('descr', sa.UnicodeText(), npd.Comment('Ticket flag type description'), server_default=sa.text('NULL'), nullable=True),
    sa.PrimaryKeyConstraint('tftid', name=op.f('tickets_flags_types_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_flags_types', 'Ticket flag types')
    op.create_index('tickets_flags_types_u_name', 'tickets_flags_types', ['name'], unique=True)
    op.create_table('tickets_origins',
    sa.Column('toid', npf.UInt32(), npd.Comment('Ticket origin ID'), nullable=False, default=sa.Sequence('tickets_origins_toid_seq', start=101, increment=1)),
    sa.Column('name', sa.Unicode(length=255), npd.Comment('Ticket origin name'), nullable=False),
    sa.Column('descr', sa.UnicodeText(), npd.Comment('Ticket origin description'), server_default=sa.text('NULL'), nullable=True),
    sa.PrimaryKeyConstraint('toid', name=op.f('tickets_origins_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_origins', 'Origins of tickets')
    op.create_index('tickets_origins_u_name', 'tickets_origins', ['name'], unique=True)
    op.create_table('tickets_schedulers',
    sa.Column('tschedid', npf.UInt32(), npd.Comment('Ticket scheduler ID'), nullable=False, default=sa.Sequence('tickets_schedulers_tschedid_seq')),
    sa.Column('name', sa.Unicode(length=255), npd.Comment('Ticket scheduler name'), nullable=False),
    sa.Column('sim_user', npf.UInt32(), npd.Comment('Max. simultaneous per user'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('sim_group', npf.UInt32(), npd.Comment('Max. simultaneous per group'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('ov_dur', npf.UInt32(), npd.Comment('Overridden ticket duration (in sec)'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('hour_start', npf.UInt8(), npd.Comment('Allowed starting hour'), server_default=sa.text('0'), nullable=False),
    sa.Column('hour_end', npf.UInt8(), npd.Comment('Allowed ending hour'), server_default=sa.text('23'), nullable=False),
    sa.Column('wdays', npf.UInt8(), npd.Comment('Weekdays bitmask'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('spacing', npf.UInt32(), npd.Comment('Ticket spacing (in sec)'), server_default=sa.text('3600'), nullable=False),
    sa.PrimaryKeyConstraint('tschedid', name=op.f('tickets_schedulers_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_schedulers', 'Ticket scheduling presets')
    op.create_index('tickets_schedulers_u_name', 'tickets_schedulers', ['name'], unique=True)
    op.create_table('tickets_states_types',
    sa.Column('tstid', npf.UInt32(), npd.Comment('Ticket state ID'), nullable=False, default=sa.Sequence('tickets_states_types_tstid_seq')),
    sa.Column('title', sa.Unicode(length=48), npd.Comment('Ticket state title'), nullable=False),
    sa.Column('subtitle', sa.Unicode(length=48), npd.Comment('Ticket state subtitle'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('flow', npf.UInt8(), npd.Comment('Process flow index'), server_default=sa.text('1'), nullable=False),
    sa.Column('is_start', npf.NPBoolean(), npd.Comment('Can be starting state'), server_default=npf.npbool(False), nullable=False),
    sa.Column('is_end', npf.NPBoolean(), npd.Comment('Can be ending state'), server_default=npf.npbool(False), nullable=False),
    sa.Column('allow_client', npf.NPBoolean(), npd.Comment('Can be created by clients'), server_default=npf.npbool(False), nullable=False),
    sa.Column('dur', npf.UInt32(), npd.Comment('Default ticket duration (in sec)'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('style', npf.ASCIIString(length=16), npd.Comment('Ticket state style'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('image', npf.ASCIIString(length=16), npd.Comment('Ticket state image'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('descr', sa.UnicodeText(), npd.Comment('Ticket state description'), server_default=sa.text('NULL'), nullable=True),
    sa.PrimaryKeyConstraint('tstid', name=op.f('tickets_states_types_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_states_types', 'Ticket state types')
    op.create_index('tickets_states_types_i_flow', 'tickets_states_types', ['flow'], unique=False)
    op.create_index('tickets_states_types_i_is_end', 'tickets_states_types', ['is_end'], unique=False)
    op.create_index('tickets_states_types_i_is_start', 'tickets_states_types', ['is_start'], unique=False)
    op.create_index('tickets_states_types_u_tst', 'tickets_states_types', ['title', 'subtitle'], unique=True)
    # --- scheduler assignments, transitions and templates -----------------
    op.create_table('tickets_sched_assign_groups',
    sa.Column('tschedassid', npf.UInt32(), npd.Comment('Scheduler assignment ID'), nullable=False, default=sa.Sequence('tickets_sched_assign_groups_tschedassid_seq')),
    sa.Column('gid', npf.UInt32(), npd.Comment('Group ID'), nullable=False),
    sa.Column('tschedid', npf.UInt32(), npd.Comment('Ticket scheduler ID'), nullable=False),
    sa.ForeignKeyConstraint(['gid'], ['groups.gid'], name='tickets_sched_assign_groups_fk_gid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['tschedid'], ['tickets_schedulers.tschedid'], name='tickets_sched_assign_groups_fk_tschedid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('tschedassid', name=op.f('tickets_sched_assign_groups_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_sched_assign_groups', 'Ticket scheduling assignments for groups')
    op.create_index('tickets_sched_assign_groups_i_tschedid', 'tickets_sched_assign_groups', ['tschedid'], unique=False)
    op.create_index('tickets_sched_assign_groups_u_gid', 'tickets_sched_assign_groups', ['gid'], unique=True)
    op.create_table('tickets_states_trans',
    sa.Column('ttrid', npf.UInt32(), npd.Comment('Ticket transition ID'), nullable=False, default=sa.Sequence('tickets_states_trans_ttrid_seq')),
    sa.Column('name', sa.Unicode(length=48), npd.Comment('Ticket transition name'), nullable=False),
    sa.Column('tstid_from', npf.UInt32(), npd.Comment('From state'), nullable=False),
    sa.Column('tstid_to', npf.UInt32(), npd.Comment('To state'), nullable=False),
    sa.Column('reassign_gid', npf.UInt32(), npd.Comment('Reassign to group ID'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('descr', sa.UnicodeText(), npd.Comment('Ticket transition description'), server_default=sa.text('NULL'), nullable=True),
    sa.ForeignKeyConstraint(['reassign_gid'], ['groups.gid'], name='tickets_states_trans_fk_reassign_gid', onupdate='CASCADE'),
    sa.ForeignKeyConstraint(['tstid_from'], ['tickets_states_types.tstid'], name='tickets_states_trans_fk_tstid_from', onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['tstid_to'], ['tickets_states_types.tstid'], name='tickets_states_trans_fk_tstid_to', onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('ttrid', name=op.f('tickets_states_trans_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_states_trans', 'Ticket state transitions')
    op.create_index('tickets_states_trans_i_reassign_gid', 'tickets_states_trans', ['reassign_gid'], unique=False)
    op.create_index('tickets_states_trans_i_tstid_to', 'tickets_states_trans', ['tstid_to'], unique=False)
    op.create_index('tickets_states_trans_u_trans', 'tickets_states_trans', ['tstid_from', 'tstid_to'], unique=True)
    op.create_table('tickets_sched_assign_users',
    sa.Column('tschedassid', npf.UInt32(), npd.Comment('Scheduler assignment ID'), nullable=False, default=sa.Sequence('tickets_sched_assign_users_tschedassid_seq')),
    sa.Column('uid', npf.UInt32(), npd.Comment('User ID'), nullable=False),
    sa.Column('tschedid', npf.UInt32(), npd.Comment('Ticket scheduler ID'), nullable=False),
    sa.ForeignKeyConstraint(['tschedid'], ['tickets_schedulers.tschedid'], name='tickets_sched_assign_users_fk_tschedid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['uid'], ['users.uid'], name='tickets_sched_assign_users_fk_uid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('tschedassid', name=op.f('tickets_sched_assign_users_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_sched_assign_users', 'Ticket scheduling assignments for users')
    op.create_index('tickets_sched_assign_users_i_tschedid', 'tickets_sched_assign_users', ['tschedid'], unique=False)
    op.create_index('tickets_sched_assign_users_u_uid', 'tickets_sched_assign_users', ['uid'], unique=True)
    op.create_table('tickets_templates',
    sa.Column('ttplid', npf.UInt32(), npd.Comment('Ticket template ID'), nullable=False, default=sa.Sequence('tickets_templates_ttplid_seq')),
    sa.Column('name', sa.Unicode(length=255), npd.Comment('Ticket template name'), nullable=False),
    sa.Column('tpl_name', sa.Unicode(length=255), npd.Comment('Template for new ticket name'), nullable=False),
    sa.Column('tpl_descr', sa.UnicodeText(), npd.Comment('Template for new ticket description'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('assign_self', npf.NPBoolean(), npd.Comment('Assign to logged in user'), server_default=npf.npbool(False), nullable=False),
    sa.Column('assign_owngrp', npf.NPBoolean(), npd.Comment("Assign to user's group"), server_default=npf.npbool(False), nullable=False),
    sa.Column('assign_uid', npf.UInt32(), npd.Comment('Assign to user ID'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('assign_gid', npf.UInt32(), npd.Comment('Assign to group ID'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('tschedid', npf.UInt32(), npd.Comment('Ticket scheduler ID'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('dur', npf.UInt32(), npd.Comment('Default ticket duration (in sec)'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('tstid', npf.UInt32(), npd.Comment('Initial state'), nullable=False),
    sa.Column('toid', npf.UInt32(), npd.Comment('Ticket origin ID'), nullable=False),
    sa.Column('on_create', npf.ASCIIString(length=255), npd.Comment('Callback on ticket creation'), server_default=sa.text('NULL'), nullable=True),
    sa.ForeignKeyConstraint(['assign_gid'], ['groups.gid'], name='tickets_templates_fk_assign_gid', onupdate='CASCADE'),
    sa.ForeignKeyConstraint(['assign_uid'], ['users.uid'], name='tickets_templates_fk_assign_uid', onupdate='CASCADE'),
    sa.ForeignKeyConstraint(['toid'], ['tickets_origins.toid'], name='tickets_templates_fk_toid', onupdate='CASCADE'),
    sa.ForeignKeyConstraint(['tschedid'], ['tickets_schedulers.tschedid'], name='tickets_templates_fk_tschedid', onupdate='CASCADE'),
    sa.ForeignKeyConstraint(['tstid'], ['tickets_states_types.tstid'], name='tickets_templates_fk_tstid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('ttplid', name=op.f('tickets_templates_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_templates', 'Templates for new tickets')
    op.create_index('tickets_templates_i_assign_gid', 'tickets_templates', ['assign_gid'], unique=False)
    op.create_index('tickets_templates_i_assign_uid', 'tickets_templates', ['assign_uid'], unique=False)
    op.create_index('tickets_templates_i_toid', 'tickets_templates', ['toid'], unique=False)
    op.create_index('tickets_templates_i_tstid', 'tickets_templates', ['tstid'], unique=False)
    op.create_index('tickets_templates_u_name', 'tickets_templates', ['name'], unique=True)
    # --- main ticket table ------------------------------------------------
    op.create_table('tickets_def',
    sa.Column('ticketid', npf.UInt32(), npd.Comment('Ticket ID'), nullable=False, default=sa.Sequence('tickets_def_ticketid_seq')),
    sa.Column('entityid', npf.UInt32(), npd.Comment('Entity ID'), nullable=False),
    sa.Column('tstid', npf.UInt32(), npd.Comment('Ticket state ID'), nullable=False),
    sa.Column('toid', npf.UInt32(), npd.Comment('Ticket origin ID'), nullable=False),
    sa.Column('assigned_uid', npf.UInt32(), npd.Comment('Assigned to user ID'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('assigned_gid', npf.UInt32(), npd.Comment('Assigned to group ID'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('assigned_time', sa.TIMESTAMP(), npd.Comment('Assigned to date'), nullable=True),
    sa.Column('dur', npf.UInt32(), npd.Comment('Ticket duration (in sec)'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('archived', npf.NPBoolean(), npd.Comment('Is archived'), server_default=npf.npbool(False), nullable=False),
    sa.Column('show_client', npf.NPBoolean(), npd.Comment('Show ticket to client'), server_default=npf.npbool(False), nullable=False),
    sa.Column('name', sa.Unicode(length=255), npd.Comment('Ticket name'), nullable=False),
    sa.Column('descr', sa.UnicodeText(), npd.Comment('Ticket description'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('ctime', sa.TIMESTAMP(), npd.Comment('Creation timestamp'), server_default=FetchedValue(), nullable=True),
    sa.Column('mtime', sa.TIMESTAMP(), npd.Comment('Last modification timestamp'), server_default=npd.CurrentTimestampDefault(on_update=True), nullable=False),
    sa.Column('ttime', sa.TIMESTAMP(), npd.Comment('Last state transition timestamp'), nullable=True),
    sa.Column('cby', npf.UInt32(), npd.Comment('Created by'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('mby', npf.UInt32(), npd.Comment('Modified by'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('tby', npf.UInt32(), npd.Comment('Transition by'), server_default=sa.text('NULL'), nullable=True),
    sa.ForeignKeyConstraint(['assigned_gid'], ['groups.gid'], name='tickets_def_fk_assigned_gid', onupdate='CASCADE', ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['assigned_uid'], ['users.uid'], name='tickets_def_fk_assigned_uid', onupdate='CASCADE', ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['cby'], ['users.uid'], name='tickets_def_fk_cby', onupdate='CASCADE', ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['entityid'], ['entities_def.entityid'], name='tickets_def_fk_entityid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['mby'], ['users.uid'], name='tickets_def_fk_mby', onupdate='CASCADE', ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['tby'], ['users.uid'], name='tickets_def_fk_tby', onupdate='CASCADE', ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['toid'], ['tickets_origins.toid'], name='tickets_def_fk_toid', onupdate='CASCADE'),
    sa.ForeignKeyConstraint(['tstid'], ['tickets_states_types.tstid'], name='tickets_def_fk_tstid', onupdate='CASCADE'),
    sa.PrimaryKeyConstraint('ticketid', name=op.f('tickets_def_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_def', 'Tickets')
    # DB-side triggers shipped with the netprofile_tickets module.
    op.create_trigger('netprofile_tickets', 'tickets_def', 'before', 'insert', 'd322f7ece4cd')
    op.create_trigger('netprofile_tickets', 'tickets_def', 'before', 'update', 'd322f7ece4cd')
    op.create_trigger('netprofile_tickets', 'tickets_def', 'after', 'insert', 'd322f7ece4cd')
    op.create_trigger('netprofile_tickets', 'tickets_def', 'after', 'update', 'd322f7ece4cd')
    op.create_trigger('netprofile_tickets', 'tickets_def', 'after', 'delete', 'd322f7ece4cd')
    op.create_index('tickets_def_i_archived', 'tickets_def', ['archived'], unique=False)
    op.create_index('tickets_def_i_assigned_gid', 'tickets_def', ['assigned_gid'], unique=False)
    op.create_index('tickets_def_i_assigned_uid', 'tickets_def', ['assigned_uid'], unique=False)
    op.create_index('tickets_def_i_cby', 'tickets_def', ['cby'], unique=False)
    op.create_index('tickets_def_i_entityid', 'tickets_def', ['entityid'], unique=False)
    op.create_index('tickets_def_i_mby', 'tickets_def', ['mby'], unique=False)
    op.create_index('tickets_def_i_name', 'tickets_def', ['name'], unique=False)
    op.create_index('tickets_def_i_tby', 'tickets_def', ['tby'], unique=False)
    op.create_index('tickets_def_i_toid', 'tickets_def', ['toid'], unique=False)
    op.create_index('tickets_def_i_tstid', 'tickets_def', ['tstid'], unique=False)
    # --- tables referencing tickets_def -----------------------------------
    op.create_table('tickets_changes_def',
    sa.Column('tcid', npf.UInt32(), npd.Comment('Ticket change ID'), nullable=False, default=sa.Sequence('tickets_changes_def_tcid_seq')),
    sa.Column('ticketid', npf.UInt32(), npd.Comment('Ticket ID'), nullable=False),
    sa.Column('ttrid', npf.UInt32(), npd.Comment('Ticket transition ID'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('uid', npf.UInt32(), npd.Comment('User ID'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('ts', sa.TIMESTAMP(), npd.Comment('Ticket change timestamp'), server_default=npd.CurrentTimestampDefault(on_update=False), nullable=False),
    sa.Column('show_client', npf.NPBoolean(), npd.Comment('Show comment to client'), server_default=npf.npbool(False), nullable=False),
    sa.Column('from_client', npf.NPBoolean(), npd.Comment('This change is from client'), server_default=npf.npbool(False), nullable=False),
    sa.Column('comments', sa.UnicodeText(), npd.Comment('Ticket change comments'), server_default=sa.text('NULL'), nullable=True),
    sa.ForeignKeyConstraint(['ticketid'], ['tickets_def.ticketid'], name='tickets_changes_def_fk_ticketid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['ttrid'], ['tickets_states_trans.ttrid'], name='tickets_changes_def_fk_ttrid', onupdate='CASCADE', ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['uid'], ['users.uid'], name='tickets_changes_def_fk_uid', onupdate='CASCADE', ondelete='SET NULL'),
    sa.PrimaryKeyConstraint('tcid', name=op.f('tickets_changes_def_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_changes_def', 'Ticket changes')
    op.create_index('tickets_changes_def_i_show_client', 'tickets_changes_def', ['show_client'], unique=False)
    op.create_index('tickets_changes_def_i_ticketid', 'tickets_changes_def', ['ticketid'], unique=False)
    op.create_index('tickets_changes_def_i_ts', 'tickets_changes_def', ['ts'], unique=False)
    op.create_index('tickets_changes_def_i_ttrid', 'tickets_changes_def', ['ttrid'], unique=False)
    op.create_index('tickets_changes_def_i_uid', 'tickets_changes_def', ['uid'], unique=False)
    op.create_table('tickets_dependencies',
    sa.Column('ticketid_parent', npf.UInt32(), npd.Comment('Ticket which is dependent on'), nullable=False),
    sa.Column('ticketid_child', npf.UInt32(), npd.Comment('Ticket which depends on a parent'), nullable=False),
    sa.ForeignKeyConstraint(['ticketid_child'], ['tickets_def.ticketid'], name='tickets_dependencies_fk_ticketid_child', onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['ticketid_parent'], ['tickets_def.ticketid'], name='tickets_dependencies_fk_ticketid_parent', onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('ticketid_parent', 'ticketid_child', name=op.f('tickets_dependencies_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_dependencies', 'Ticket resolution dependencies')
    op.create_trigger('netprofile_tickets', 'tickets_dependencies', 'before', 'insert', 'd322f7ece4cd')
    op.create_trigger('netprofile_tickets', 'tickets_dependencies', 'before', 'update', 'd322f7ece4cd')
    op.create_index('tickets_dependencies_i_ticketid_child', 'tickets_dependencies', ['ticketid_child'], unique=False)
    op.create_table('tickets_files',
    sa.Column('tfid', npf.UInt32(), npd.Comment('Ticket-file mapping ID'), nullable=False, default=sa.Sequence('tickets_files_tfid_seq')),
    sa.Column('ticketid', npf.UInt32(), npd.Comment('Ticket ID'), nullable=False),
    sa.Column('fileid', npf.UInt32(), npd.Comment('File ID'), nullable=False),
    sa.ForeignKeyConstraint(['fileid'], ['files_def.fileid'], name='tickets_files_fk_fileid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['ticketid'], ['tickets_def.ticketid'], name='tickets_files_fk_ticketid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('tfid', name=op.f('tickets_files_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_files', 'File mappings to tickets')
    op.create_index('tickets_files_i_fileid', 'tickets_files', ['fileid'], unique=False)
    op.create_index('tickets_files_u_tfl', 'tickets_files', ['ticketid', 'fileid'], unique=True)
    op.create_table('tickets_flags_def',
    sa.Column('tfid', npf.UInt32(), npd.Comment('Ticket flag ID'), nullable=False, default=sa.Sequence('tickets_flags_def_tfid_seq')),
    sa.Column('ticketid', npf.UInt32(), npd.Comment('Ticket ID'), nullable=False),
    sa.Column('tftid', npf.UInt32(), npd.Comment('Ticket flag type ID'), nullable=False),
    sa.ForeignKeyConstraint(['tftid'], ['tickets_flags_types.tftid'], name='tickets_flags_types_fk_tftid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['ticketid'], ['tickets_def.ticketid'], name='tickets_flags_def_fk_ticketid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('tfid', name=op.f('tickets_flags_def_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_flags_def', 'Ticket flag mappings')
    op.create_trigger('netprofile_tickets', 'tickets_flags_def', 'after', 'insert', 'd322f7ece4cd')
    op.create_trigger('netprofile_tickets', 'tickets_flags_def', 'after', 'delete', 'd322f7ece4cd')
    op.create_index('tickets_flags_def_i_tftid', 'tickets_flags_def', ['tftid'], unique=False)
    op.create_index('tickets_flags_def_u_tf', 'tickets_flags_def', ['ticketid', 'tftid'], unique=True)
    op.create_table('tickets_changes_bits',
    sa.Column('tcbid', npf.UInt32(), npd.Comment('Ticket change bit ID'), nullable=False, default=sa.Sequence('tickets_changes_bits_tcbid_seq')),
    sa.Column('tcid', npf.UInt32(), npd.Comment('Ticket change ID'), nullable=False),
    sa.Column('tcfid', npf.UInt32(), npd.Comment('Ticket change field ID'), nullable=False),
    sa.Column('old', sa.Unicode(length=255), npd.Comment('Old value'), server_default=sa.text('NULL'), nullable=True),
    sa.Column('new', sa.Unicode(length=255), npd.Comment('New value'), server_default=sa.text('NULL'), nullable=True),
    sa.ForeignKeyConstraint(['tcfid'], ['tickets_changes_fields.tcfid'], name='tickets_changes_bits_fk_tcfid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['tcid'], ['tickets_changes_def.tcid'], name='tickets_changes_bits_fk_tcid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('tcbid', name=op.f('tickets_changes_bits_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_changes_bits', 'Ticket change bits')
    op.create_index('tickets_changes_bits_i_tcfid', 'tickets_changes_bits', ['tcfid'], unique=False)
    op.create_index('tickets_changes_bits_u_tcf', 'tickets_changes_bits', ['tcid', 'tcfid'], unique=True)
    op.create_table('tickets_changes_flagmod',
    sa.Column('tcfmodid', npf.UInt32(), npd.Comment('Ticket change flag modification ID'), nullable=False, default=sa.Sequence('tickets_changes_flagmod_tcfmodid_seq')),
    sa.Column('tcid', npf.UInt32(), npd.Comment('Ticket change ID'), nullable=False),
    sa.Column('tftid', npf.UInt32(), npd.Comment('Ticket flag type ID'), nullable=False),
    sa.Column('newstate', npf.NPBoolean(), npd.Comment('Resulting flag state'), nullable=False),
    sa.ForeignKeyConstraint(['tcid'], ['tickets_changes_def.tcid'], name='tickets_changes_flagmod_fk_tcid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['tftid'], ['tickets_flags_types.tftid'], name='tickets_changes_flagmod_fk_tftid', onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('tcfmodid', name=op.f('tickets_changes_flagmod_pk')),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.set_table_comment('tickets_changes_flagmod', 'Ticket change bits modifying flags')
    op.create_index('tickets_changes_flagmod_i_tftid', 'tickets_changes_flagmod', ['tftid'], unique=False)
    op.create_index('tickets_changes_flagmod_u_tcflag', 'tickets_changes_flagmod', ['tcid', 'tftid'], unique=True)
    # ### end Alembic commands ###
def downgrade():
    """Reverse migration: drop every ticketing table and its indexes, in
    the reverse of the creation order so foreign keys never dangle.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index('tickets_changes_flagmod_u_tcflag', table_name='tickets_changes_flagmod')
    op.drop_index('tickets_changes_flagmod_i_tftid', table_name='tickets_changes_flagmod')
    op.drop_table('tickets_changes_flagmod')
    op.drop_index('tickets_changes_bits_u_tcf', table_name='tickets_changes_bits')
    op.drop_index('tickets_changes_bits_i_tcfid', table_name='tickets_changes_bits')
    op.drop_table('tickets_changes_bits')
    op.drop_index('tickets_flags_def_u_tf', table_name='tickets_flags_def')
    op.drop_index('tickets_flags_def_i_tftid', table_name='tickets_flags_def')
    op.drop_table('tickets_flags_def')
    op.drop_index('tickets_files_u_tfl', table_name='tickets_files')
    op.drop_index('tickets_files_i_fileid', table_name='tickets_files')
    op.drop_table('tickets_files')
    op.drop_index('tickets_dependencies_i_ticketid_child', table_name='tickets_dependencies')
    op.drop_table('tickets_dependencies')
    op.drop_index('tickets_changes_def_i_uid', table_name='tickets_changes_def')
    op.drop_index('tickets_changes_def_i_ttrid', table_name='tickets_changes_def')
    op.drop_index('tickets_changes_def_i_ts', table_name='tickets_changes_def')
    op.drop_index('tickets_changes_def_i_ticketid', table_name='tickets_changes_def')
    op.drop_index('tickets_changes_def_i_show_client', table_name='tickets_changes_def')
    op.drop_table('tickets_changes_def')
    # Main ticket table goes after everything that references it.
    op.drop_index('tickets_def_i_tstid', table_name='tickets_def')
    op.drop_index('tickets_def_i_toid', table_name='tickets_def')
    op.drop_index('tickets_def_i_tby', table_name='tickets_def')
    op.drop_index('tickets_def_i_name', table_name='tickets_def')
    op.drop_index('tickets_def_i_mby', table_name='tickets_def')
    op.drop_index('tickets_def_i_entityid', table_name='tickets_def')
    op.drop_index('tickets_def_i_cby', table_name='tickets_def')
    op.drop_index('tickets_def_i_assigned_uid', table_name='tickets_def')
    op.drop_index('tickets_def_i_assigned_gid', table_name='tickets_def')
    op.drop_index('tickets_def_i_archived', table_name='tickets_def')
    op.drop_table('tickets_def')
    op.drop_index('tickets_templates_u_name', table_name='tickets_templates')
    op.drop_index('tickets_templates_i_tstid', table_name='tickets_templates')
    op.drop_index('tickets_templates_i_toid', table_name='tickets_templates')
    op.drop_index('tickets_templates_i_assign_uid', table_name='tickets_templates')
    op.drop_index('tickets_templates_i_assign_gid', table_name='tickets_templates')
    op.drop_table('tickets_templates')
    op.drop_index('tickets_sched_assign_users_u_uid', table_name='tickets_sched_assign_users')
    op.drop_index('tickets_sched_assign_users_i_tschedid', table_name='tickets_sched_assign_users')
    op.drop_table('tickets_sched_assign_users')
    op.drop_index('tickets_states_trans_u_trans', table_name='tickets_states_trans')
    op.drop_index('tickets_states_trans_i_tstid_to', table_name='tickets_states_trans')
    op.drop_index('tickets_states_trans_i_reassign_gid', table_name='tickets_states_trans')
    op.drop_table('tickets_states_trans')
    op.drop_index('tickets_sched_assign_groups_u_gid', table_name='tickets_sched_assign_groups')
    op.drop_index('tickets_sched_assign_groups_i_tschedid', table_name='tickets_sched_assign_groups')
    op.drop_table('tickets_sched_assign_groups')
    # Lookup / configuration tables go last.
    op.drop_index('tickets_states_types_u_tst', table_name='tickets_states_types')
    op.drop_index('tickets_states_types_i_is_start', table_name='tickets_states_types')
    op.drop_index('tickets_states_types_i_is_end', table_name='tickets_states_types')
    op.drop_index('tickets_states_types_i_flow', table_name='tickets_states_types')
    op.drop_table('tickets_states_types')
    op.drop_index('tickets_schedulers_u_name', table_name='tickets_schedulers')
    op.drop_table('tickets_schedulers')
    op.drop_index('tickets_origins_u_name', table_name='tickets_origins')
    op.drop_table('tickets_origins')
    op.drop_index('tickets_flags_types_u_name', table_name='tickets_flags_types')
    op.drop_table('tickets_flags_types')
    op.drop_index('tickets_changes_fields_u_name', table_name='tickets_changes_fields')
    op.drop_table('tickets_changes_fields')
    # ### end Alembic commands ###
| unikmhz/npui | netprofile_tickets/migrations/d322f7ece4cd_initial_revision.py | Python | agpl-3.0 | 30,296 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-09-16 10:46
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Schema changes for organization_network:
    #  - add an optional short ``code`` field to Team
    #  - redefine PersonActivity.umr as a nullable FK to UMR with
    #    SET_NULL behaviour on deletion of the referenced UMR

    dependencies = [
        ('organization_network', '0010_personactivity_weeks'),
    ]
    operations = [
        migrations.AddField(
            model_name='team',
            name='code',
            field=models.CharField(blank=True, max_length=64, null=True, verbose_name='code'),
        ),
        migrations.AlterField(
            model_name='personactivity',
            name='umr',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='organization_network.UMR', verbose_name='UMR'),
        ),
    ]
| Ircam-Web/mezzanine-organization | organization/network/migrations/0011_auto_20160916_1246.py | Python | agpl-3.0 | 800 |
# Based on cage v1.1.4
# http://www.alcyone.com/software/cage/
# Copyright (C) 2002-2006 Erik Max Francis <max@alcyone.com>
# GPL License
class Topology:
    """Encapsulation of the shape and dimensionality of a cellular automaton.

    Abstract base: subclasses define the address space and store the
    per-cell state.
    """
    def get(self, address):
        """Return the state of the cell at ``address``."""
        raise NotImplementedError
    def set(self, address, state):
        """Store ``state`` in the cell at ``address``."""
        raise NotImplementedError
    def normalize(self, address):
        """Return a canonical form of ``address``; the exact semantics
        (wrapping, bounds checking, ...) are subclass-defined."""
        raise NotImplementedError
class Neighborhood:
    """Abstraction of the set of cells adjacent to any given cell.

    Designed as a mixin: ``states`` calls ``self.get``, which must be
    supplied by the concrete class (e.g. a Topology subclass).
    """

    def neighbors(self, address):
        """Return the list of addresses adjacent to ``address``."""
        raise NotImplementedError

    def states(self, address):
        """Return the cell state of every neighbor of ``address``, in
        the order produced by ``neighbors``."""
        values = []
        for neighbor in self.neighbors(address):
            values.append(self.get(neighbor))
        return values
class GridTopology(Topology):
    """A bounded, two-dimensional topology: a rectangular grid of cells.

    Reads outside the grid yield ``border``; writes outside the grid
    raise ``IndexError``.
    """

    background = 0  # state every cell starts in
    border = 0      # state reported for off-grid reads

    def __init__(self, size):
        """Create a ``size`` = (width, height) grid of background cells."""
        self.width, self.height = size
        # One independent column list per x coordinate.
        self.buffer = [[self.background] * self.height
                       for _ in range(self.width)]
        self.zero = (0, 0)

    def normalize(self, address):
        """Return ``address`` unchanged when it lies on the grid, else None."""
        x, y = address
        if 0 <= x < self.width and 0 <= y < self.height:
            return address
        return None

    def get(self, address):
        """Return the state at ``address`` (``border`` when off-grid)."""
        if self.normalize(address) is None:
            return self.border
        x, y = address
        return self.buffer[x][y]

    def set(self, address, state):
        """Store ``state`` at ``address``; off-grid addresses raise IndexError."""
        if self.normalize(address) is None:
            raise IndexError
        x, y = address
        self.buffer[x][y] = state
class ExtendedNeighborhood(Neighborhood):
    """A neighborhood that retrieves a list of states on each direction."""

    def states(self, address, max=1):
        """Return one list of cell states per direction around ``address``.

        NOTE: the parameter name ``max`` shadows the builtin; it is kept
        unchanged for backward compatibility with keyword callers.
        """
        per_direction = []
        for direction in self.neighbors(address, max):
            per_direction.append([self.get(cell) for cell in direction])
        return per_direction
class Automaton:
    """Abstraction for the actions that can be made over the different
    cells and states of a specified map."""

    def __init__(self, map):
        # ``map`` is kept by reference; ``generation`` counts update() calls.
        self.generation = 0
        self.map = map

    def update(self):
        """Advance the automaton by one generation."""
        self.generation = self.generation + 1
class Rule:
    """Definition of rules to follow to change a cell value in an automaton."""
    def __init__(self, map, address):
        # All state setup is delegated to populate() so that subclasses
        # only need to override one hook.
        self.populate(map, address)
    def populate(self, map, address):
        """Initialise the rule from ``map`` and ``address`` (subclass hook)."""
        raise NotImplementedError
    def apply(self):
        """Apply the rule; concrete semantics are subclass-defined."""
        raise NotImplementedError
| wichovw/tca-gt | server/tca/cellaut.py | Python | mit | 2,703 |
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import tryUrlencode, ss, sp
from couchpotato.core.helpers.variable import cleanHost, mergeDicts
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
from datetime import timedelta
from urllib2 import URLError
import json
import os
import traceback
log = CPLog(__name__)
class Sabnzbd(Downloader):
    """Downloader plugin that talks to a SABnzbd server over its HTTP API.

    Handles queueing NZBs (by URL or raw file data), polling queue/history
    status, and cleaning up failed or completed downloads.
    """

    protocol = ['nzb']

    def download(self, data = None, media = None, filedata = None):
        """Send a release to SABnzbd.

        Uses mode 'addfile' when raw NZB data is supplied, otherwise
        'addurl' with the release URL.  Returns a download-id wrapper when
        a file was uploaded, True when a URL was queued, False on failure.
        """
        if not media: media = {}
        if not data: data = {}

        log.info('Sending "%s" to SABnzbd.', data.get('name'))

        req_params = {
            'cat': self.conf('category'),
            'mode': 'addurl',
            'nzbname': self.createNzbName(data, media),
            'priority': self.conf('priority'),
        }

        nzb_filename = None
        if filedata:
            # Anything shorter than ~50 bytes can't be a real NZB document.
            if len(filedata) < 50:
                log.error('No proper nzb available: %s', filedata)
                return False

            # If it's a .rar, it adds the .rar extension, otherwise it stays .nzb
            nzb_filename = self.createFileName(data, filedata, media)
            req_params['mode'] = 'addfile'
        else:
            req_params['name'] = data.get('url')

        try:
            # BUGFIX: compare strings with ==, not the identity operator
            # "is", which only matched here by accident of CPython literal
            # interning and is not a reliable string comparison.
            if nzb_filename and req_params.get('mode') == 'addfile':
                sab_data = self.call(req_params, files = {'nzbfile': (ss(nzb_filename), filedata)})
            else:
                sab_data = self.call(req_params)
        except URLError:
            log.error('Failed sending release, probably wrong HOST: %s', traceback.format_exc(0))
            return False
        except:
            log.error('Failed sending release, use API key, NOT the NZB key: %s', traceback.format_exc(0))
            return False

        log.debug('Result from SAB: %s', sab_data)
        if sab_data.get('status') and not sab_data.get('error'):
            log.info('NZB sent to SAB successfully.')
            if filedata:
                return self.downloadReturnId(sab_data.get('nzo_ids')[0])
            else:
                return True
        else:
            log.error('Error getting data from SABNZBd: %s', sab_data)
            return False

    def getAllDownloadStatus(self, ids):
        """Return status entries for the given nzo ids.

        Checks both the active queue and the recent history; entries in
        history that SAB marks failed (or completed with a fail message)
        are reported as 'failed'.
        """
        log.debug('Checking SABnzbd download status.')

        # Go through Queue
        try:
            queue = self.call({
                'mode': 'queue',
            })
        except:
            log.error('Failed getting queue: %s', traceback.format_exc(1))
            return []

        # Go through history items
        try:
            history = self.call({
                'mode': 'history',
                'limit': 15,
            })
        except:
            log.error('Failed getting history json: %s', traceback.format_exc(1))
            return []

        release_downloads = ReleaseDownloadList(self)

        # Get busy releases
        for nzb in queue.get('slots', []):
            if nzb['nzo_id'] in ids:
                status = 'busy'
                # SAB prefixes the name when the archive needs a password.
                if 'ENCRYPTED / ' in nzb['filename']:
                    status = 'failed'

                release_downloads.append({
                    'id': nzb['nzo_id'],
                    'name': nzb['filename'],
                    'status': status,
                    'original_status': nzb['status'],
                    'timeleft': nzb['timeleft'] if not queue['paused'] else -1,
                })

        # Get old releases
        for nzb in history.get('slots', []):
            if nzb['nzo_id'] in ids:
                status = 'busy'
                if nzb['status'] == 'Failed' or (nzb['status'] == 'Completed' and nzb['fail_message'].strip()):
                    status = 'failed'
                elif nzb['status'] == 'Completed':
                    status = 'completed'

                release_downloads.append({
                    'id': nzb['nzo_id'],
                    'name': nzb['name'],
                    'status': status,
                    'original_status': nzb['status'],
                    'timeleft': str(timedelta(seconds = 0)),
                    # 'storage' may point at the final file or its folder.
                    'folder': sp(os.path.dirname(nzb['storage']) if os.path.isfile(nzb['storage']) else nzb['storage']),
                })

        return release_downloads

    def removeFailed(self, release_download):
        """Delete a failed download (and its files) from queue and history."""
        log.info('%s failed downloading, deleting...', release_download['name'])

        try:
            self.call({
                'mode': 'queue',
                'name': 'delete',
                'del_files': '1',
                'value': release_download['id']
            }, use_json = False)
            self.call({
                'mode': 'history',
                'name': 'delete',
                'del_files': '1',
                'value': release_download['id']
            }, use_json = False)
        except:
            log.error('Failed deleting: %s', traceback.format_exc(0))
            return False

        return True

    def processComplete(self, release_download, delete_files = False):
        """Remove a finished download from SAB's history, keeping the files."""
        log.debug('Requesting SabNZBd to remove the NZB %s.', release_download['name'])

        try:
            self.call({
                'mode': 'history',
                'name': 'delete',
                'del_files': '0',
                'value': release_download['id']
            }, use_json = False)
        except:
            log.error('Failed removing: %s', traceback.format_exc(0))
            return False

        return True

    def call(self, request_params, use_json = True, **kwargs):
        """Perform one SABnzbd API request.

        Merges the API key and JSON output flag into *request_params*.
        When *use_json* is set, decodes the response and returns the
        sub-object named after the request mode (or the whole document);
        otherwise returns the raw response body.
        """
        url = cleanHost(self.conf('host'), ssl = self.conf('ssl')) + 'api?' + tryUrlencode(mergeDicts(request_params, {
            'apikey': self.conf('api_key'),
            'output': 'json'
        }))

        data = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()}, **kwargs)
        if use_json:
            d = json.loads(data)
            if d.get('error'):
                log.error('Error getting data from SABNZBd: %s', d.get('error'))
                return {}

            return d.get(request_params['mode']) or d
        else:
            return data
| hamiltont/CouchPotatoServer | couchpotato/core/downloaders/sabnzbd/main.py | Python | gpl-3.0 | 6,237 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.