repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
vrutkovs/gnome-music | gnomemusic/widgets.py | Python | gpl-2.0 | 34,519 | 0.000985 | # Copyright (c) 2013 Vadim Rutkovsky <vrutkovs@redhat.com>
# Copyright (c) 2013 Shivani Poddar <shivani.poddar92@gmail.com>
# Copyright (c) 2013 Arnel A. Borja <kyoushuu@yahoo.com>
# Copyright (c) 2013 Seif Lotfy <seif@lotfy.com>
# Copyright (c) 2013 Sai Suman Prayaga <suman.sai14@gmail.com>
# Copyright (c) 2013 Jackson Isaac <jacksonisaac2008@gmail.com>
# Copyright (c) 2013 Felipe Borges <felipe10borges@gmail.com>
# Copyright (c) 2013 Giovanni Campagna <scampa.giovanni@gmail.com>
# Copyright (c) 2013 Guillaume Quintard <guillaume.quintard@gmail.com>
#
# GNOME Music is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# GNOME Music is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with GNOME Music; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# The GNOME Music authors hereby grant permission for non-GPL compatible
# GStreamer plugins to be used and distributed together with GStreamer
# and GNOME Music. This permission is above and beyond the permissions
# granted by the GPL license by which GNOME Music is covered. If you
# modify this code, you may extend this exception to your version of the
# code, but you are not obligated to do so. If you do not wish to do so,
# delete this exception statement from your version.
from gi.repository import Gtk, Gdk, Gd, GLib, GObject, Pango, Gio
from gi.repository import GdkPixbuf, Grl
from gettext import gettext as _, ngettext
from gnomemusic.grilo import grilo
from gnomemusic.albumArtCache import AlbumArtCache
from gnomemusic.player import DiscoveryStatus
from gnomemusic.playlists import Playlists, StaticPlaylists
from gnomemusic import log
import logging
logger = logging.getLogger(__name__)
# Module-level singletons and UI constants shared by the widgets below.
ALBUM_ART_CACHE = AlbumArtCache.get_default()
NOW_PLAYING_ICON_NAME = 'media-playback-start-symbolic'
ERROR_ICON_NAME = 'dialog-error-symbolic'
try:
    # Read the preferred maximum title width (in characters) from GSettings;
    # fall back to a default when the schema or key is unavailable.
    settings = Gio.Settings.new('org.gnome.Music')
    MAX_TITLE_WIDTH = settings.get_int('max-width-chars')
except Exception as e:
    MAX_TITLE_WIDTH = 20
    logger.error("Error on setting widget max-width-chars: %s", str(e))
playlists = Playlists.get_default()  # application-wide playlists manager
class StarHandler():
    """Handles the clickable 'favorite' star cell in a track list.

    Owns a CellRendererClickablePixbuf attached to the parent's view and,
    on click, flips the favorite flag both in the list model and in the
    Grilo-backed database.
    """

    def __repr__(self):
        return '<StarHandler>'

    @log
    def __init__(self, parent, star_index):
        # parent: widget providing ``model`` and ``view`` attributes
        # star_index: model column index holding the boolean favorite flag
        self.star_index = star_index
        self.star_renderer_click = False
        self.parent = parent

    @log
    def _add_star_renderers(self, list_widget, cols, hidden=False):
        """Attach the star renderer to the first column of *list_widget*."""
        star_renderer = CellRendererClickablePixbuf(self.parent.view, hidden=hidden)
        star_renderer.connect("clicked", self._on_star_toggled)
        list_widget.add_renderer(star_renderer, lambda *args: None, None)
        cols[0].clear_attributes(star_renderer)
        # The renderer's visibility is driven by the same model column as the flag.
        cols[0].add_attribute(star_renderer, 'show_star', self.star_index)

    @log
    def _on_star_toggled(self, widget, path):
        """Toggle the favorite flag for the row at *path* and persist it."""
        try:
            _iter = self.parent.model.get_iter(path)
        except TypeError:
            # Invalid path; ignore the click.
            return
        try:
            # NOTE(review): column 9 value 2 appears to mark a row whose star
            # must not be toggled — confirm against the model definition.
            if self.parent.model.get_value(_iter, 9) == 2:
                return
        except AttributeError:
            # Parent has no usable model (e.g. during teardown); nothing to do.
            return
        new_value = not self.parent.model.get_value(_iter, self.star_index)
        self.parent.model.set_value(_iter, self.star_index, new_value)
        song_item = self.parent.model.get_value(_iter, 5)
        grilo.toggle_favorite(song_item)  # toggle favorite status in database
        playlists.update_static_playlist(StaticPlaylists.Favorites)

        # Use this flag to ignore the upcoming _on_item_activated call
        self.star_renderer_click = True
class AlbumWidget(Gtk.EventBox):
    """Detail widget for a single album: artwork plus its track list."""

    # NOTE(review): these are class-level (shared) attributes, including a
    # mutable list — confirm that per-instance state is intended elsewhere.
    tracks = []
    duration = 0
    # Placeholder artwork shown while loading / when no cover art exists.
    loadingIcon = ALBUM_ART_CACHE.get_default_icon(256, 256, True)
    noArtworkIcon = ALBUM_ART_CACHE.get_default_icon(256, 256, False)

    def __repr__(self):
        return '<AlbumWidget>'
    @log
    def __init__(self, player, parentview):
        """Build the album view from the AlbumWidget.ui resource.

        :param player: playback controller used to start and track playback
        :param parentview: view that embeds this widget
        """
        Gtk.EventBox.__init__(self)
        self.player = player
        # Iter of the row currently marked as "now playing", cleaned up on
        # the next activation.
        self.iterToClean = None
        self.parentview = parentview
        self.ui = Gtk.Builder()
        self.ui.add_from_resource('/org/gnome/Music/AlbumWidget.ui')
        self._create_model()
        self.view = Gd.MainView(
            shadow_type=Gtk.ShadowType.NONE
        )
        self.view.set_view_type(Gd.MainViewType.LIST)
        self.album = None
        self.header_bar = None
        self.view.connect('item-activated', self._on_item_activated)
        view_box = self.ui.get_object('view')
        self.ui.get_object('scrolledWindow').set_placement(Gtk.CornerType.
                                                           TOP_LEFT)
        self.view.connect('selection-mode-request', self._on_selection_mode_request)
        # Re-parent the generic view's child into our own container so we
        # control its margins directly.
        child_view = self.view.get_children()[0]
        child_view.set_margin_top(64)
        child_view.set_margin_bottom(64)
        child_view.set_margin_end(32)
        self.view.remove(child_view)
        view_box.add(child_view)
        self.add(self.ui.get_object('AlbumWidget'))
        # Favorite flag lives in model column 9.
        self.star_handler = StarHandler(self, 9)
        self._add_list_renderers()
        self.get_style_context().add_class('view')
        self.get_style_context().add_class('content-view')
        self.show_all()
    @log
    def _on_selection_mode_request(self, *args):
        """Enter selection mode by clicking the header bar's select button."""
        self.header_bar._select_button.clicked()
    @log
    def _on_item_activated(self, widget, id, path):
        """Start playback of the activated track.

        Ignores the activation when it was triggered by a click on the
        favorite star (flag set by StarHandler).
        """
        if self.star_handler.star_renderer_click:
            self.star_handler.star_renderer_click = False
            return
        _iter = self.model.get_iter(path)
        # Column 10 holds the discovery status; skip tracks that failed.
        if self.model.get_value(_iter, 10) != DiscoveryStatus.FAILED:
            if (self.iterToClean and self.player.playlistId == self.album):
                # Restore the previous "now playing" row to its plain title.
                item = self.model.get_value(self.iterToClean, 5)
                title = AlbumArtCache.get_media_title(item)
                self.model.set_value(self.iterToClean, 0, title)
                # Hide now playing icon
                self.model.set_value(self.iterToClean, 6, False)
            self.player.set_playlist('Album', self.album, self.model, _iter, 5, 11)
            self.player.set_playing(True)
@log
def _add_list_renderers(self):
list_widget = self.view.get_generic_view()
cols = list_widget.get_columns()
cols[0].set_min_width(100)
cols[0].set_max_width(200)
cells = cols[0].get_cells()
cells[2].set_visible(False)
cells[1].set_visible(False)
now_playing_symbol_renderer = Gtk.CellRendererPixbuf(xpad=0,
xalign=0.5,
yalign=0.5)
column_now_playing = Gtk.TreeViewColumn()
column_now_playing.set_fixed_width(48)
column_now_playing.pack_start(now_playing_symbol_renderer, False)
column_now_playing.set_cell_data_func(now_playing_symbol_renderer,
self._on_list_widget_icon_render, None)
list_widget.insert_column(column_now_playing, 0)
type_re | nderer = Gd.StyledTextRenderer(
xpad=16,
ellipsize=Pango.EllipsizeMode.END,
xalign=0.0
)
list_widget.add_renderer(type_renderer, lambda *args: None, None)
cols[0].clear_attributes(type_renderer)
cols[0].add_attribute(type_renderer, 'markup', 0)
durationRenderer = Gd | .StyledTextRenderer(
xpad=16,
ellipsize=Pango.EllipsizeMode.END,
xalign=1.0
)
durationRenderer.add_class('dim-label')
list_widget.add_renderer(durationRenderer, lambda *args: None, None)
cols[0].clear_attributes(durationRenderer)
cols[0].add_attribute(dura |
DAFRELECTRONICS/IoTprinter | gfx/adalogo.py | Python | cc0-1.0 | 4,011 | 0.168786 | width = 75
height = 75
data = [
0x00,0x00,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x01,0xf0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x03,0xf0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x03,0xf8,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x07,0xf8,0x00, | 0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x0f,0xf8, | 0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x1f,0xfc,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x1f,0xfc,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x3f,0xfc,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x7f,0xfe,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x7f,0xfe,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0xff,0xfe,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x01,0xff,0xff,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x03,0xff,0xff,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x03,0xff,0xff,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x07,0xff,0xff,0x80,0x00,0x00,0x00,
0x00,0x00,0x00,0x07,0xff,0xff,0x80,0x00,0x00,0x00,
0x00,0x00,0x00,0x07,0xff,0xff,0x80,0x00,0x00,0x00,
0x00,0x00,0x00,0x0f,0xff,0xff,0x80,0x00,0x00,0x00,
0x00,0x00,0x00,0x0f,0xff,0xff,0x80,0x00,0x00,0x00,
0x7f,0xff,0xfc,0x0f,0xff,0xff,0x80,0x00,0x00,0x00,
0xff,0xff,0xff,0x0f,0xff,0xff,0x80,0x00,0x00,0x00,
0xff,0xff,0xff,0xcf,0xff,0xff,0x80,0x00,0x00,0x00,
0xff,0xff,0xff,0xef,0xff,0xff,0x80,0x00,0x00,0x00,
0x7f,0xff,0xff,0xf7,0xff,0xff,0x80,0x00,0x00,0x00,
0x3f,0xff,0xff,0xff,0xfb,0xff,0x00,0x00,0x00,0x00,
0x3f,0xff,0xff,0xff,0xf1,0xff,0x3f,0xf0,0x00,0x00,
0x1f,0xff,0xff,0xff,0xf1,0xfe,0xff,0xfe,0x00,0x00,
0x0f,0xff,0xff,0xff,0xf1,0xff,0xff,0xff,0xc0,0x00,
0x0f,0xff,0xff,0xff,0xe1,0xff,0xff,0xff,0xf8,0x00,
0x07,0xff,0xff,0xff,0xe1,0xff,0xff,0xff,0xff,0x00,
0x03,0xff,0xff,0xff,0xe1,0xff,0xff,0xff,0xff,0xc0,
0x01,0xff,0xff,0x3f,0xe1,0xff,0xff,0xff,0xff,0xe0,
0x01,0xff,0xfe,0x07,0xe3,0xff,0xff,0xff,0xff,0xe0,
0x00,0xff,0xff,0x03,0xe3,0xff,0xff,0xff,0xff,0xe0,
0x00,0x7f,0xff,0x00,0xf7,0xff,0xff,0xff,0xff,0xc0,
0x00,0x3f,0xff,0xc0,0xff,0xc0,0x7f,0xff,0xff,0x80,
0x00,0x1f,0xff,0xf0,0xff,0x00,0x3f,0xff,0xff,0x00,
0x00,0x0f,0xff,0xff,0xff,0x00,0x7f,0xff,0xfc,0x00,
0x00,0x07,0xff,0xff,0xff,0x01,0xff,0xff,0xf8,0x00,
0x00,0x01,0xff,0xff,0xff,0xff,0xff,0xff,0xf0,0x00,
0x00,0x00,0x7f,0xff,0xff,0xff,0xff,0xff,0xc0,0x00,
0x00,0x00,0x1f,0xfc,0x7f,0xff,0xff,0xff,0x80,0x00,
0x00,0x00,0x7f,0xf8,0x78,0xff,0xff,0xfe,0x00,0x00,
0x00,0x00,0xff,0xf0,0x78,0x7f,0xff,0xfc,0x00,0x00,
0x00,0x01,0xff,0xe0,0xf8,0x7f,0xff,0xf0,0x00,0x00,
0x00,0x03,0xff,0xc0,0xf8,0x3f,0xdf,0xc0,0x00,0x00,
0x00,0x07,0xff,0xc1,0xfc,0x3f,0xe0,0x00,0x00,0x00,
0x00,0x07,0xff,0x87,0xfc,0x1f,0xf0,0x00,0x00,0x00,
0x00,0x0f,0xff,0xcf,0xfe,0x1f,0xf8,0x00,0x00,0x00,
0x00,0x0f,0xff,0xff,0xff,0x1f,0xf8,0x00,0x00,0x00,
0x00,0x1f,0xff,0xff,0xff,0x1f,0xfc,0x00,0x00,0x00,
0x00,0x1f,0xff,0xff,0xff,0xff,0xfc,0x00,0x00,0x00,
0x00,0x1f,0xff,0xff,0xff,0xff,0xfe,0x00,0x00,0x00,
0x00,0x3f,0xff,0xff,0xff,0xff,0xfe,0x00,0x00,0x00,
0x00,0x3f,0xff,0xff,0xff,0xff,0xfe,0x00,0x00,0x00,
0x00,0x3f,0xff,0xff,0x3f,0xff,0xfe,0x00,0x00,0x00,
0x00,0x7f,0xff,0xff,0x3f,0xff,0xfe,0x00,0x00,0x00,
0x00,0x7f,0xff,0xff,0x3f,0xff,0xfe,0x00,0x00,0x00,
0x00,0x7f,0xff,0xfe,0x3f,0xff,0xfe,0x00,0x00,0x00,
0x00,0xff,0xff,0xfc,0x1f,0xff,0xfe,0x00,0x00,0x00,
0x00,0xff,0xff,0xf8,0x1f,0xff,0xfe,0x00,0x00,0x00,
0x00,0xff,0xff,0xe0,0x0f,0xff,0xfe,0x00,0x00,0x00,
0x01,0xff,0xff,0x80,0x07,0xff,0xfe,0x00,0x00,0x00,
0x01,0xff,0xfc,0x00,0x03,0xff,0xfe,0x00,0x00,0x00,
0x01,0xff,0xe0,0x00,0x01,0xff,0xfe,0x00,0x00,0x00,
0x01,0xff,0x00,0x00,0x00,0xff,0xfe,0x00,0x00,0x00,
0x00,0xf8,0x00,0x00,0x00,0x7f,0xfe,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x1f,0xfe,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x0f,0xfe,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x07,0xfe,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x01,0xfe,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x00,0x00
]
|
ODM2/ODMToolsPython | odmtools/lib/ObjectListView/CellEditor.py | Python | bsd-3-clause | 19,490 | 0.003797 | # -*- coding: utf-8 -*-
#----------------------------------------------------------------------------
# Name: CellEditor.py
# Author: Phillip Piper
# Created: 3 April 2008
# SVN-ID: $Id$
# Copyright: (c) 2008 by Phillip Piper, 2008
# License: wxWindows license
#----------------------------------------------------------------------------
# Change log:
# 2008/05/26 JPP Fixed pyLint annoyances
# 2008/04/04 JPP Initial version complete
#----------------------------------------------------------------------------
# To do:
# - there has to be a better DateTimeEditor somewhere!!
"""
The *CellEditor* module provides some editors for standard types that can be installed
in an *ObjectListView*. It also provides a *Registry* that maps between standard types
and functions that will create editors for that type.
Cell Editors
A cell editor can be any subclass of wx.Window provided that it supports the
following protocol:
SetValue(self, value)
The editor should show the given value for editing
GetValue(self)
The editor should return the value that it holds. Return None to indicate
an invalid value. The returned value should be of the correct type, i.e.
don't return a string if the editor was registered for the bool type.
The editor should invoke FinishCellEdit() on its parent ObjectListView when it
loses focus or when the user commits the change by pressing Return or Enter.
The editor should invoke CancelCellEdit() on its parent ObjectListView when
the user presses Escape.
Editor Registry
The editor registry remembers a function that will be called to create
an editor for a given type.
"""
__author__ = "Phillip Piper"
__date__ = "3 May 2008"
__version__ = "1.0"
import datetime
import wx
#======================================================================
# Editor Registry
# Module-level variable holding the lazily created singleton registry.
_cellEditorRegistrySingleton = None

def CellEditorRegistry():
    """
    Return the registry that is managing type to creator functions.

    The registry is created on first use and shared process-wide.
    """
    global _cellEditorRegistrySingleton
    if _cellEditorRegistrySingleton is None:
        _cellEditorRegistrySingleton = EditorRegistry()
    return _cellEditorRegistrySingleton
class EditorRegistry:
    """
    An *EditorRegistry* manages a mapping of types onto creator functions.

    When called, creator functions will create the appropriate kind of cell
    editor for a value of the registered type.
    """

    def __init__(self):
        # Maps a Python type -> function(olv, rowIndex, subItemIndex) that
        # returns an editor widget parented on the ObjectListView.
        self.typeToFunctionMap = {}
        # Standard types and their creator functions.
        # NOTE: ``unicode``/``long``/``basestring`` make this module Python 2 only.
        self.typeToFunctionMap[str] = self._MakeStringEditor
        self.typeToFunctionMap[unicode] = self._MakeStringEditor
        self.typeToFunctionMap[bool] = self._MakeBoolEditor
        self.typeToFunctionMap[int] = self._MakeIntegerEditor
        self.typeToFunctionMap[long] = self._MakeLongEditor
        self.typeToFunctionMap[float] = self._MakeFloatEditor
        self.typeToFunctionMap[datetime.datetime] = self._MakeDateTimeEditor
        self.typeToFunctionMap[datetime.date] = self._MakeDateEditor
        self.typeToFunctionMap[datetime.time] = self._MakeTimeEditor
        # TODO: Install editors for mxDateTime if installed

    def GetCreatorFunction(self, aValue):
        """
        Return the creator function that is registered for the type of the given value.
        Return None if there is no registered function for the type.
        """
        return self.typeToFunctionMap.get(type(aValue), None)

    def RegisterCreatorFunction(self, aType, aFunction):
        """
        Register the given function to be called when we need an editor for the given type.

        The function must accept three parameter: an ObjectListView, row index, and subitem index.
        It should return a wxWindow that is parented on the listview, and that responds to:
           - SetValue(newValue)
           - GetValue() to return the value shown in the editor
        """
        self.typeToFunctionMap[aType] = aFunction

    #----------------------------------------------------------------------------
    # Creator functions for standard types

    @staticmethod
    def _MakeStringEditor(olv, rowIndex, subItemIndex):
        return BaseCellTextEditor(olv, subItemIndex)

    @staticmethod
    def _MakeBoolEditor(olv, rowIndex, subItemIndex):
        return BooleanEditor(olv)

    @staticmethod
    def _MakeIntegerEditor(olv, rowIndex, subItemIndex):
        return IntEditor(olv, subItemIndex, validator=NumericValidator())

    @staticmethod
    def _MakeLongEditor(olv, rowIndex, subItemIndex):
        return LongEditor(olv, subItemIndex)

    @staticmethod
    def _MakeFloatEditor(olv, rowIndex, subItemIndex):
        # Validator accepts digits, sign, exponent markers and decimal point.
        return FloatEditor(olv, subItemIndex, validator=NumericValidator("0123456789-+eE."))

    @staticmethod
    def _MakeDateTimeEditor(olv, rowIndex, subItemIndex):
        dte = DateTimeEditor(olv, subItemIndex)
        # Reuse the column's format string (when it is one) for the editor.
        column = olv.columns[subItemIndex]
        if isinstance(column.stringConverter, basestring):
            dte.formatString = column.stringConverter
        return dte

    @staticmethod
    def _MakeDateEditor(olv, rowIndex, subItemIndex):
        dte = DateEditor(olv, style=wx.DP_DROPDOWN | wx.DP_SHOWCENTURY | wx.WANTS_CHARS)
        #dte.SetValidator(MyValidator(olv))
        return dte

    @staticmethod
    def _MakeTimeEditor(olv, rowIndex, subItemIndex):
        editor = TimeEditor(olv, subItemIndex)
        # Same format-string forwarding as the datetime editor above.
        column = olv.columns[subItemIndex]
        if isinstance(column.stringConverter, basestring):
            editor.formatString = column.stringConverter
        return editor
#======================================================================
# Cell editors
class BooleanEditor(wx.Choice):
"""This is a simple editor to edit a boolean value that can be used in an
ObjectLi | stView"""
d | ef __init__(self, *args, **kwargs):
kwargs["choices"] = ["True", "False"]
wx.Choice.__init__(self, *args, **kwargs)
def GetValue(self):
"Get the value from the editor"
return self.GetSelection() == 0
def SetValue(self, value):
"Put a new value into the editor"
if value:
self.Select(0)
else:
self.Select(1)
#----------------------------------------------------------------------------
class BaseCellTextEditor(wx.TextCtrl):
    """This is a base text editor for text-like editors used in an ObjectListView"""

    def __init__(self, olv, subItemIndex, **kwargs):
        # Handle Enter/Tab ourselves so the list view can commit the edit
        # or advance to the next cell.
        style = wx.TE_PROCESS_ENTER | wx.TE_PROCESS_TAB
        # Allow for odd case where parent isn't an ObjectListView
        if hasattr(olv, "columns"):
            if olv.HasFlag(wx.LC_ICON):
                # Icon views use centred, possibly multi-line, labels.
                style |= (wx.TE_CENTRE | wx.TE_MULTILINE)
            else:
                # Align the editor text like the column it edits.
                style |= olv.columns[subItemIndex].GetAlignmentForText()
        wx.TextCtrl.__init__(self, olv, style=style, **kwargs)
        # With the MULTILINE flag, the text control always has a vertical
        # scrollbar, which looks stupid. I don't know how to get rid of it.
        # This doesn't do it:
        # self.ToggleWindowStyle(wx.VSCROLL)
#----------------------------------------------------------------------------
class IntEditor(BaseCellTextEditor):
    """This is a text editor for integers for use in an ObjectListView"""

    def GetValue(self):
        "Get the value from the editor, or None when it is not a valid integer"
        s = wx.TextCtrl.GetValue(self).strip()
        try:
            return int(s)
        except ValueError:
            return None

    def SetValue(self, value):
        "Put a new value into the editor (ints are rendered as text)"
        if isinstance(value, int):
            value = repr(value)
        wx.TextCtrl.SetValue(self, value)
#----------------------------------------------------------------------------
class LongEditor(BaseCellTextEditor):
"""This is a text editor for long values for use in an ObjectListView"""
def GetValue(self):
"Get the value from the editor"
s = wx.TextCtrl.GetValue(self).strip()
try:
return long(s)
except ValueError:
return None
def SetValue(self, value):
|
hlamer/kate | addons/kate/pate/src/plugins/format.py | Python | lgpl-2.1 | 19,591 | 0.007045 | # -*- coding: utf-8 -*-
#
# Kate/Pâté plugins to work with C++ code formatting
# Copyright 2010-2013 by Alex Turbov <i.zaufi@gmail.com>
#
#
# This software is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
#
'''Plugins to work with C++ code formatting'''
from PyKDE4.kdecore import i18nc
from PyKDE4.ktexteditor import KTextEditor
from PyKDE4.kdeui import KXMLGUIClient
import kate
import kate.ui
import kate.view
from libkatepate.decorators import *
from libkatepate import selection
def getLeftNeighbour(lineStr, column):
    '''Return the character just left of *column* in *lineStr*, or None
    when *column* is at the start of the line.
    '''
    return lineStr[column - 1] if column else None


def getRightNeighbour(lineStr, column):
    '''Return the character just right of *column* in *lineStr*, or None
    when there is no such character.
    '''
    nextColumn = column + 1
    return lineStr[nextColumn] if nextColumn < len(lineStr) else None


def looksLikeTemplateAngelBracket(lineStr, column):
    '''Heuristic check: does the '<' or '>' at *column* look like a C++
    template angle bracket rather than a shift/comparison/arrow operator?
    '''
    bracket = lineStr[column]
    assert(bracket in '<>')
    left = getLeftNeighbour(lineStr, column)
    right = getRightNeighbour(lineStr, column)
    if bracket == '<':
        if left == '<' or right == '<':
            # "<<" anywhere on the line: shift operator, never a template.
            return False
        if left == ' ' and right == '=':
            # " <=": operator<=()
            return False
        if lineStr[:column].strip().startswith('template'):
            # Template declaration opening the line: `template < typename ...'
            return True
        if left == ' ' and right == ' ':
            # " < ": operator<()
            return False
        return True
    if bracket == '>':
        if lineStr.strip().startswith('>'):
            # A line opening with '>' can close a formatted declaration,
            # e.g. `typedef <...\n> type;'
            return True
        if left == ' ' and right == ' ':
            # " > ": operator>()
            return False
        if left == ' ' and right == '=':
            # ">=": operator>=()
            return False
        if left == '-':
            # "->": member access
            return False
        return True
#
# TODO Probably decorators may help to simplify this code ???
#
def getRangeTopology(breakChars):
'''Get range opened w/ `openCh' and closed w/ `closeCh'
@return tuple w/ current range, list of nested ranges
and list of positions of break characters
@note Assume cursor positioned whithin that range already.
'''
document = kate.activeDocument()
view = kate.activeView()
pos = view.cursorPosition()
stack = list()
nestedRanges = list()
breakPositions = list()
firstIteration = True
found = False
# Iterate from the current line towards a document start
for cl in range(pos.line(), -1, -1) | :
lineStr = str(document.line(cl))
i | f not firstIteration: # skip first iteration
pos.setColumn(len(lineStr)) # set current column to the end of current line
else:
firstIteration = False # do nothing on first iteration
# Iterate from the current column to a line start
for cc in range(pos.column() - 1, -1, -1):
#kate.kDebug("c: current position" + str(cl) + "," + str(cc) + ",ch='" + lineStr[cc] + "'")
# Check open/close brackets
if lineStr[cc] == ')': # found closing char: append its position to the stack
stack.append((cl, cc, False))
#kate.kDebug("o( Add position: " + str(stack[-1]))
continue
if lineStr[cc] == '(': # found open char...
if len(stack): # if stack isn't empty (i.e. there are some closing chars met)
#kate.kDebug("o( Pop position: " + str(stack[-1]))
nrl, nrc, isT = stack.pop() # remove last position from the stack
if not isT:
nestedRanges.append( # and append a nested range
KTextEditor.Range(cl, cc, nrl, nrc)
)
else:
raise LookupError(
i18nc(
'@info'
, 'Misbalanced brackets: at <numid>%1</numid>,<numid>%2</numid> and <numid>%3</numid>,<numid>%4</numid>'
, cl + 1, cc + 1, nrl + 1, nrc + 1
)
)
else: # otherwise,
openPos = (cl, cc + 1, False) # remember range start (exclude an open char)
#kate.kDebug("o( Found position: " + str(openPos))
found = True
break
continue
# Check for template angel brackets
if lineStr[cc] == '>':
if looksLikeTemplateAngelBracket(lineStr, cc):
stack.append((cl, cc, True))
#kate.kDebug("o< Add position: " + str(stack[-1]))
#else:
#kate.kDebug("o< Doesn't looks like template: " + str(cl) + "," + str(cc))
continue
if lineStr[cc] == '<':
if not looksLikeTemplateAngelBracket(lineStr, cc):
#kate.kDebug("o< Doesn't looks like template: " + str(cl) + "," + str(cc + 1))
pass
elif len(stack): # if stack isn't empty (i.e. there are some closing chars met)
#kate.kDebug("o< Pop position: " + str(stack[-1]))
nrl, nrc, isT = stack.pop() # remove last position from the stack
if isT:
nestedRanges.append( # and append a nested range
KTextEditor.Range(cl, cc, nrl, nrc)
)
else:
raise LookupError(
i18nc(
'@info'
, 'Misbalanced brackets: at <numid>%1</numid>,<numid>%2</numid> and <numid>%3</numid>,<numid>%4</numid>'
, cl + 1, cc + 1, nrl + 1, nrc + 1
)
)
else:
openPos = (cl, cc + 1, True) # remember range start (exclude an open char)
#kate.kDebug("o< Found position: " + str(openPos))
found = True
break
continue
if lineStr[cc] in breakChars and len(stack) == 0:
breakPositions.append(KTextEditor.Cursor(cl, cc))
# Did we found smth on the current line?
if found:
break |
JohanComparat/nbody-npt-functions | bin/bin_MD/DF_read_density_field.py | Python | cc0-1.0 | 4,660 | 0.03176 | # cd pySU/pyMultidark/trunk/bin/fortranfile-0.2.1/
import numpy as n
import os
from os.path import join
from astropy.io import fits
import time
import fortranfile
import sys
DFdir = join("/data2", "users", "gustavo", "BigMD", "1Gpc_3840_Planck1_New", "DENSFIELDS")
mockDir = "/data1/DATA/eBOSS/Multidark-box-mocks/v1.0/parts/"
def writeDFMock(dataCat, DFfile, Lbox = 1000.):
print dataCat, DFfile
x, y, z, vx, vy ,vz ,Vmax, Vpeak ,Mvir, parent_id, snap_id, kind, z_dis = n.loadtxt(dataCat ,unpack=True)
path_to_outputCat = dataCat[:-4] + ".DF.fits.gz"
# opens the DF file
f = fortranfile.FortranFile(DFfile)
gridx, gridy, gridz = f.readInts()
dx = Lbox/gridx
# convert QSO positions into indexes
i = (x/dx).astype(int)
j = (y/dx).astype(int)
k= (z/dx).astype(int)
#init the output array :
delta = n.empty_like(x)
#delta1 = n.empty_like(x)
#delta2 = n.empty_like(x)
# now loops over k (every line of the file) and assigns delta values.
for kk in range(gridx):
sel = (k==kk)
N = i[sel] + gridx * j[sel]
DF = f.readReals()
delta[sel] = DF[N]
"""
# distance 1 mean density field in the plane
sel1 = (sel)&(i>=1)&(i<gridx-1)&(j>=1)&(j<gridx-1)
N1 = n.transpose([ (i[sel1]-1) + gridx * (j[sel1] -1), (i[sel1]) + gridx * (j[sel1] -1), (i[sel1]-1) + gridx * (j[sel1]), (i[sel1]+1) + gridx * (j[sel1] +1), (i[sel1]+1) + gridx * (j[sel1] ), (i[sel1]) + gridx * (j[sel1] +1), (i[sel1]+1) + gridx * (j[sel1] -1), (i[sel1]-1) + gridx * (j[sel1] +1) ])
delta1[sel1] = n.array([ n.mean(DF[el]) for el in N1 ])
# assign -1 err value to points on the boundary
border1 = (sel)&(sel1==False)
delta1[border1] = n.ones_like(delta1[border1])*-1.
# distance 2 mean density field in the plane
sel2 = (sel)&(i>=2)&(i<gridx-2)&(j>=2)&(j<gridx-2)
N2 = n.transpose([ (i[sel2]-2) + gridx * (j[sel2] -2), (i[sel2]-2) + gridx * (j[sel2] -1), (i[sel2]-2) + gridx * (j[sel2] ), (i[sel2]-2) + gridx * (j[sel2] +1), (i[sel2]-2) + gridx * (j[sel2] +2), (i[sel2]-1) + gridx * (j[sel2] + 2), (i[sel2]) + gridx * (j[sel2] +2), (i[sel2]+11) + gridx * (j[sel2] +2), (i[sel2] + 2) + gridx * (j[sel2] +2), (i[sel2] + 2) + gridx * (j[sel2] +1), (i[sel2] + 2) + gridx * (j[sel2] ), (i[sel2] + 2) + gridx * (j[sel2] -1), (i[sel2] + 2) + gridx * (j[sel2] -2), (i[sel2] + 1) + gridx * (j[sel2] -2), (i[sel2] ) + gridx * (j[sel2] -2), (i[sel2] - 1) + gridx * (j[sel2] -2) ]) -1
delta2[sel2] = n.array([ n.mean(DF[el]) for el in N2 ])
# assign -1 err value to points on the boundary
border2 = (sel)&(sel2==False)
delta2[border2] = n.ones_like(delta2[border2])*-1.
"""
f.close()
c0 = fits.Column(name="DF",format='D', array=delta )
#c01 = fits.Column(name="DF_N1",format='D', array=delta1 )
#c02 = fits.Column(name="DF_N2",format='D', array=delta2 )
c1 = fits.Column(name="x",format='D', array=x )
c2 = fits.Column(name="y",format='D', array=y )
c3 = fits.Column(name="z",format='D', array=z )
c4 = fits.Column(name="vx",format='D', array=vx )
c5 = fits.Column(name="vy",format='D', array=vy )
c6 = fits.Column(name="vz", | format='D', array=vz )
c7 = fits.Column(name="Vmax",format='D', array=Vmax )
c8 = fits.Column(name="Vpeak",format= | 'D', array=Vpeak )
c9 = fits.Column(name="Mvir",format='D', array=Mvir )
c10 = fits.Column(name="parent_id",format='D', array=parent_id )
c11 = fits.Column(name="snap_id",format='D', array=snap_id )
c12 = fits.Column(name="kind",format='D', array=kind )
c13 = fits.Column(name="z_dis",format='D', array=z_dis )
# now writes the catalog
cols = fits.ColDefs([c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c0])#, c01, c02 ])
hdu = fits.BinTableHDU.from_columns(cols)
os.system("rm -rf "+path_to_outputCat)
hdu.writeto(path_to_outputCat)
sys.exit()
DFfile = join(DFdir,"dmdens_cic_104.dat")
#writeDFMock(join( mockDir,"Box_HAM_z0.701838_nbar1.350000e-05_QSO.dat"), DFfile)
writeDFMock(join( mockDir,"Box_HAM_z0.701838_nbar1.359000e-05_QSO_v1.0.dat"), DFfile)
#writeDFMock(join( mockDir,"Box_HAM_z0.701838_nbar2.400000e-04_ELG.dat"), DFfile)
DFfile = join(DFdir,"dmdens_cic_101.dat")
#writeDFMock(join( mockDir,"Box_HAM_z0.818843_nbar1.000000e-04_LRG.dat"), DFfile)
writeDFMock(join( mockDir,"Box_HAM_z0.818843_nbar1.720000e-05_QSO_v1.0.dat"), DFfile)
#writeDFMock(join( mockDir,"Box_HAM_z0.818843_nbar3.200000e-04_ELG.dat"), DFfile)
DFfile = join(DFdir,"dmdens_cic_097.dat")
writeDFMock(join( mockDir,"Box_HAM_z0.987281_nbar1.933000e-05_QSO_v1.0.dat "), DFfile)
#writeDFMock(join( mockDir,"Box_HAM_z0.987281_nbar2.400000e-04_ELG.dat"), DFfile)
DFfile = join(DFdir,"dmdens_cic_087.dat")
writeDFMock(join( mockDir,"Box_HAM_z1.480160_nbar2.040000e-05_QSO_v1.0.dat"), DFfile)
|
caspartse/python-translate | data/db2list.py | Python | mit | 241 | 0 | #!/usr/bin/env python
# -*- coding:utf-8 -*
import dbm
import codecs
db = dbm.open('./vocabulary', 'r')
vDict = db.keys()
vList = sorted(vDict)
print len(vList)
with codecs.op | en('vocabulary.list', 'w') as f:
f.write('\n'.joi | n(vList))
|
MSusik/invenio | invenio/legacy/bibexport/templates.py | Python | gpl-2.0 | 29,937 | 0.008518 | # -*- coding: utf-8 -*-
## $Id: webmessage_templates.py,v 1.32 2008/03/26 23:26:23 tibor Exp $
##
## handles rendering of webmessage module
##
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Templates for field exporter plugin """
__revision__ = "$Id: webmessage_templates.py,v 1.32 2008/03/26 23:26:23 tibor Exp $"
import cgi
from invenio.config import CFG_SITE_LANG, CFG_SITE_URL
from invenio.base.i18n import gettext_set_language
from invenio.utils.date import convert_datestruct_to_datetext, convert_datetext_to_dategui, convert_datestruct_to_dategui
from invenio.legacy.bibexport.fieldexporter_dblayer import Job, JobResult
class Template:
    """Templates for field exporter plugin"""

    # Base URLs of the field-exporter pages, rooted at the site URL.
    _JOBS_URL = "%s/exporter/jobs" % (CFG_SITE_URL, )
    _EDIT_JOB_URL = "%s/exporter/edit_job" % (CFG_SITE_URL, )
    _EDIT_QUERY_URL = "%s/exporter/edit_query" % (CFG_SITE_URL, )
    _JOB_RESULTS_URL = "%s/exporter/job_results" % (CFG_SITE_URL, )
    _DISPLAY_JOB_RESULT_URL = "%s/exporter/display_job_result" % (CFG_SITE_URL, )
    _DOWNLOAD_JOB_RESULT_URL = "%s/exporter/download_job_result" % (CFG_SITE_URL, )
    _JOB_HISTORY_URL = "%s/exporter/history" % (CFG_SITE_URL, )
def tmpl_styles(self):
"""Defines the local CSS styles used in the plugin"""
styles = """
<style type="text/css">
.label{
white-space: nowrap;
padding-right: 15px;
}
.textentry{
width: 350px;
}
table.spacedcells td{
padding-right: 20px;
white-space: nowrap; |
}
table.spacedcells th{
padding-right: 20px;
text-align: left;
}
</style>
<script type="text/javascript">
<!--
function SetAllCheckBoxes(FormName, FieldName, CheckValue)
{
if(!document.forms[FormName])
return;
var objCheckBoxes = document.forms[FormName].elements[FieldName];
if(!objCheckBoxes)
return;
var countCheckBoxes = objCheckBoxes.length;
if(!countCheckBoxes)
objCheckBoxes.checked = CheckValue;
else
// set the check value for all ch | eck boxes
for(var i = 0; i < countCheckBoxes; i++)
objCheckBoxes[i].checked = CheckValue;
}
// -->
</script>
"""
return styles
def tmpl_navigation_menu(self, language = CFG_SITE_LANG):
"""Returns HTML representing navigation menu for field exporter."""
_ = gettext_set_language(language)
navigation_menu = """
<table class="headermodulebox">
<tbody><tr>
<td class="headermoduleboxbody">
<a class="header" href="%(job_verview_url)s?ln=%(language)s">%(label_job_overview)s</a>
</td>
<td class="headermoduleboxbody">
<a class="header" href="%(edit_job_url)s?ln=%(language)s">%(label_new_job)s</a>
</td>
<td class="headermoduleboxbody">
<a class="header" href="%(job_history_url)s?ln=%(language)s">%(label_job_history)s</a>
</td>
</tr></tbody></table>
""" % {"edit_job_url" : self._EDIT_JOB_URL,
"job_verview_url" : self._JOBS_URL,
"job_history_url" : self._JOB_HISTORY_URL,
"language" : language,
"label_job_overview" : _("Export Job Overview"),
"label_new_job" : _("New Export Job"),
"label_job_history" : _("Export Job History")
}
return navigation_menu
def tmpl_display_jobs(self, jobs, language = CFG_SITE_LANG):
"""
Creates page for displaying of all the jobs.
@param jobs: list of the jobs that have to be displayed
@param language: language of the page
"""
_ = gettext_set_language(language)
table_rows = ""
for current_job in jobs:
# convert last run date into text proper to be shown to the user
datetext = convert_datestruct_to_datetext(current_job.get_last_run())
last_run = convert_datetext_to_dategui(datetext, language)
# obtain text corresponding to the frequency of execution
frequency = current_job.get_frequency()
frequency_text = self._get_frequency_text(frequency)
row = """<tr>
<td><input type="checkbox" name="selected_jobs" value="%(job_id)s"></input></td>
<td><a href="%(edit_job_url)s?id=%(job_id)s&ln=%(language)s">%(name)s</a></td>
<td>%(frequency)s</td>
<td>%(last_run)s</td>
</tr>""" % self._html_escape_dictionary({
"edit_job_url" : self._EDIT_JOB_URL,
"job_id" : current_job.get_id(),
"name" : current_job.get_name(),
"frequency" : frequency_text,
"language" : language,
"last_run" : last_run
})
table_rows += row
select_all_none_row = """
<tr><td colspan="4">
<small>%s</small><br><br>
</td></tr>""" \
%(self._get_select_all_none_html("jobsForm",
"selected_jobs",
language))
table_rows += select_all_none_row
buttons_row = """<tr>
<td colspan="3">
<input type="Submit" name="run_button" value="%(label_run)s" class="formbutton">
<input type="Submit" name="delete_button" value="%(label_delete)s" class="formbutton">
</td>
<td align="right">
<input type="Submit" name="new_button" value="%(label_new)s" class="formbutton">
</td>
</tr>""" % {
"label_run" : _("Run"),
"label_delete" : _("Delete"),
"label_new" : _("New")
}
table_rows += buttons_row
body = """
<form method="post" name="jobsForm">
<table class="spacedcells">
<th></th>
<th>%(label_name)s</th>
<th>%(label_frequency)s</th>
<th>%(label_last_run)s</th>
%(table_rows)s
</table>
</form>
""" % {
"table_rows" : table_rows,
"label_name" : _("Name"),
"label_frequency" : _("Run"),
"label_last_run" : _("Last run")
}
return body
def tmpl_edit_job(self, job, language = CFG_SITE_LANG):
"""
Creates page for editing of jobs.
@param job: The job that will be edited
@param language: language of the page
"""
_ = gettext_set_language(language)
job_frequency = job.get_frequency()
frequency_select_box_html = self._create_frequency_select_box("job_frequency", job_frequency, language)
output_format_select_box_html = self._create_output_format_select_box(selected_value = job.get_output_format())
body = """
<form method="post">
<input type="Hidden" name="id" value="%(job_id)s">
<table>
<tr>
<td class = "label">%(name_label)s</td>
<td colspan="2"><input type="text" name="job_name" class="textentry" value="%(job_name)s"></td>
</tr>
<tr>
<td class = "label">%(frequency_label)s</td>
<td colspan="2">%(frequency_select_box)s</td>
</tr>
<tr>
<td class = "label">%(output_format_label)s</td>
<td colspan="2">%(output_format_select_box)s</td>
</tr>
|
computationalmodelling/python-package-template | package_template/arith.py | Python | bsd-2-clause | 58 | 0 | def add(x, | y):
"""Add two numbers"""
return | x + y
|
drkitty/cyder | cyder/core/system/validators.py | Python | bsd-3-clause | 173 | 0 | from d | jango.core.exceptions import ValidationError
def validate_no_spaces(value):
if ' ' in value:
raise ValidationError('System name cannot contain spaces.') | |
claremacrae/raspi_code | hardware/raspio_pro_hat/pulse_rgbled.py | Python | mit | 570 | 0.001754 | from gpiozero import RGBLED
from time import sleep
led = RGBLED(red=17, green=18, blue=19)
delay = 0.02
while True:
for x in ran | ge(100):
led.red = x/100
sleep(delay)
for x in range(100, -1, -1):
led.red = x/100
sleep(delay)
for x in range(100):
led.green = x/100
sleep(delay)
for x in range(100, -1, -1):
led.green = x/100
sleep(delay)
for x in range(100):
led.blue = x/100
sleep(delay)
for x in range(10 | 0, -1, -1):
led.blue = x/100
sleep(delay)
|
endlessm/chromium-browser | third_party/llvm/lldb/test/API/issue_verification/disable.py | Python | bsd-3-clause | 656 | 0.001524 | #!/usr/bin/env python
"""Renames *.py files to *.py.park."""
import os
import sys
def main():
"""Drives the main script behavior."""
script_dir = os.path.dirname(os.path.realpath(__file__))
for filename in os.listdir(script_dir):
basename, extension = os.path.splitext(filename)
if basename.startswith("Test") and extension == '.py':
source_path = os.path.join(script_dir, filename)
dest_path = sour | ce_path + ".park"
sys.stdout.write("ren | aming {} to {}\n".format(
source_path, dest_path))
os.rename(source_path, dest_path)
if __name__ == "__main__":
main()
|
tvgdb/pianissimo | backend/tasks/__init__.py | Python | gpl-3.0 | 579 | 0.001727 | from celery | import Celery
def make_celery(app):
celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'], backend=app.config["CELERY_BACKEND_URL"], include=['tasks.library_tasks'])
celery.conf.update(app.config)
celery.conf['CELERY_ACCEPT_CONTENT'] = ['pickle', 'json']
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
return TaskBa | se.__call__(self, *args, **kwargs)
celery.Task = ContextTask
return celery
|
FreescaleSemiconductor/quantum | quantum/plugins/metaplugin/run_tests.py | Python | apache-2.0 | 1,656 | 0.000604 | #!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest runner for quantum Meta plu | gin
This file should be run from the top dir in the quantum directory
To run all tests::
PLUGIN_DIR=quantum/plugins/metaplugin ./run_tests.sh
"""
import os
import sys
from nose import config
from nose import core
sys.path.append(os.getcwd())
sys.path.append(os.path.dirname(__file__))
from quantum.common.test_lib import run_tests, test_config
| if __name__ == '__main__':
exit_status = False
# if a single test case was specified,
# we should only invoked the tests once
invoke_once = len(sys.argv) > 1
test_config['plugin_name'] = "meta_quantum_plugin.MetaPluginV2"
cwd = os.getcwd()
working_dir = os.path.abspath("quantum/plugins/metaplugin")
c = config.Config(stream=sys.stdout,
env=os.environ,
verbosity=3,
workingDir=working_dir)
exit_status = exit_status or run_tests(c)
sys.exit(exit_status)
|
aspc/mainsite | aspc/courses/management/commands/solicit_reviews.py | Python | mit | 1,761 | 0.003975 | from django.core.management.base import BaseCommand
from aspc.courses.models import Schedule
from datetime import datetime, timedelta
from django.core.mail import EmailMultiAlternatives
from django.template.loader import get_template
from django.template import Context
from aspc.settings import EMAIL_HOST_USER
# Assume we send the emails at the end of the semester, we
# should only consider schedules that are at least 3 months old
MIN_DAYS = 90
MAX_DAYS = 300
EMAIL_TITLE = "Have you taken these classes?"
class Command(BaseCommand):
args = ''
help = 'imports terms'
def handle(self, *args, **options):
plaintext = get_template('email/solicit_reviews.txt')
htmly = get_template('email/solicit_reviews.html')
| schedules = Schedule.objects.filter(create_ts__lte=datetime.now()-timedelta(days=MIN_DAYS),
create_ts__gte=datetime.now()-timedelta(days=MAX_DAYS))
emails_sent = 0
for schedule in schedules:
try:
context = Context({'user': schedule.user, 'courses': schedule.sections.all()})
text_content = plaintext.render(context)
html_content = htmly.r | ender(context)
user_data = schedule.user.user.all()
if user_data and user_data[0].subscribed_email:
msg = EmailMultiAlternatives(EMAIL_TITLE, text_content, EMAIL_HOST_USER, [schedule.user.email])
msg.attach_alternative(html_content, "text/html")
msg.send()
emails_sent += 1
except Exception as e:
self.stdout.write('Error: %s\n' % e)
self.stdout.write('Successfully send %s emails\n' % emails_sent)
|
allenlavoie/tensorflow | tensorflow/python/layers/convolutional_test.py | Python | apache-2.0 | 50,372 | 0.003871 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.layers.convolutional."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.layers import convolutional as conv_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
@test_util.with_c_api
class ConvTest(test.TestCase):
def testInvalidDataFormat(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'data_format'):
conv_layers.conv2d(images, 32, 3, data_format='invalid')
def testInvalidStrides(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'strides'):
conv_layers.conv2d(images, 32, 3, strides=(1, 2, 3))
with self.assertRaisesRegexp(ValueError, 'strides'):
conv_layers.conv2d(images, 32, 3, strides=None)
def testInvalidKernelSize(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'kernel_size'):
conv_layers.conv2d(images, 32, (1, 2, 3))
with self.assertRaisesRegexp(ValueError, 'kernel_size'):
conv_layers.conv2d(images, 32, None)
def testCreateConv2D(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.Conv2D(32, [3, 3], activation=nn_ops.relu)
output = layer.apply(images)
self.assertEqual(output.op.name, 'conv2d/Relu')
self.assertListEqual(output.get_shape().as_list(),
[5, height - 2, width - 2, 32])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testConv2DFloat16(self):
height, width = 7, 9
| images = random_ops.random_uniform((5, height, width, 4), dtype='float16')
output = conv_layers.conv2d(images, 32, [3, 3], activation=nn_ops.relu)
self. | assertListEqual(output.get_shape().as_list(),
[5, height - 2, width - 2, 32])
def testCreateConv2DIntegerKernelSize(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 4))
layer = conv_layers.Conv2D(32, 3)
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height - 2, width - 2, 32])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testCreateConv2DChannelsFirst(self):
height, width = 7, 9
images = random_ops.random_uniform((5, 4, height, width))
layer = conv_layers.Conv2D(32, [3, 3], data_format='channels_first')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, 32, height - 2, width - 2])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testUnknownInputChannels(self):
images = array_ops.placeholder(dtypes.float32, (5, 7, 9, None))
layer = conv_layers.Conv2D(32, [3, 3], activation=nn_ops.relu)
with self.assertRaisesRegexp(ValueError,
'The channel dimension of the inputs '
'should be defined. Found `None`.'):
_ = layer.apply(images)
images = array_ops.placeholder(dtypes.float32, (5, None, 7, 9))
layer = conv_layers.Conv2D(32, [3, 3], data_format='channels_first')
with self.assertRaisesRegexp(ValueError,
'The channel dimension of the inputs '
'should be defined. Found `None`.'):
_ = layer.apply(images)
def testConv2DPaddingSame(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 32), seed=1)
layer = conv_layers.Conv2D(64, images.get_shape()[1:3], padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 64])
def testCreateConvWithStrides(self):
height, width = 6, 8
# Test strides tuple
images = random_ops.random_uniform((5, height, width, 3), seed=1)
layer = conv_layers.Conv2D(32, [3, 3], strides=(2, 2), padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height / 2, width / 2, 32])
# Test strides integer
layer = conv_layers.Conv2D(32, [3, 3], strides=2, padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height / 2, width / 2, 32])
# Test unequal strides
layer = conv_layers.Conv2D(32, [3, 3], strides=(2, 1), padding='same')
output = layer.apply(images)
self.assertListEqual(output.get_shape().as_list(),
[5, height / 2, width, 32])
def testCreateConv1D(self):
width = 7
data = random_ops.random_uniform((5, width, 4))
layer = conv_layers.Conv1D(32, 3, activation=nn_ops.relu)
output = layer.apply(data)
self.assertEqual(output.op.name, 'conv1d/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, width - 2, 32])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testConv1DFloat16(self):
width = 7
data = random_ops.random_uniform((5, width, 4), dtype='float16')
output = conv_layers.conv1d(data, 32, 3, activation=nn_ops.relu)
self.assertListEqual(output.get_shape().as_list(), [5, width - 2, 32])
def testCreateConv1DChannelsFirst(self):
width = 7
data = random_ops.random_uniform((5, 4, width))
layer = conv_layers.Conv1D(32, 3, data_format='channels_first')
output = layer.apply(data)
self.assertListEqual(output.get_shape().as_list(), [5, 32, width - 2])
self.assertListEqual(layer.kernel.get_shape().as_list(), [3, 4, 32])
self.assertListEqual(layer.bias.get_shape().as_list(), [32])
def testUnknownInputChannelsConv1D(self):
data = array_ops.placeholder(dtypes.float32, (5, 4, None))
layer = conv_layers.Conv1D(32, 3, activation=nn_ops.relu)
with self.assertRaisesRegexp(ValueError,
'The channel dimension of the inputs '
'should be defined. Found `None`.'):
_ = layer.apply(data)
data = array_ops.placeholder(dtypes.float32, (5, None, 4))
layer = conv_layers.Conv1D(32, 3, data_format='channels_first')
with self.assertRaisesRegexp(ValueError,
'The channel dimension of the inputs '
'should be defined. Found `None`.'):
_ = layer.apply(data)
def testCreateConv3D(self):
depth, height, width = 6, 7, 9
volumes = random_ops.random_uniform((5, depth, height, width, 4))
layer = |
r26zhao/django_blog | blog/sitemaps.py | Python | mit | 697 | 0.008608 | from django.contrib.sitemaps import Sitemap
from .models import Post, Category, Tag
class PostSitemap(Sitemap):
priorit | y = 0.9
changefreq = 'weekly'
def items(self):
return Post.objects.all()
def lastmod(self, obj):
return obj.date_modified
class CategorySitemap(Sitemap):
priority = 0.9
changefreq = 'weekly'
def items(self):
return Category.objects.all()
def lastmod(self, obj):
return obj.post_set | .all()[0].date_created
class TagSitemap(Sitemap):
priority = 0.9
changefreq = 'weekly'
def items(self):
return Tag.objects.all()
def lastmod(self, obj):
return obj.post_set.all()[0].date_created |
binsina/python-searchengine | searchengine/scraper.py | Python | bsd-3-clause | 3,070 | 0.004886 | #!/usr/bin/env python
from HTMLParser import HTMLParser, HTMLParseError
import optparse
from urllib2 import urlopen, URLError
from searchengine.logger import Logging
class TagSelector(HTMLParser):
"""
Strip all HTML tags from a string.
"""
def __init__(self, *args, **kwargs):
self.reset()
self.content_dict = {
'title': "",
'content': [],
'links': [],
}
self.current_tag = None
self.current_attrs = None
self.visible_tags = kwargs.get('visible_tags', [
'body', 'title', 'p', 'div', 'td',
'span', 'blockquote', 'li', 'a',
])
def attr_dict(self, attrs):
"""
Iterate through the attrs and convert to a dict.
"""
attrs_dict = {}
if attrs:
for a in attrs:
attrs_dict.update({a[0]: a[1]})
return attrs_dict
def handle_starttag(self, tag, attrs):
"""
Set the current tag the parser has reached.
TODO: add css support
"""
self.current_tag = tag
self.current_attrs = self.attr_dict(attrs)
def handle_data(self, data):
"""
Append visible data to the list.
"""
data = data.strip()
# find visible data and update to the content_dict
if self.current_tag in self.visible_tags and data:
if self.current_tag == 'title':
self.content_dict['title'] = data
if self.current_tag == 'a' and self.current_attrs.has_key('href'):
link = self.current_attrs['href']
self.content_dict['links'].append((data, link))
self.content_dict['content'].append(data)
def get_data(self):
return self.conte | nt_dict
class HTMLScraper(object):
"""
A simple HTML Scraper class to get visable text from a given URL.
"""
def __init__(self, url, *args, **kwargs):
self.log = kwargs.get('log', Logging( | ))
self.url = url
def get_url_content(self):
"""
Open and read the content of a URL.
"""
try:
url_handler = urlopen(self.url)
except URLError, e:
self.log.warning("HTMLScraper", "get_url_content", e)
return ""
html_content = url_handler.read()
url_handler.close()
return html_content
def get_content(self):
"""
Parse the visable content of a url into plain text.
"""
html_content = self.get_url_content()
html_parser = TagSelector()
try:
html_parser.feed(unicode(html_content, errors='replace'))
except HTMLParseError, e:
self.log.warning("HTMLScraper", "get_content", e)
return {}
# get the content and update with the url scraped
parsed_content = html_parser.get_data()
parsed_content.update({'url': self.url})
return parsed_content
|
slinderman/theano_pyglm | pyglm/components/impulse.py | Python | mit | 18,930 | 0.004966 | import theano
import theano.tensor as T
from pyglm.utils.basis import *
from pyglm.components.component import Component
from pyglm.components.priors import create_prior
def create_impulse_component(model, glm, latent):
typ = model['impulse']['type'].lower()
if typ.lower() == 'basis':
return LinearBasisImpulses(model)
elif typ.lower() == 'normalized':
return NormalizedBasisImpulses(model)
elif typ.lower() == 'dirichlet':
return DirichletImpulses(model)
elif typ.lower() == 'exponential':
return ExponentialImpulses(model)
class LinearBasisImpulses(Component):
""" Linear impulse response functions. Here we make use of Theano's
tensordot to sum up the currents from each presynaptic neuron.
"""
def __init__(self, model):
self.model = model
self.imp_model = model['impulse']
self.prior = create_prior(self.imp_model['prior'])
# Number of presynaptic neurons
self.N = model['N']
# Get parameters of the prior
# self.mu = self.prms['mu']
# self.sigma = self.prms['sigma']
# Create a basis for the impulse responses response
self.basis = create_basis(self.imp_model['basis'])
(_,self.B) = self.basis.shape
# The basis is interpolated once the data is specified
self.initialize_basis()
# Initialize memory for the filtered spike train
self.ir = theano.shared(name='ir',
value=np.zeros((1,self.N,self.B)))
# Define weights
self.w_ir = T.dvector('w_ir')
# Repeat them (in a differentiable manner) to create a 3-tensor
w_ir2 = T.reshape(self.w_ir, [self.N,self.B])
| w_ir3 = T.reshape(self.w_ir, [1,self.N,self.B])
# Make w_ir3 broadcastable in the 1st dim
T.addbroadcast(w_ir3,0)
# Take the elementwise product of the fi | ltered stimulus and
# the repeated weights to get the weighted impulse current along each
# impulse basis dimension. Then sum over bases to get the
# total coupling current from each presynaptic neurons at
# all time points
self.I_imp = T.sum(self.ir*w_ir3, axis=2)
# self.log_p = T.sum(-0.5/self.sigma**2 * (self.w_ir-self.mu)**2)
self.log_p = self.prior.log_p(w_ir2)
# Define a helper variable for the impulse response
# after projecting onto the basis
self.impulse = T.dot(w_ir2, T.transpose(self.ibasis))
def get_variables(self):
""" Get the theano variables associated with this model.
"""
return {str(self.w_ir): self.w_ir}
def set_hyperparameters(self, model):
""" Set the hyperparameters of the model
"""
self.prior.set_hyperparameters(model['prior'])
def sample(self, acc):
"""
return a sample of the variables
"""
# w = self.mu + self.sigma*np.random.randn(self.N*self.B)
w = self.prior.sample(None, size=(self.N, self.B)).ravel()
return {str(self.w_ir): w}
def get_state(self):
""" Get the impulse responses
"""
return {'impulse' : self.impulse,
'basis' : self.ibasis}
def initialize_basis(self):
"""
Initialize the basis by interpolating it at the resolution of the data
"""
# Interpolate basis at the resolution of the data
dt = self.model['dt']
(L,B) = self.basis.shape
Lt_int = self.imp_model['dt_max']/dt
t_int = np.linspace(0,1,Lt_int)
t_bas = np.linspace(0,1,L)
ibasis = np.zeros((len(t_int), B))
for b in np.arange(B):
ibasis[:,b] = np.interp(t_int, t_bas, self.basis[:,b])
# Normalize so that the interpolated basis has volume 1
if self.imp_model['basis']['norm']:
ibasis = ibasis / self.imp_model['dt_max']
# Normalize so that the interpolated basis has unit L1 norm
# if self.prms['basis']['norm']:
# ibasis = ibasis / np.tile(np.sum(ibasis,0),[Lt_int,1])
self.ibasis = theano.shared(value=ibasis)
def preprocess_data(self, data):
""" Set the shared memory variables that depend on the data
"""
ibasis = self.ibasis.get_value()
# Project the presynaptic spiking onto the basis
nT,Ns = data["S"].shape
assert Ns == self.N, "ERROR: Spike train must be (TxN) " \
"dimensional where N=%d" % self.N
fS = convolve_with_basis(data["S"], ibasis)
# Flatten this manually to be safe
# (there's surely a way to do this with numpy)
(nT,Nc,B) = fS.shape
assert Nc == self.N, "ERROR: Convolution with spike train " \
"resulted in incorrect shape: %s" % str(fS.shape)
data['fS'] = fS
def set_data(self, data):
self.ir.set_value(data['fS'])
class NormalizedBasisImpulses(Component):
""" Normalized impulse response functions. Here we make use of Theano's
broadcasting to sum up the currents from each presynaptic neuron.
"""
def __init__(self, model):
self.model = model
self.imp_model = model['impulse']
# Number of presynaptic neurons
self.N = model['N']
# Get parameters of the prior
self.alpha = self.imp_model['alpha']
# Create a basis for the impulse responses response
self.basis = create_basis(self.imp_model['basis'])
(_,self.B) = self.basis.shape
# The basis is interpolated once the data is specified
self.initialize_basis()
# Initialize memory for the filtered spike train
self.ir = theano.shared(name='ir',
value=np.zeros((1,self.N,self.B)))
# Define Dirichlet distributed weights by normalizing gammas
# The variables are log-gamma distributed
self.lng = T.dvector('w_lng')
self.g = T.exp(self.lng)
self.g2 = T.reshape(self.g, [self.N,self.B])
self.g_sum = T.reshape(T.sum(self.g2, axis=1), [self.N,1])
# Normalize the gammas to get a Dirichlet draw
T.addbroadcast(self.g_sum, 1)
self.w_ir2 = self.g2 / self.g_sum
self.w_ir2.name = 'w_ir'
# Repeat them (in a differentiable manner) to create a 3-tensor
self.w_ir3 = T.reshape(self.w_ir2, [1,self.N,self.B])
# Make w_ir3 broadcastable in the 1st dim
T.addbroadcast(self.w_ir3,0)
# Take the elementwise product of the filtered stimulus and
# the repeated weights to get the weighted impulse current along each
# impulse basis dimension. Then sum over bases to get the
# total coupling current from each presynaptic neurons at
# all time points
self.I_imp = T.sum(self.ir*self.w_ir3, axis=2)
# Log probability of a set of independent log-gamma r.v.'s
# This is log p(log(g)) under the prior. Since we are taking the
# log, we multiply by a factor of g to ensure normalization and
# thus the \alpha-1 in the exponent becomes \alpha
self.log_p = -self.B*self.N*scipy.special.gammaln(self.alpha) \
+ T.sum(self.alpha*self.lng) \
- T.sum(self.g)
# Define a helper variable for the impulse response
# after projecting onto the basis
self.impulse = T.dot(self.w_ir2, T.transpose(self.ibasis))
def get_variables(self):
""" Get the theano variables associated with this model.
"""
return {str(self.lng): self.lng}
def sample(self, acc):
"""
return a sample of the variables
"""
g = np.random.gamma(self.alpha,np.ones(self.N*self.B))
lng = np.log(g)
return {str(self.lng): lng}
def get_state(self):
""" Get the impulse responses
"""
return {'impulse' : self.impulse,
'basis' : self.ibasis}
def initialize_basis(self):
# Interpolate basis at the resolution of the data
dt = self.model['dt']
(L,B) = self.basis.shape
|
tiangolo/fastapi | docs_src/extra_models/tutorial001_py310.py | Python | mit | 899 | 0 | from fastapi import FastAPI
from pydantic import BaseModel, EmailStr
app = FastAPI()
class UserIn(BaseModel):
username: str
password: str
email: EmailStr
full_name: str | None = None
class UserOut(BaseModel):
username: str
email: EmailStr
full_name: str | None = None
class UserInDB(BaseModel):
username: str
hashed_password: str
email: EmailStr
full_name: str | None = None
def fake_password_hasher(raw_password: str):
return "supersecret" + raw_password
def fake_save_user(user_in: UserIn):
hashed_password = fake | _password_hasher(user_in.password)
user_in_db = UserInDB(**user_in.dict(), hashed_password=hashed_password)
print("User saved! ..not really")
return user_ | in_db
@app.post("/user/", response_model=UserOut)
async def create_user(user_in: UserIn):
user_saved = fake_save_user(user_in)
return user_saved
|
n3wb13/OpenNfrGui-5.0-1 | lib/python/Plugins/Extensions/MediaPortal/resources/update.py | Python | gpl-2.0 | 6,241 | 0.021798 | # -*- coding: utf-8 -*-
###############################################################################################
#
# MediaPortal for Dreambox OS
#
# Coded by MediaPortal Team (c) 2013-2015
#
# This plugin is open source but it is NOT free software.
#
# This plugin may only be distributed to and executed on hardware which
# is licensed by Dream Property GmbH. This includes commercial distribution.
# In other words:
# It's NOT allowed to distribute any parts of this plugin or its source code in ANY way
# to hardware which is NOT licensed by Dream Property GmbH.
# It's NOT allowed to execute this plugin and its source code or even parts of it in ANY way
# on hardware which is NOT licensed by Dream Property GmbH.
#
# This applies to the source code as a whole as well as to parts of it, unless
# explicitely stated otherwise.
#
# If you want to use or modify the code or parts of it,
# you have to keep OUR license and inform us about the modifications, but it may NOT be
# commercially distributed other than under the conditions noted above.
#
# As an exception regarding modifcations, you are NOT permitted to remove
# any copy protections implemented in this plugin or change them for means of disabling
# or working around the copy protections, unless the change has been explicitly permitted
# by the o | riginal authors. Also decompiling and modification of the closed source
# parts is NOT permitted.
#
# Advertising with this plugin is NOT allowed.
# For other uses, permission from the authors is necessary.
#
###############################################################################################
from Plugins.Extensions.MediaPortal.plugin import _
from imports import *
import mp_globals
from messageboxext import MessageBoxExt
gLogFile = None
class checkupdate:
def __init__(self, sess | ion):
self.session = session
def checkforupdate(self):
getPage("http://master.dl.sourceforge.net/project/e2-mediaportal/version.txt", timeout=15).addCallback(self.gotUpdateInfo).addErrback(self.gotError)
def gotError(self, error=""):
pass
def gotUpdateInfo(self, html):
if re.search(".*?<html", html):
return
self.html = html
tmp_infolines = html.splitlines()
remoteversion_ipk = tmp_infolines[0]
remoteversion_deb = tmp_infolines[2]
if mp_globals.isDreamOS:
self.updateurl = tmp_infolines[3]
remoteversion = remoteversion_deb
else:
self.updateurl = tmp_infolines[1]
remoteversion = remoteversion_ipk
if config.mediaportal.version.value < remoteversion:
self.session.openWithCallback(self.startUpdate,MessageBoxExt,_("An update is available for the MediaPortal Plugin!\nDo you want to download and install it now?"), MessageBoxExt.TYPE_YESNO)
return
else:
return
def startUpdate(self,answer):
if answer is True:
self.session.open(MPUpdateScreen,self.updateurl)
else:
return
class MPUpdateScreen(Screen):
def __init__(self, session, updateurl):
self.session = session
self.updateurl = updateurl
self.plugin_path = mp_globals.pluginPath
self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
path = "%s/%s/MPUpdate.xml" % (self.skin_path, config.mediaportal.skin.value)
if not fileExists(path):
path = self.skin_path + mp_globals.skinFallback + "/MPUpdate.xml"
with open(path, "r") as f:
self.skin = f.read()
f.close()
self["mplog"] = ScrollLabel()
Screen.__init__(self, session)
self['title'] = Label("MediaPortal Update")
self.setTitle("MediaPortal Update")
self.onLayoutFinish.append(self.__onLayoutFinished)
def __onLayoutFinished(self):
sl = self["mplog"]
sl.instance.setZPosition(1)
self["mplog"].setText(_("Starting update, please wait..."))
self.startPluginUpdate()
def startPluginUpdate(self):
self.container=eConsoleAppContainer()
if mp_globals.isDreamOS:
self.container.appClosed_conn = self.container.appClosed.connect(self.finishedPluginUpdate)
self.container.stdoutAvail_conn = self.container.stdoutAvail.connect(self.mplog)
self.container.execute("apt-get update ; wget -q -O /tmp/foobar %s ; dpkg --install --force-depends --force-overwrite /tmp/foobar ; apt-get -y -f install" % str(self.updateurl))
else:
self.container.appClosed.append(self.finishedPluginUpdate)
self.container.stdoutAvail.append(self.mplog)
#self.container.stderrAvail.append(self.mplog)
#self.container.dataAvail.append(self.mplog)
self.container.execute("opkg update ; opkg install --force-overwrite --force-depends " + str(self.updateurl))
def finishedPluginUpdate(self,retval):
self.container.kill()
if retval == 0:
config.mediaportal.filter.value = "ALL"
config.mediaportal.filter.save()
configfile.save()
self.session.openWithCallback(self.restartGUI, MessageBoxExt, _("MediaPortal successfully updated!\nDo you want to restart the Enigma2 GUI now?"), MessageBoxExt.TYPE_YESNO)
elif retval == 2:
self.session.openWithCallback(self.restartGUI2, MessageBoxExt, _("MediaPortal update failed! Please check free space on your root filesystem, at least 8MB are required for installation.\nCheck the update log carefully!\nThe Enigma2 GUI will restart now!"), MessageBoxExt.TYPE_ERROR)
else:
self.session.openWithCallback(self.returnGUI, MessageBoxExt, _("MediaPortal update failed! Check the update log carefully!"), MessageBoxExt.TYPE_ERROR)
def restartGUI(self, answer):
if answer is True:
self.session.open(TryQuitMainloop, 3)
else:
self.close()
def restartGUI2(self, answer):
self.session.open(TryQuitMainloop, 3)
def returnGUI(self, answer):
self.close()
def mplog(self,str):
self["mplog"].setText(str)
self.writeToLog(str)
def writeToLog(self, log):
global gLogFile
if gLogFile is None:
self.openLogFile()
now = datetime.datetime.now()
gLogFile.write(str(log) + "\n")
gLogFile.flush()
def openLogFile(self):
global gLogFile
baseDir = "/tmp"
logDir = baseDir + "/mediaportal"
now = datetime.datetime.now()
try:
os.makedirs(baseDir)
except OSError, e:
pass
try:
os.makedirs(logDir)
except OSError, e:
pass
gLogFile = open(logDir + "/MediaPortal_update_%04d%02d%02d_%02d%02d.log" % (now.year, now.month, now.day, now.hour, now.minute, ), "w") |
Azure/azure-sdk-for-python | sdk/containerregistry/azure-containerregistry/azure/containerregistry/_generated/_container_registry.py | Python | mit | 4,280 | 0.004439 | # coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.7.4, generator: @autorest/python@5.12.4)
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import TYPE_CHECKING
from azure.core import PipelineClient
from msrest import Deserializer, Serializer
from . import models
from ._configuration import ContainerRegistryConfiguration
from .operations import AuthenticationOperations, ContainerRegistryBlobOperations, ContainerRegistryOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any
from azure.core.rest import HttpRequest, HttpResponse
class ContainerRegistry(object):
"""Metadata API definition for the Azure Container Registry runtime.
:ivar container_registry: ContainerRegistryOperations operations
:vartype container_registry: container_registry.operations.ContainerRegistryOperations
:ivar container_registry_blob: ContainerRegistryBlobOperations operations
:vartype container_registry_blob: contain | er_registry.operations.ContainerRegi | stryBlobOperations
:ivar authentication: AuthenticationOperations operations
:vartype authentication: container_registry.operations.AuthenticationOperations
:param url: Registry login URL.
:type url: str
:keyword api_version: Api Version. The default value is "2021-07-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(
self,
url, # type: str
**kwargs # type: Any
):
# type: (...) -> None
_base_url = '{url}'
self._config = ContainerRegistryConfiguration(url=url, **kwargs)
self._client = PipelineClient(base_url=_base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.container_registry = ContainerRegistryOperations(self._client, self._config, self._serialize, self._deserialize)
self.container_registry_blob = ContainerRegistryBlobOperations(self._client, self._config, self._serialize, self._deserialize)
self.authentication = AuthenticationOperations(self._client, self._config, self._serialize, self._deserialize)
def _send_request(
self,
request, # type: HttpRequest
**kwargs # type: Any
):
# type: (...) -> HttpResponse
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = client._send_request(request)
<HttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.HttpResponse
"""
request_copy = deepcopy(request)
path_format_arguments = {
"url": self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
}
request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments)
return self._client.send_request(request_copy, **kwargs)
def close(self):
# type: () -> None
self._client.close()
def __enter__(self):
# type: () -> ContainerRegistry
self._client.__enter__()
return self
def __exit__(self, *exc_details):
# type: (Any) -> None
self._client.__exit__(*exc_details)
|
itielshwartz/BackendApi | lib/oauth2client/devshell.py | Python | apache-2.0 | 4,410 | 0.001361 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OAuth 2.0 utitilies for Google Developer Shell environment."""
import json
import os
from oauth2client import client
DEVSHELL_ENV = 'DEVSHELL_CLIENT_PORT'
class Error(Exception):
"""Errors for this module."""
pass
class CommunicationError(Error):
"""Errors for communication with the Developer Shell server."""
class NoDevshellServer(Error):
"""Error when no Developer Shell server can be contacted."""
# The request for credential information to the Developer Shell client socket is
# always an empty PBLite-formatted JSON object, so just define it as a constant.
CREDENTIAL_INFO_REQUEST_JSON = '[]'
class CredentialInfoResponse(object):
"""Credential information response from Developer Shell server.
The credential information response from Developer Shell socket is a
PBLite-formatted JSON array with fields encoded by their index in the array:
* Index 0 - user email
* Index 1 - default project ID. None if the project context is not known.
* Index 2 - OAuth2 access token. None if there is no valid auth context.
"""
def __init__(self, json_string):
"""Initialize the response data from JSON PBLite array."""
pbl = json.loads(json_string)
if not isinstance(pbl, list):
raise ValueError('Not a list: ' + str(pbl))
pbl_len = len(pbl)
self.user_email = pbl[0] if pbl_len > 0 else None
self.project_id = pbl[1] if pbl_len > 1 else None
self.access_token = pbl[2] if pbl_len > 2 else None
def _SendRecv():
"""Communicate with the Developer Shell server socket."""
port = int(os.getenv(DEVSHELL_ENV, 0))
if port == 0:
raise NoDevshellServer()
import socket
sock = socket.socket()
sock.connect(('localhost', port))
data = CREDENTIAL_INFO_REQUEST_JSON
msg = '%s\n%s' % (len(data), data)
sock.sendall(msg.encode())
header = sock.recv(6).decode()
if '\n' not in header:
raise CommunicationError('saw no newline in the first 6 bytes')
len_str, json_str = header.split('\n' | , 1)
to_read = int(len_str) - len(json_str)
if to_read > 0:
json_str += sock.recv(to_read, socket.MSG_WAITALL).decode()
return CredentialInfoResponse(json_str)
clas | s DevshellCredentials(client.GoogleCredentials):
"""Credentials object for Google Developer Shell environment.
This object will allow a Google Developer Shell session to identify its user
to Google and other OAuth 2.0 servers that can verify assertions. It can be
used for the purpose of accessing data stored under the user account.
This credential does not require a flow to instantiate because it represents
a two legged flow, and therefore has all of the required information to
generate and refresh its own access tokens.
"""
def __init__(self, user_agent=None):
super(DevshellCredentials, self).__init__(
None, # access_token, initialized below
None, # client_id
None, # client_secret
None, # refresh_token
None, # token_expiry
None, # token_uri
user_agent)
self._refresh(None)
def _refresh(self, http_request):
self.devshell_response = _SendRecv()
self.access_token = self.devshell_response.access_token
@property
def user_email(self):
return self.devshell_response.user_email
@property
def project_id(self):
return self.devshell_response.project_id
@classmethod
def from_json(cls, json_data):
raise NotImplementedError(
'Cannot load Developer Shell credentials from JSON.')
@property
def serialization_data(self):
raise NotImplementedError(
'Cannot serialize Developer Shell credentials.')
|
smallyear/linuxLearn | salt/salt/modules/nspawn.py | Python | apache-2.0 | 42,749 | 0.000468 | # -*- coding: utf-8 -*-
'''
Manage nspawn containers
.. versionadded:: 2015.8.0
`systemd-nspawn(1)`__ is a tool used to manage lightweight namespace
containers. This execution module provides several functions to help manage
these containers.
.. __: http://www.freedesktop.org/software/systemd/man/systemd-nspawn.html
Minions running systemd >= 219 will place new containers in
``/var/lib/machines``, while those running systemd < 219 will place them in
``/var/lib/container``.
.. note:
``nsenter(1)`` is required to run commands within containers. It should
already be present on any systemd host, as part of the **util-linux**
package.
'''
# Import python libs
from __future__ import absolute_import
import errno
import functools
import logging
import os
import re
import shutil
import time
import tempfile
# Import Salt libs
import salt.defaults.exitcodes
import salt.utils
import salt.utils.systemd
from salt.exceptions import CommandExecutionError, SaltInvocationError
from salt.ext import six
from salt.ext.six.moves import range # pylint: disable=redefined-builtin
log = logging.getLogger(__name__)
__func_alias__ = {
'list_': 'list',
}
__virtualname__ = 'nspawn'
SEED_MARKER = '/nspawn.initial_seed'
WANT = '/etc/systemd/system/multi-user.target.wants/systemd-nspawn@{0}.service'
EXEC_DRIVER = 'nsenter'
def __virtual__():
'''
Only work on systems that have been booted with systemd
'''
if __grains__['kernel'] == 'Linux' \
and salt.utils.systemd.booted(__context__):
if salt.utils.systemd.version() is None:
log.error('nspawn: Unable to determine systemd version')
else:
return __virtualname__
return False
def _sd_version():
'''
Returns __context__.get('systemd.version', 0), avoiding duplication of the
call to dict.get and making it easier to change how we handle this context
var in the future
'''
return salt.utils.systemd.version(__context__)
def _ensure_exists(wrapped):
'''
Decorator to ensure that the named container exists.
'''
@functools.wraps(wrapped)
def check_exists(name, *args, **kwargs):
if not exists(name):
raise CommandExecutionError(
'Container \'{0}\' does not exist'.format(name)
)
return wrapped(name, *args, **salt.utils.clean_kwargs(**kwargs))
return check_exists
def _root(name='', all_roots=False):
'''
Return the container root directory. Starting with systemd 219, new
images go into /var/lib/machines.
'''
if _sd_version() >= 219:
if all_roots:
return [os.path.join(x, name)
for x in ('/var/lib/machines', '/var/lib/container')]
else:
return os.path.join('/var/lib/machines', name)
else:
ret = os.path.join('/var/lib/container', name)
if all_roots:
return [ret]
else:
return ret
def _make_container_root(name):
'''
Make the container root directory
'''
path = _root(name)
if os.path.exists(path):
__context__['retcode'] = salt.defaults.exitcodes.SALT_BUILD_FAIL
raise CommandExecutionError(
'Container {0} already exists'.format(name)
)
else:
try:
os.makedirs(path)
return path
except OSError as exc:
raise CommandExecutionError(
'Unable to make container root dire | ctory {0}: {1}'
.format(name, exc)
)
def _build_failed(dst, name):
try:
__context__['retcode'] = salt.defaults.exitcodes.SALT_BUILD_FAIL
shutil.rmtree(dst)
except OSError as exc:
if exc.errno != errno.ENOENT:
raise CommandExecutionError(
'Unable to cleanup container r | oot dir {0}'.format(dst)
)
raise CommandExecutionError(
'Container {0} failed to build'.format(name)
)
def _bootstrap_arch(name, **kwargs):
'''
Bootstrap an Arch Linux container
'''
if not salt.utils.which('pacstrap'):
raise CommandExecutionError(
'pacstrap not found, is the arch-install-scripts package '
'installed?'
)
dst = _make_container_root(name)
cmd = 'pacstrap -c -d {0} base'.format(dst)
ret = __salt__['cmd.run_all'](cmd, python_shell=False)
if ret['retcode'] != 0:
_build_failed(dst, name)
return ret
def _bootstrap_debian(name, **kwargs):
'''
Bootstrap a Debian Linux container (only unstable is currently supported)
'''
dst = _make_container_root(name)
cmd = 'debootstrap --arch=amd64 unstable {0}'.format(dst)
ret = __salt__['cmd.run_all'](cmd, python_shell=False)
if ret['retcode'] != 0:
_build_failed(dst, name)
return ret
def _bootstrap_fedora(name, **kwargs):
'''
Bootstrap a Fedora container
'''
dst = _make_container_root(name)
if not kwargs.get('version', False):
if __grains__['os'].lower() == 'fedora':
version = __grains__['osrelease']
else:
version = '21'
else:
version = '21'
cmd = ('yum -y --releasever={0} --nogpg --installroot={1} '
'--disablerepo="*" --enablerepo=fedora install systemd passwd yum '
'fedora-release vim-minimal'.format(version, dst))
ret = __salt__['cmd.run_all'](cmd, python_shell=False)
if ret['retcode'] != 0:
_build_failed(dst, name)
return ret
def _clear_context():
'''
Clear any lxc variables set in __context__
'''
for var in [x for x in __context__ if x.startswith('nspawn.')]:
log.trace('Clearing __context__[\'{0}\']'.format(var))
__context__.pop(var, None)
def _ensure_running(name):
'''
Raise an exception if the container does not exist
'''
if state(name) != 'running':
return True
else:
return start(name)
def _ensure_systemd(version):
'''
Raises an exception if the systemd version is not greater than the
passed version.
'''
try:
version = int(version)
except ValueError:
raise CommandExecutionError('Invalid version \'{0}\''.format(version))
try:
installed = _sd_version()
log.debug('nspawn: detected systemd {0}'.format(installed))
except (IndexError, ValueError):
raise CommandExecutionError('nspawn: Unable to get systemd version')
if installed < version:
raise CommandExecutionError(
'This function requires systemd >= {0} '
'(Detected version: {1}).'.format(version, installed)
)
def _machinectl(cmd,
output_loglevel='debug',
ignore_retcode=False,
use_vt=False):
'''
Helper function to run machinectl
'''
prefix = 'machinectl --no-legend --no-pager'
return __salt__['cmd.run_all']('{0} {1}'.format(prefix, cmd),
output_loglevel=output_loglevel,
ignore_retcode=ignore_retcode,
use_vt=use_vt)
@_ensure_exists
def _run(name,
cmd,
output=None,
no_start=False,
stdin=None,
python_shell=True,
preserve_state=False,
output_loglevel='debug',
ignore_retcode=False,
use_vt=False,
keep_env=None):
'''
Common logic for nspawn.run functions
'''
orig_state = state(name)
exc = None
try:
ret = __salt__['container_resource.run'](
name,
cmd,
container_type=__virtualname__,
exec_driver=EXEC_DRIVER,
output=output,
no_start=no_start,
stdin=stdin,
python_shell=python_shell,
output_loglevel=output_loglevel,
ignore_retcode=ignore_retcode,
use_vt=use_vt,
keep_env=keep_env)
except Exception:
raise
finally:
# Make sure we stop the container if necessary, even if an exception
# was raised.
if preserve_state \
and orig_state |
larmstrong12/portfolio | backend/apps/requests/urls.py | Python | apache-2.0 | 247 | 0.008097 | from django.conf.urls import patt | erns, url
from views import AddRequest, PricingList
urlpatterns = patterns('',
url('^request/$', AddRequest.as_view(), name='add- | request'),
url('^pricing/$', PricingList.as_view(), name='pricing-list'),
) |
dedoogong/asrada | utils/labelImg.py | Python | apache-2.0 | 57,731 | 0.001975 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import codecs
import distutils.spawn
import os.path
import platform
import re
import sys
import subprocess
from functools import partial
from collections import defaultdict
import keyboard #Using module keyboard
try:
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
except ImportError:
# needed for py3+qt4
# Ref:
# http://pyqt.sourceforge.net/Docs/PyQt4/incompatible_apis.html
# http://stackoverflow.com/questions/21217399/pyqt4-qtcore-qvariant-object-instead-of-a-string
if sys.version_info.major >= 3:
import sip
sip.setapi('QVariant', 2)
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import resources
# Add internal libs
from libs.constants import *
from libs.lib import struct, newAction, newIcon, addActions, fmtShortcut, generateColorByText
from libs.settings import Settings
from libs.shape import Shape, DEFAULT_LINE_COLOR, DEFAULT_FILL_COLOR
from libs.canvas import Canvas
from libs.zoomWidget import ZoomWidget
from libs.labelDialog import LabelDialog
from libs.colorDialog import ColorDialog
from libs.labelFile import LabelFile, LabelFileError
from libs.toolBar import ToolBar
from libs.pascal_voc_io import PascalVocReader
from libs.pascal_voc_io import XML_EXT
from libs.yolo_io import YoloReader
from libs.yolo_io import TXT_EXT
from libs.ustr import ustr
from libs.version import __version__
__appname__ = 'labelImg'
# Utility functions and classes.
def have_qstring():
'''p3/qt5 get rid of QString wrapper as py3 has native unicode str type'''
return not (sys.version_info.major >= 3 or QT_VERSION_STR.startswith('5.'))
def util_qt_strlistclass():
return QStringList if have_qstring() else list
class WindowMixin(object):
def menu(self, title, actions=None):
menu = self.menuBar().addMenu(title)
if actions:
addActions(menu, actions)
return menu
def toolbar(self, title, actions=None):
toolbar = ToolBar(title)
toolbar.setObjectName(u'%sToolBar' % title)
# toolbar.setOrientation(Qt.Vertical)
toolbar.setToolButtonStyle(Qt.ToolButtonTextUnderIcon)
if actions:
addActions(toolbar, actions)
self.addToolBar(Qt.LeftToolBarArea, toolbar)
return toolbar
# PyQt5: TypeError: unhashable type: 'QListWidgetItem'
class HashableQListWidgetItem(QListWidgetItem):
def __init__(self, *args):
super(HashableQListWidgetItem, self).__init__(*args)
def __hash__(self):
return hash(id(self))
class MainWindow(QMainWindow, WindowMixin):
FIT_WINDOW, FIT_WIDTH, MANUAL_ZOOM = list(range(3))
def __init__(self, defaultFilename=None, defaultPrefdefClassFile=None, defaultSaveDir=None):
super(MainWindow, self).__init__()
self.setWindowTitle(__appname__)
# Load setting in the main thread
self.settings = Settings()
self.settings.load()
settings = self.settings
# Save as Pascal voc xml
self.defaultSaveDir = defaultSaveDir
self.usingPascalVocFormat = True
self.usingYoloFormat = False
# For loading all image under a directory
self.mImgList = []
self.dirname = None
self.labelHist = []
self.lastOpenDir = None
# Whether we need to save or not.
self.dirty = False
self._noSelectionSlot = False
self._beginner = True
self.screencastViewer = self.getAvailableScreencastViewer()
self.screencast = "https://youtu.be/p0nR2YsCY_U"
# Load predefined classes to the list
self.loadPredefinedClasses(defaultPrefdefClassFile)
# Main widgets and related state.
self.labelDialog = LabelDialog(parent=self, listItem=self.labelHist)
self.itemsToShapes = {}
self.shapesToItems = {}
self.prevLabelText = ''
listLayout = QVBoxLayout()
listLayout.setContentsMargins(0, 0, 0, 0)
# Create a widget for using default label
self.useDefaultLabelCheckbox = QCheckBox(u'Use default label')
self.useDefaultLabelCheckbox.setChecked(False)
self.defaultLabelTextLine = QLineEdit()
useDefaultLabelQHBoxLayout = QHBoxLayout()
useDefaultLabelQHBoxLayout.addWidget(self.useDefaultLabelCheckbox)
useDefaultLabelQHBoxLayout.addWidget(self.defaultLabelTextLine)
useDefaultLabelContainer = QWidget()
useDefaultLabelContainer.setLayout(useDefaultLabelQHBoxLayout)
# Create a widget for edit and diffc button
self.diffcButton = QCheckBox(u'difficult')
self.diffcButton.setChecked(False)
self.diffcButton.stateChanged.connect(self.btnstate)
self.editButton = QToolButton()
self.editButton.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
# Add some of widgets to listLayout
listLayout.addWidget(self.editButton)
listLayout.addWidget(self.diffcButton)
listLayout.addWidget(useDefaultLabelContainer)
# Create and add a widget for showing current label items
self.labelList = QListWidget()
labelListContainer = QWidget()
labelListContainer.setLayout(listLayout)
self.labelList.itemActivated.connect(self.labelSelectionChanged)
self.labelList.itemSelectionChanged.connect(self.labelSelectionChanged)
self.labelList.itemDoubleClicked.connect(self.editLabel)
# Connect to itemChanged to detect checkbox changes.
self.labelList.itemChanged.connect(self.labelItemChanged)
listLayout.addWidget(self.labelList)
self.dock = QDockWidget(u'Box Labels', self)
self.dock.setObjectName(u'Labels')
self.dock.setWidget(labelListContainer)
# Tzutalin 20160906 : Add file list and dock to move faster
self.fileListWidget = QListWidget()
self.fileListWidget.itemDoubleClicked.connect(self.fileitemDoubleClicked)
filelistLayout = QVBoxLayout()
filelistLayout.setCo | ntentsMargins(0, 0, 0, 0)
filelistLayout.addWidget(self.fileListWidget)
fileListContainer = QWidget()
fileListContainer.setLayout(filelistLayout)
self.filedock = QDockWidget(u'File List', self)
self.filedock.setObjectName(u'Files')
self.filedock.setWidget(fileListContainer)
self.zoomWi | dget = ZoomWidget()
self.colorDialog = ColorDialog(parent=self)
self.canvas = Canvas(parent=self)
self.canvas.zoomRequest.connect(self.zoomRequest)
scroll = QScrollArea()
scroll.setWidget(self.canvas)
scroll.setWidgetResizable(True)
self.scrollBars = {
Qt.Vertical: scroll.verticalScrollBar(),
Qt.Horizontal: scroll.horizontalScrollBar()
}
self.scrollArea = scroll
self.canvas.scrollRequest.connect(self.scrollRequest)
self.canvas.newShape.connect(self.newShape)
self.canvas.shapeMoved.connect(self.setDirty)
self.canvas.selectionChanged.connect(self.shapeSelectionChanged)
self.canvas.drawingPolygon.connect(self.toggleDrawingSensitive)
self.setCentralWidget(scroll)
self.addDockWidget(Qt.RightDockWidgetArea, self.dock)
# Tzutalin 20160906 : Add file list and dock to move faster
self.addDockWidget(Qt.RightDockWidgetArea, self.filedock)
self.filedock.setFeatures(QDockWidget.DockWidgetFloatable)
self.dockFeatures = QDockWidget.DockWidgetClosable | QDockWidget.DockWidgetFloatable
self.dock.setFeatures(self.dock.features() ^ self.dockFeatures)
# Actions
action = partial(newAction, self)
quit = action('&Quit', self.close,
'Ctrl+Q', 'quit', u'Quit application')
open = action('&Open', self.openFile,
'Ctrl+O', 'open', u'Open image or label file')
opendir = action('&Open Dir', self.openDirDialog,
'Ctrl+u', 'open', u'Open Dir')
changeSavedir = action('&Change Save Dir', self.changeSavedirDialog,
'Ctrl+r', 'open', u'Chan |
awemulya/fieldsight-kobocat | onadata/apps/eventlog/migrations/0003_auto_20170522_1154.py | Python | bsd-2-clause | 783 | 0.002554 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
c | lass Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('eventlog', '0002_auto_20170522_1134'),
]
oper | ations = [
migrations.AddField(
model_name='fieldsightlog',
name='source',
field=models.ForeignKey(related_name='log', to=settings.AUTH_USER_MODEL, null=True),
),
migrations.AlterField(
model_name='fieldsightlog',
name='type',
field=models.IntegerField(default=0, choices=[(0, b'USER'), (1, b'FORM'), (2, b'SUBMISSION'), (3, b'Site')]),
),
]
|
j0hn/guicavane | setup.py | Python | gpl-3.0 | 5,235 | 0.012999 | #!/usr/bin/env python
# coding: utf-8
import os
import sys
from distutils.core import setup
from distutils.command.install import install
VERSION_NUMBER = "1.6.3"
class CustomInstall(install):
def run(self):
install.run(self)
for script in self.distribution.scripts:
script_path = os.path.join(self.install_scripts,
os.path.basename(script))
with open(script_path, "rb") as fh:
content = fh.read()
content = content.replace("@ INSTALLED_BASE_DIR @",
self._custom_data_dir)
with open(script_path, "wb") as fh:
fh.write(content)
def finalize_options(self):
install.finalize_options(self)
data_dir = os.path.join(self.prefix, "share", self.distribution.get_name())
if self.root is None:
build_dir = data_dir
else:
build_dir = os.path.join(self.root, data_dir[1:])
self.install_lib = build_dir
self._custom_data_dir = data_dir
def setup_linux():
hosts_dir = "guicavane/Hosts"
hosts = os.listdir(hosts_dir)
hosts = ["guicavane.Hosts." + x for x in hosts if os.path.isdir(
os.path.join(hosts_dir, x))]
translations_dir = "guicavane/Translations"
translations = []
for trans in os.listdir(translations_dir):
trans_path = os.path.join(translations_dir, trans)
if os.path.isdir(trans_path):
translations.append("Translations/" + trans + "/LC_MESSAGES/*")
setup(
name = "guicavane",
version = VERSION_NUMBER,
license = "GPL-3",
author = "Gonzalo García Berrotarán",
author_email = "j0hn.com.ar@gmail.com",
description = "Graphical user interface for www.cuevana.tv",
url = "http://www.github.com/j0hn/guicavane/",
packages = ["guicavane", "guicavane.Downloaders", "guicavane.Utils",
"guicavane.Accounts", "guicavane.Hosts"] + hosts,
package_data = {"guicavane": ["Glade/*.glade", "Images/*.png",
"Images/Downloaders/*.png"] + translations},
scripts = ["bin/guicavane"],
cmdclass = {"install": CustomInstall}
)
def setup_windows():
import py2exe
outdata_win = {
"script": "bin\\guicavane",
"dest_base": "guicavane",
"icon_resources": [(1, "guicavane\\Images\\logo.ico")]
}
outdata_con = outdata_win.copy()
outdata_con['dest_base'] = "guicavane_debug"
opts = {
'py2exe': {
'packages': 'encodings, gtk, guicavane, guicavane.Downloaders',
'includes': 'cairo, pangocairo, pango, atk, gobject, os, urllib,' \
'urllib2, cookielib, guicavane, gettext, gtk.glade, ' \
'gio, unicodedata, webbrowser, ' \
'guicavane.Downloaders, guicavane.Accounts, ' \
'guicavane.Utils',
'excludes': ["pywin", "pywin.debugger", "pywin.debugger.dbgcon",
"pywin.dialogs", "pywin.dialogs.list", "Tkconstants",
"Tkinter", "tcl", "doctest", "macpath", "pdb",
"ftplib", "win32wnet", "getopt",],
'dll_excludes': ["w9xpopen.exe"],
'dist_dir': './windows/build',
}
}
files = []
files.append(("Glade",
["guicavane\\Glade\\" + x for x in os.listdir("guicavane\\Glade")]))
files.append(("Images",
["guicavane\\Images\\" + x for x in os.listdir("guicavane\\Images") if \
not os.path.isdir("guicavane\\Images\\" + x)]))
files.append(("Images\\Downloaders\\",
["guicavane\\Images\\Downloaders\\" + x for x in os.listdir("guicavane\\Images\\Downloaders\\")]))
files.append(("Images\\Sites\\",
["guicavane\\Images\\Sites\\" + x for x in os.listdir("guicavane\\Images\\Sites\\")]))
for | translation in os.listdir("guicavane\\Translations\\"):
if not os.path.isdir("guicavane\\Translations\\" + translation):
continue
files.append(("Translations\\" + translation | + "\\LC_MESSAGES",
["guicavane\\Translations\\" + translation + "\\LC_MESSAGES\\" + \
x for x in os.listdir("guicavane\\Translations\\" + translation + "\\LC_MESSAGES")]))
hosts_dir = "guicavane\\Hosts"
hosts = os.listdir(hosts_dir)
hosts = [os.path.join(hosts_dir, x) for x in hosts if os.path.isdir(
os.path.join(hosts_dir, x))]
for host in hosts:
cleanhost = host.replace("guicavane\\", "")
files.append((cleanhost, [os.path.join(host, x) for x in os.listdir(host)]))
setup(
name = "Guicavane",
license = "GPL-3",
author = "Gonzalo García Berrotarán",
author_email = "j0hn.com.ar@gmail.com",
description = "Graphical user interface for www.cuevana.tv",
version = VERSION_NUMBER,
windows = [outdata_win],
console = [outdata_con],
options = opts,
data_files = files
)
if __name__ == "__main__":
path = os.path.dirname(sys.argv[0])
if path:
os.chdir(path)
if os.name == "nt":
setup_windows()
else:
setup_linux()
|
fabiand/igor | igor/reports.py | Python | lgpl-2.1 | 2,729 | 0 | #
# Copyright (C) 2012 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# b | ut WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If no | t, see <http://www.gnu.org/licenses/>.
#
# Author: Fabian Deutsch <fabiand@fedoraproject.org>
#
# -*- coding: utf-8 -*-
import os
import simplejson as json
from lxml import etree
import utils
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
REPORT_PATH = os.path.join(BASE_PATH, "data")
TRANSFORM_MAP = {
"job-rst": os.path.join(REPORT_PATH, "job-report.rst.xsl"),
"job-junit": os.path.join(REPORT_PATH, "job-report.junit.xsl"),
"testplan-rst": os.path.join(REPORT_PATH, "testplan-report.rst.xsl"),
"testplan-junit-xml": os.path.join(REPORT_PATH,
"testplan-report.junit.xsl"),
}
def job_status_to_report_json(txt):
"""Apply the plaintext report transformation to a json obj (str)
"""
d = json.loads(txt)
return job_status_to_report(d)
def job_status_to_report(d):
"""Transform a job status dict to a report
"""
return _map_transform(d, "job-rst")
def job_status_to_junit(d):
"""Transform a job status dict to a report
"""
return _map_transform(d, "job-junit", "job")
def testplan_status_to_report(d):
"""Transform a testplan status dict to a report
"""
return _map_transform(d, "testplan-rst")
def testplan_status_to_junit_report(d):
"""Transform a testplan status dict to a junit report
"""
return _map_transform(d, "testplan-junit-xml", "testplan")
def _map_transform(d, t, rootname="status"):
assert t in TRANSFORM_MAP, "Unknown transformation: %s" % t
return transform_dict(TRANSFORM_MAP[t], d, rootname)
def transform_dict(stylefile, d, rootname):
"""Apply a transformation to a dict
"""
xml = utils.obj2xml(rootname, d)
return transform_xml(stylefile, xml)
def transform_xml(stylefile, xml):
"""Transform an XML Object into another XML objcet using a stylesheet
"""
transform = etree.XSLT(etree.parse(stylefile))
report = transform(xml)
return report
def to_xml_str(etree_obj):
"""Convert a Tree into a str
"""
return etree.tostring(etree_obj, pretty_print=True)
|
youtube/cobalt | build/android/pylib/instrumentation/instrumentation_parser_test.py | Python | bsd-3-clause | 4,062 | 0.002216 | #!/usr/bin/env vpython
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for instrumentation.InstrumentationParser."""
import unittest
from pylib.instrumentation import instrumentation_parser
class InstrumentationParserTest(unittest.TestCase):
  """Unit tests for instrumentation_parser.InstrumentationParser."""

  def testInstrumentationParser_nothing(self):
    """Empty output yields no statuses, no result code, empty bundle."""
    parser = instrumentation_parser.InstrumentationParser([''])
    statuses = list(parser.IterStatus())
    code, bundle = parser.GetResult()
    self.assertEqual(None, code)
    self.assertEqual({}, bundle)
    self.assertEqual([], statuses)

  def testInstrumentationParser_noMatchingStarts(self):
    """Plain output without INSTRUMENTATION_* markers is ignored entirely."""
    raw_output = [
        '',
        'this.is.a.test.package.TestClass:.',
        'Test result for =.',
        'Time: 1.234',
        '',
        'OK (1 test)',
    ]
    # FIX(review): this statement was corrupted in the source
    # ("instrumentation_p | arser...") -- reconstructed from the module
    # imported at the top of the file.
    parser = instrumentation_parser.InstrumentationParser(raw_output)
    statuses = list(parser.IterStatus())
    code, bundle = parser.GetResult()
    self.assertEqual(None, code)
    self.assertEqual({}, bundle)
    self.assertEqual([], statuses)

  def testInstrumentationParser_resultAndCode(self):
    """RESULT lines accumulate into the bundle; CODE sets the result code.

    Continuation lines (no marker) are appended to the previous value with
    newlines.
    """
    raw_output = [
        'INSTRUMENTATION_RESULT: shortMsg=foo bar',
        'INSTRUMENTATION_RESULT: longMsg=a foo',
        'walked into',
        'a bar',
        'INSTRUMENTATION_CODE: -1',
    ]
    parser = instrumentation_parser.InstrumentationParser(raw_output)
    statuses = list(parser.IterStatus())
    code, bundle = parser.GetResult()
    self.assertEqual(-1, code)
    self.assertEqual(
        {'shortMsg': 'foo bar', 'longMsg': 'a foo\nwalked into\na bar'}, bundle)
    self.assertEqual([], statuses)

  def testInstrumentationParser_oneStatus(self):
    """STATUS key=value lines are grouped and emitted at STATUS_CODE."""
    raw_output = [
        'INSTRUMENTATION_STATUS: foo=1',
        'INSTRUMENTATION_STATUS: bar=hello',
        'INSTRUMENTATION_STATUS: world=false',
        'INSTRUMENTATION_STATUS: class=this.is.a.test.package.TestClass',
        'INSTRUMENTATION_STATUS: test=testMethod',
        'INSTRUMENTATION_STATUS_CODE: 0',
    ]
    parser = instrumentation_parser.InstrumentationParser(raw_output)
    statuses = list(parser.IterStatus())
    expected = [
        (0, {
            'foo': '1',
            'bar': 'hello',
            'world': 'false',
            'class': 'this.is.a.test.package.TestClass',
            'test': 'testMethod',
        })
    ]
    self.assertEqual(expected, statuses)

  def testInstrumentationParser_multiStatus(self):
    """Each STATUS_CODE flushes one status; multi-line values are joined."""
    raw_output = [
        'INSTRUMENTATION_STATUS: class=foo',
        'INSTRUMENTATION_STATUS: test=bar',
        'INSTRUMENTATION_STATUS_CODE: 1',
        'INSTRUMENTATION_STATUS: test_skipped=true',
        'INSTRUMENTATION_STATUS_CODE: 0',
        'INSTRUMENTATION_STATUS: class=hello',
        'INSTRUMENTATION_STATUS: test=world',
        'INSTRUMENTATION_STATUS: stack=',
        'foo/bar.py (27)',
        'hello/world.py (42)',
        'test/file.py (1)',
        'INSTRUMENTATION_STATUS_CODE: -1',
    ]
    parser = instrumentation_parser.InstrumentationParser(raw_output)
    statuses = list(parser.IterStatus())
    expected = [
        (1, {'class': 'foo', 'test': 'bar'}),
        (0, {'test_skipped': 'true'}),
        (-1, {
            'class': 'hello',
            'test': 'world',
            'stack': '\nfoo/bar.py (27)\nhello/world.py (42)\ntest/file.py (1)',
        }),
    ]
    self.assertEqual(expected, statuses)

  def testInstrumentationParser_statusResultAndCode(self):
    """Statuses and the final result/code coexist in one stream."""
    raw_output = [
        'INSTRUMENTATION_STATUS: class=foo',
        'INSTRUMENTATION_STATUS: test=bar',
        'INSTRUMENTATION_STATUS_CODE: 1',
        'INSTRUMENTATION_RESULT: result=hello',
        'world',
        '',
        '',
        'INSTRUMENTATION_CODE: 0',
    ]
    parser = instrumentation_parser.InstrumentationParser(raw_output)
    statuses = list(parser.IterStatus())
    code, bundle = parser.GetResult()
    self.assertEqual(0, code)
    self.assertEqual({'result': 'hello\nworld\n\n'}, bundle)
    self.assertEqual([(1, {'class': 'foo', 'test': 'bar'})], statuses)
if __name__ == '__main__':
  # Run this module's tests directly with verbose output.
  unittest.main(verbosity=2)
|
JulyKikuAkita/PythonPrac | cs15211/LongestMountaininArray.py | Python | apache-2.0 | 3,561 | 0.003371 | __source__ = 'https://leetcode.com/problems/longest-mountain-in-array/'
# Time: O(N)
# Space: O(1)
#
# Description: Leetcode # 845. Longest Mountain in Array
#
# Let's call any (contiguous) subarray B (of A) a mountain if the following properties hold:
#
# B.length >= 3
# There exists some 0 < i < B.length - 1
# such that B[ | 0] < B[1] < ... B[i-1] | < B[i] > B[i+1] > ... > B[B.length - 1]
# (Note that B could be any subarray of A, including the entire array A.)
#
# Given an array A of integers, return the length of the longest mountain.
#
# Return 0 if there is no mountain.
#
# Example 1:
#
# Input: [2,1,4,7,3,2,5]
# Output: 5
# Explanation: The largest mountain is [1,4,7,3,2] which has length 5.
# Example 2:
#
# Input: [2,2,2]
# Output: 0
# Explanation: There is no mountain.
# Note:
#
# 0 <= A.length <= 10000
# 0 <= A[i] <= 10000
# Follow up:
#
# Can you solve it using only one pass?
# Can you solve it in O(1) space?
#
import unittest
# 48ms 77.66%
class Solution(object):
    def longestMountain(self, A):
        """Return the length of the longest mountain subarray of A.

        A mountain strictly increases to a single peak and then strictly
        decreases, and must contain at least 3 elements. One pass, O(1)
        extra space.

        :type A: List[int]
        :rtype: int
        """
        n = len(A)
        best = 0
        start = 0
        while start < n:
            peak = start
            # Only a strictly rising first step can open a mountain here.
            if peak + 1 < n and A[peak] < A[peak + 1]:
                # Climb to the top of the ascent.
                while peak + 1 < n and A[peak] < A[peak + 1]:
                    peak += 1
                # A genuine peak must be followed by a strict descent.
                if peak + 1 < n and A[peak] > A[peak + 1]:
                    stop = peak
                    while stop + 1 < n and A[stop] > A[stop + 1]:
                        stop += 1
                    best = max(best, stop - start + 1)
                    peak = stop
            # Resume scanning after the explored stretch (always advance).
            start = max(peak, start + 1)
        return best
class TestMethods(unittest.TestCase):
    # Placeholder sanity check; the real verification for this problem
    # happens on the online judge.
    def test_Local(self):
        self.assertEqual(1, 1)
if __name__ == '__main__':
    # Execute the local sanity tests when run as a script.
    unittest.main()
# Reference Java solutions kept as documentation.
# FIX(review): the second solution contained a garbled character,
# "A[left - 1å" -- restored to "A[left - 1]".
Java = '''
# Thought: https://leetcode.com/problems/longest-mountain-in-array/solution/
#
Approach #1: Two Pointer [Accepted]
Complexity Analysis
Time Complexity: O(N), where N is the length of A.
Space Complexity: O(1)

# 5ms 96.82%
class Solution {
    public int longestMountain(int[] A) {
        int N = A.length;
        int ans = 0, base = 0;
        while (base < N) {
            int end = base;
            // if base is a left-boundary
            if (end + 1 < N && A[end] < A[end + 1]) {
                // set end to the peak of this potential mountain
                while (end + 1 < N && A[end] < A[end + 1]) end++;
                // if end is really a peak..
                if (end + 1 < N && A[end] > A[end + 1]) {
                    // set end to the right-boundary of mountain
                    while (end + 1 < N && A[end] > A[end + 1]) end++;
                    // record candidate answer
                    ans = Math.max(ans, end - base + 1);
                }
            }
            base = Math.max(end, base + 1);
        }
        return ans;
    }
}

# 5ms 96.82%
class Solution {
    public int longestMountain(int[] A) {
        int res = 0;
        for (int i = 1; i < A.length - 1; i++) {
            if (A[i - 1] < A[i] && A[i] > A[i + 1]) {
                int left = i - 1;
                int right = i + 1;
                while (left > 0 && A[left - 1] < A[left]) left--;
                while (right < A.length - 1 && A[right + 1] < A[right]) right++;
                res = Math.max(res, right - left + 1);
            }
        }
        return res;
    }
}
'''
|
lorganthesorn/CryptoArb | Analysis/calculator.py | Python | mit | 603 | 0.011609 | from M | arkets import Poloniex
from Markets import YoBit
p = Poloniex.Poloniex(None, None)
y = YoBit.Yobit(None, None)
#pPairs = [i for i in p.getTickers().keys()]
#yPairs = [i for i in y.getTickers().keys()]
#pPairs.sort()
#yPairs.sort()
yBook = y.returnOrderBook('dash_btc')
pBook = p.returnOr | derBook('BTC_DASH')
yBid, yAsk = float(yBook['bids'][0][0]), float(yBook['asks'][0][0])
pBid, pAsk = float(pBook['bids'][0][0]), float(pBook['asks'][0][0])
print('arb: %0.4f, y: %0.4f, p: %0.4f'%((yAsk - pBid)/pBid, yAsk, pBid))
print('arb: %0.4f, y: %0.4f, p: %0.4f'%((pAsk - yBid)/yBid, pAsk, yBid))
|
hzdg/django-staticbuilder | staticbuilder/management/commands/collectforbuild.py | Python | mit | 5,220 | 0.001149 | from blessings import Terminal
from django.conf import settings
from django.contrib.staticfiles import finders, storage as djstorage
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.utils.encoding import smart_text
from optparse import make_option
import os
from ...storage import BuiltFileStorage
from ...utils import patched_settings, patched_finders
# Terminal handle used for colored console output.
t = Terminal()

# Shared keyword arguments for the --clean flag, used by both the
# Django<1.10 option_list path and the >=1.10 add_arguments() path.
clean_option_kwargs = {
    'action': 'store_true',
    'dest': 'clean',
    'default': False,
    'help': 'Remove artifacts from previous builds',
}
class Command(BaseCommand):
    """
    Collects all static files into ``STATICBUILDER_BUILD_ROOT``.
    """

    help = 'Collect your static assets for building.'
    requires_model_validation = False

    @property
    def option_list(self):
        """
        For compatibility with Django<1.10, which builds its option
        parser from ``option_list``.
        """
        try:
            # BUG FIX: the option must be wrapped in a one-element tuple.
            # Previously ``tuple + Option`` raised TypeError, which the
            # (then bare) except swallowed -- silently dropping --clean
            # on Django<1.10.
            return BaseCommand.option_list + (
                make_option('-c', '--clean', **clean_option_kwargs),
            )
        except AttributeError:
            # Django >= 1.10 removed BaseCommand.option_list; options are
            # registered via add_arguments() instead.
            return None

    def add_arguments(self, parser):
        """Register command options for Django >= 1.10."""
        parser.add_argument('-c', '--clean', **clean_option_kwargs)

    def handle(self, *args, **options):
        """Entry point: collect static assets into the build directory."""
        self.clean = options['clean']
        self.verbosity = int(options.get('verbosity', '1'))

        build_dir = settings.STATICBUILDER_BUILD_ROOT
        if not build_dir:
            raise ImproperlyConfigured('STATICBUILDER_BUILD_ROOT must be set.')

        # Copy the static assets to the build directory.
        self.log(t.bold('Collecting static assets for building...'))
        self.call_command_func(self.collect_for_build, build_dir)

    def call_command_func(self, func, *args, **kwargs):
        """Run *func* with terminal output dimmed; restore colors after."""
        print(t.bright_black)
        try:
            result = func(*args, **kwargs)
        finally:
            print(t.normal)
        return result

    def collect_for_build(self, build_dir):
        """Run collectstatic into BuiltFileStorage (the build directory)."""
        with patched_finders():
            with patched_settings(STATICBUILDER_COLLECT_BUILT=False):
                # Patch the static files storage used by collectstatic
                storage = BuiltFileStorage()
                old_storage = djstorage.staticfiles_storage
                djstorage.staticfiles_storage = storage
                try:
                    call_command('collectstatic',
                                 verbosity=self.verbosity - 1,
                                 interactive=False,
                                 ignore_patterns=settings.STATICBUILDER_EXCLUDE_FILES)
                finally:
                    djstorage.staticfiles_storage = old_storage

                # Delete the files that have been removed.
                if self.clean:
                    self.clean_built(storage)

    def find_all(self, storage, dir=''):
        """
        Find all files in the specified directory, recursively.

        Returns a ``(dirs, files)`` tuple of path sets relative to the
        storage root.
        """
        all_dirs = set()
        all_files = set()
        with patched_settings(STATICBUILDER_COLLECT_BUILT=True):
            dirs, files = storage.listdir(dir)
            all_dirs.update(os.path.join(dir, d) for d in dirs)
            all_files.update(os.path.join(dir, f) for f in files)
            for d in dirs:
                nested_dirs, nested_files = self.find_all(storage, os.path.join(dir, d))
                all_dirs.update(nested_dirs)
                all_files.update(nested_files)
        return (all_dirs, all_files)

    def clean_built(self, storage):
        """
        Clear any static files that aren't from the apps.
        """
        build_dirs, built_files = self.find_all(storage)

        # Everything the finders still know about is considered live.
        found_files = set()
        for finder in finders.get_finders():
            for path, s in finder.list([]):
                # Prefix the relative path if the source storage contains it
                if getattr(s, 'prefix', None):
                    prefixed_path = os.path.join(s.prefix, path)
                else:
                    prefixed_path = path
                found_files.add(prefixed_path)

        stale_files = built_files - found_files
        for fpath in stale_files:
            self.log(u"Deleting '%s'" % smart_text(fpath), level=1)
            storage.delete(fpath)

        # Collect every ancestor directory of the surviving files so we
        # only remove directories that are now truly empty.
        found_dirs = set()
        for f in found_files:
            path = f
            while True:
                path = os.path.dirname(path)
                found_dirs.add(path)
                if not path:
                    break

        stale_dirs = set(build_dirs) - found_dirs
        for fpath in stale_dirs:
            try:
                storage.delete(fpath)
            except OSError:
                self.log(u"Couldn't remove empty directory '%s'" % smart_text(fpath), level=1)
            else:
                self.log(u"Deleted empty directory '%s'" % smart_text(fpath), level=1)

    def log(self, msg, level=1):
        """
        Log helper; from Django's collectstatic command.
        """
        msg = smart_text(msg)
        if not msg.endswith("\n"):
            msg += "\n"
        if level > 1:
            msg = t.bright_black(msg)
        if self.verbosity >= level:
            self.stdout.write(msg)
|
nylas/flanker | tests/addresslib/address_test.py | Python | apache-2.0 | 32,254 | 0.001502 | # coding:utf-8
from nose.tools import assert_raises, eq_, ok_
from flanker.addresslib.address import (Address, AddressList, EmailAddress,
UrlAddress)
from flanker.addresslib.address import parse, parse_list
def test_addr_properties():
    """Smoke-test routing support, addr_type and case normalization."""
    email = parse('name@host.com')
    url = parse('http://host.com')
    non_ascii = parse(u'Gonzalo Bañuelos<gonz@host.com>')

    eq_(False, url.supports_routing)
    eq_(True, email.supports_routing)

    eq_(Address.Type.Email, email.addr_type)
    eq_(Address.Type.Url, url.addr_type)
    eq_(non_ascii, "gonz@host.com")

    # Hostnames are lowercased while the mailbox keeps its case.
    adr = parse("Zeka <EV@host.coM>")
    eq_(str(adr), 'EV@host.com')
def test_address_compare():
    """Equality, inequality, hashing and string comparison for addresses."""
    a = EmailAddress("a@host.com")
    b = EmailAddress("b@host.com")
    also_a = EmailAddress("A@host.com")

    # Local-part comparison is case-insensitive here.
    ok_(a == also_a)
    #eq_(False, a != "I am also A <a@HOST.com>")
    ok_(a != 'crap')
    ok_(a != None)
    ok_(a != b)

    u = UrlAddress("http://hello.com")
    ok_(u == "http://hello.com")

    # make sure it works for sets:
    s = set()
    s.add(a)
    s.add(also_a)
    eq_(1, len(s))
    s.add(u)
    s.add(u)
    eq_(2, len(s))

    # test string comparison
    ok_(a == a.address)
    ok_(not (a != a.address))
    ok_(b != a.address)
    ok_(not (b == a.address))
def test_local_url():
    """A URL without an authority component has no hostname."""
    u = UrlAddress('http:///foo/bar')
    eq_(None, u.hostname)
def test_addresslist_basics():
    """Parsing, round-tripping, containment and concatenation of lists."""
    lst = parse_list("http://foo.com:1000; Biz@Kontsevoy.Com ")
    eq_(2, len(lst))
    eq_("http", lst[0].scheme)
    eq_("kontsevoy.com", lst[1].hostname)
    eq_("Biz", lst[1].mailbox)
    ok_("biz@kontsevoy.com" in lst)

    # test case-sensitivity: hostname must be lowercased, but the local-part needs
    # to remain case-sensitive
    ok_("Biz@kontsevoy.com" in str(lst))

    # check parsing:
    spec = '''http://foo.com:8080, "Ev K." <ev@host.com>, "Alex K" <alex@yahoo.net>; "Tom, S" <"tom+[a]"@s.com>'''
    lst = parse_list(spec, True)
    eq_(len(lst), 4)
    eq_("http://foo.com:8080", lst[0].address)
    eq_("ev@host.com", lst[1].address)
    eq_("alex@yahoo.net", lst[2].address)
    eq_('"tom+[a]"@s.com', lst[3].address)

    # string-based persistence:
    s = str(lst)
    clone = parse_list(s)
    eq_(lst, clone)

    # now clone using full spec:
    s = lst.full_spec()
    clone = parse_list(s)
    eq_(lst, clone)

    # hostnames:
    eq_(set(['host.com', 'foo.com', 'yahoo.net', 's.com']), lst.hostnames)
    eq_(set(['url', 'email']), lst.addr_types)

    # add: concatenation with another AddressList and a plain list of strings
    result = lst + parse_list("ev@local.net") + ["foo@bar.com"]
    ok_(isinstance(result, AddressList))
    eq_(len(result), len(lst)+2)
    ok_("foo@bar.com" in result)
def test_addresslist_with_apostrophe():
    """Apostrophes survive parsing; non-ASCII display names get RFC2047-encoded."""
    s = '''"Allan G\'o"  <allan@example.com>, "Os Wi" <oswi@example.com>'''
    lst = parse_list(s)
    eq_(2, len(lst))
    eq_('Allan G\'o <allan@example.com>', lst[0].full_spec())
    eq_('Os Wi <oswi@example.com>', lst[1].full_spec())
    lst = parse_list("Eugueny ώ Kontsevoy <eugueny@gmail.com>")
    eq_('=?utf-8?q?Eugueny_=CF=8E_Kontsevoy?= <eugueny@gmail.com>', lst.full_spec())
    eq_(u'Eugueny ώ Kontsevoy', lst[0].display_name)
def test_addresslist_non_ascii_list_input():
    """parse_list accepts a Python list of (unicode) address strings."""
    al = [u'Aurélien Berger  <ab@example.com>', 'Os Wi <oswi@example.com>']
    lst = parse_list(al)
    eq_(2, len(lst))
    eq_('=?utf-8?q?Aur=C3=A9lien_Berger?= <ab@example.com>', lst[0].full_spec())
    eq_('Os Wi <oswi@example.com>', lst[1].full_spec())
def test_addresslist_address_obj_list_input():
    """parse_list also accepts pre-constructed Address objects."""
    al = [EmailAddress(u'Aurélien Berger  <ab@example.com>'),
          UrlAddress('https://www.example.com')]
    lst = parse_list(al)
    eq_(2, len(lst))
    eq_('=?utf-8?q?Aur=C3=A9lien_Berger?= <ab@example.com>',
        lst[0].full_spec())
    eq_('https://www.example.com', lst[1].full_spec())
def test_edge_cases():
    """A quoted local part may itself contain an '@'."""
    email = EmailAddress('"foo.bar@"@example.com')
    eq_('"foo.bar@"@example.com', email.address)
def test_display_name__to_full_spec():
    """Display names are quoted/escaped or RFC2047-encoded in full_spec()."""
    eq_('"foo (\\"bar\\") blah" <foo@bar.com>',
        EmailAddress('foo ("bar") blah', 'foo@bar.com').full_spec())
    eq_('foo. bar <foo@bar.com>',
        EmailAddress('foo. bar', 'foo@bar.com').full_spec())
    eq_('"\\"\\"" <foo@bar.com>',
        EmailAddress('""', 'foo@bar.com').full_spec()),
    # Non-ASCII display names are emitted as encoded-words (base64 utf-8).
    eq_('=?utf-8?b?0J/RgNC40LLQtdGCINCc0LXQtNCy0LXQtA==?= <foo@bar.com>',
        EmailAddress(u'Привет Медвед', 'foo@bar.com').full_spec())
def test_address_convertible_2_ascii():
for i, tc in enumerate([{
'desc': 'display_name=empty, domain=ascii',
'addr': 'Foo@Bar.com',
'display_name': u'',
'ace_display_name': '',
'hostname': u'bar.com',
'ace_hostname': 'bar.com',
'address': u'Foo@bar.com',
'ace_address': 'Foo@bar.com',
'repr': 'Foo@bar.com',
'str': 'Foo@bar.com',
'unicode': u'Foo@bar.com',
'full_spec': 'Foo@bar.com',
}, {
'desc': 'display_name=ascii, domain=ascii',
'addr': 'Blah <Foo@Bar.com>',
'display_name': u'Blah',
'ace_display_name': 'Blah',
'hostname': u'bar.com',
'ace_hostname': 'bar.com',
'address': u'Foo@bar.com',
'ace_address': 'Foo@bar.com',
'repr': 'Blah <Foo@bar.com>',
'str': 'Foo@bar.com',
'unicode': u'Blah <Foo@bar.com>',
'full_spec': 'Blah <Foo@bar.com>',
}, {
'desc': 'display_name=utf8, domain=ascii',
'addr': u'Федот <Foo@Bar.com>',
'display_name': u'Федот',
'ace_display_name': '=?utf-8?b?0KTQtdC00L7Rgg==?=',
'hostname': u'bar.com',
'ace_hostname': 'bar.com',
'address': u'Foo@bar.com',
'ace_address': 'Foo@bar.com',
'repr': 'Федот <Foo@bar.com>',
'str': 'Foo@bar.com',
'unicode': u'Федот <Foo@bar.com>',
'full_spec': '=?utf-8?b?0KTQtdC00L7Rgg==?= <Foo@bar.com>',
}, {
'desc': 'display_name=encoded-utf8, domain=ascii',
'addr': '=?utf-8?b?0KTQtdC00L7Rgg==?= <Foo@Bar.com>',
'display_name': u'Федот',
'ace_display_name': '=?utf-8?b?0KTQtdC00L7Rgg==?=',
'hostname': u'bar.com',
'ace_hostname': 'bar.com',
'address': u'Foo@bar.com',
'ace_address': 'Foo@bar.com',
'repr': 'Федот <Foo@bar.com>',
'str': 'Foo@bar.com',
'unicode': u'Федот <Foo@bar.com>',
'full_spec': '=?utf-8?b?0KTQtdC00L7Rgg==?= <Foo@bar.com>',
}, {
'desc': 'display_name=bad-encoding, domain=ascii',
'addr': '=?blah0KTQtdC00L7Rgg==?= <Foo@Bar.com>',
'display_name': u'=?blah0KTQtdC00L7Rgg==?=',
'ace_display_name': '=?blah0KTQtdC00L7Rgg==?=',
'hostname': u'bar.com',
'ace_hostname': 'bar.com',
'address': u'Foo@bar.com',
'ace_address': 'Foo@bar.com',
'repr': '=?blah0KTQtdC00L7Rgg==?= <Foo@bar.com>',
'str': 'Foo@bar.com',
'unicode': u'=?blah0KTQtdC00L7Rgg==?= <Foo@bar.com>',
'full_spec': '=?blah0KTQtdC00L7Rgg==?= <Foo@bar.com>',
}, {
'desc': 'display_name=empty, domain=utf8',
'addr': u'Foo@Почта.рф',
'display_name': u'',
'ace_display_name': '',
'hostname': u'почта.рф',
'ace_hostname': 'xn--80a1acny.xn--p1ai',
'address': u'Foo@почта.рф',
'ace_address': 'Foo@xn--80a1acny.xn--p1ai',
'repr': 'Foo@почта.рф',
'str': 'Foo@почта.рф',
'unicode': u'Foo@почта.рф',
'full_spec': 'Foo@xn--80a1acny.xn--p1ai',
}, {
'desc': 'display_name=ascii, domain=utf8',
'addr': u'Blah <Foo@Почта.рф>',
'display_name': u'Blah',
'ace_display_name': 'Blah',
'hostname': u'п |
talon-one/talon_one.py | talon_one/models/new_attribute.py | Python | mit | 13,648 | 0 | # coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class NewAttribute(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'entity': 'str',
'event_type': 'str',
'name': 'str',
'title': 'str',
'type': 'str',
'description': 'str',
'suggestions': 'list[str]',
'editable': 'bool',
'subscribed_applications_ids': 'list[int]'
}
attribute_map = {
'entity': 'entity',
'event_type': 'eventType',
'name': 'name',
'title': 'title',
'type': 'type',
'description': 'description',
'suggestions': 'suggestions',
'editable': 'editable',
'subscribed_applications_ids': 'subscribedApplicationsIds'
}
    def __init__(self, entity=None, event_type=None, name=None, title=None, type=None, description=None, suggestions=None, editable=None, subscribed_applications_ids=None, local_vars_configuration=None):  # noqa: E501
        """NewAttribute - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        # Private backing fields; the assignments below go through the
        # property setters so client-side validation runs on construction.
        self._entity = None
        self._event_type = None
        self._name = None
        self._title = None
        self._type = None
        self._description = None
        self._suggestions = None
        self._editable = None
        self._subscribed_applications_ids = None
        self.discriminator = None

        self.entity = entity
        # Optional fields are only assigned when supplied so they stay unset
        # (and are not serialized) otherwise.
        if event_type is not None:
            self.event_type = event_type
        self.name = name
        self.title = title
        self.type = type
        self.description = description
        self.suggestions = suggestions
        self.editable = editable
        if subscribed_applications_ids is not None:
            self.subscribed_applications_ids = subscribed_applications_ids
    @property
    def entity(self):
        """Gets the entity of this NewAttribute.  # noqa: E501

        The name of the entity that can have this attribute. When creating or updating the entities of a given type, you can include an `attributes` object with keys corresponding to the `name` of the custom attributes for that type.  # noqa: E501

        :return: The entity of this NewAttribute.  # noqa: E501
        :rtype: str
        """
        return self._entity

    @entity.setter
    def entity(self, entity):
        """Sets the entity of this NewAttribute.

        The name of the entity that can have this attribute. When creating or updating the entities of a given type, you can include an `attributes` object with keys corresponding to the `name` of the custom attributes for that type.  # noqa: E501

        :param entity: The entity of this NewAttribute.  # noqa: E501
        :type: str
        """
        # entity is required and (when client-side validation is on) must
        # be one of the known entity kinds below.
        if self.local_vars_configuration.client_side_validation and entity is None:  # noqa: E501
            raise ValueError("Invalid value for `entity`, must not be `None`")  # noqa: E501
        allowed_values = ["Account", "Application", "Campaign", "CustomerProfile", "CustomerSession", "CartItem", "Coupon", "Event"]  # noqa: E501
        if self.local_vars_configuration.client_side_validation and entity not in allowed_values:  # noqa: E501
            raise ValueError(
                "Invalid value for `entity` ({0}), must be one of {1}"  # noqa: E501
                .format(entity, allowed_values)
            )

        self._entity = entity
    @property
    def event_type(self):
        """Gets the event_type of this NewAttribute.  # noqa: E501

        :return: The event_type of this NewAttribute.  # noqa: E501
        :rtype: str
        """
        return self._event_type

    @event_type.setter
    def event_type(self, event_type):
        """Sets the event_type of this NewAttribute.

        :param event_type: The event_type of this NewAttribute.  # noqa: E501
        :type: str
        """
        # Optional field: no client-side validation is generated for it.
        self._event_type = event_type
    @property
    def name(self):
        """Gets the name of this NewAttribute.  # noqa: E501

        The attribute name that will be used in API requests and Talang. E.g. if `name == \"region\"` then you would set the region attribute by including an `attributes.region` property in your request payload.  # noqa: E501

        :return: The name of this NewAttribute.  # noqa: E501
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this NewAttribute.

        The attribute name that will be used in API requests and Talang. E.g. if `name == \"region\"` then you would set the region attribute by including an `attributes.region` property in your request payload.  # noqa: E501

        :param name: The name of this NewAttribute.  # noqa: E501
        :type: str
        """
        # name is a required field.
        if self.local_vars_configuration.client_side_validation and name is None:  # noqa: E501
            raise ValueError("Invalid value for `name`, must not be `None`")  # noqa: E501

        self._name = name
    @property
    def title(self):
        """Gets the title of this NewAttribute.  # noqa: E501

        The human-readable name for the attribute that will be shown in the Campaign Manager. Like `name`, the combination of entity and title must also be unique.  # noqa: E501

        :return: The title of this NewAttribute.  # noqa: E501
        :rtype: str
        """
        return self._title

    @title.setter
    def title(self, title):
        """Sets the title of this NewAttribute.

        The human-readable name for the attribute that will be shown in the Campaign Manager. Like `name`, the combination of entity and title must also be unique.  # noqa: E501

        :param title: The title of this NewAttribute.  # noqa: E501
        :type: str
        """
        # title is a required field.
        if self.local_vars_configuration.client_side_validation and title is None:  # noqa: E501
            raise ValueError("Invalid value for `title`, must not be `None`")  # noqa: E501

        self._title = title
    @property
    def type(self):
        """Gets the type of this NewAttribute.  # noqa: E501

        The data type of the attribute, a `time` attribute must be sent as a string that conforms to the [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) timestamp format.  # noqa: E501

        :return: The type of this NewAttribute.  # noqa: E501
        :rtype: str
        """
        # Read-only accessor; validation happens in the corresponding setter.
        return self._type
@type.setter
def type(self, type):
"""Sets the type of this NewAttribute.
The data type of the attribute, a `time` attribute must be sent as a string that conforms to the [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) timestamp format. # noqa: E501
:param type: The type of this NewAttribute. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and type |
SysTheron/adhocracy | src/adhocracy/lib/cli.py | Python | agpl-3.0 | 10,730 | 0.000186 | import itertools
from logging import getLogger
import os
import signal
import threading
import time
import paste.script
import paste.fixture
import paste.registry
import paste.deploy.config
from paste.deploy import loadapp
from paste.script.command import Command
import rq
from adhocracy import model
from adhocracy.lib import search
from adhocracy.lib import queue
log = getLogger(__name__)
class AdhocracyCommand(Command):
    """Base class for adhocracy paster commands.

    Provides the shared --config option plus helpers to load the
    paste/pylons configuration and to run the application's setup hooks.
    """
    parser = Command.standard_parser(verbose=True)
    parser.add_option('-c', '--config', dest='config',
                      default='etc/adhocracy.ini', help='Config file to use.')
    default_verbosity = 1
    group_name = 'adhocracy'

    def _load_config(self):
        """Load the ini file given via --config and prime pylons globals.

        Boots the WSGI app through paste and issues a request to the
        ``/_test_vars`` debugging endpoint so that pylons' thread-local
        registry (request, config, ...) is populated for code running
        outside a real web request.
        """
        from paste.deploy import appconfig
        if not self.options.config:
            msg = 'No config file supplied'
            raise self.BadCommand(msg)
        self.filename = os.path.abspath(self.options.config)
        self.logging_file_config(self.filename)
        conf = appconfig('config:' + self.filename)
        conf.update(dict(app_conf=conf.local_conf,
                         global_conf=conf.global_conf))
        paste.deploy.config.CONFIG.push_thread_config(conf)
        wsgiapp = loadapp('config:' + self.filename)
        test_app = paste.fixture.TestApp(wsgiapp)
        tresponse = test_app.get('/_test_vars')
        request_id = int(tresponse.body)
        # Keep the registry restored around any further test_app requests.
        test_app.pre_request_hook = lambda self: \
            paste.registry.restorer.restoration_end()
        test_app.post_request_hook = lambda self: \
            paste.registry.restorer.restoration_begin(request_id)
        paste.registry.restorer.restoration_begin(request_id)

    def _setup_app(self):
        # Equivalent to running "paster setup-app <config-file>".
        cmd = paste.script.appinstall.SetupCommand('setup-app')
        cmd.run([self.filename])
class AdhocracyTimer(object):
    """Cron-like scheduler backed by redis.

    Every ``timer_duration`` seconds :meth:`guard` wakes up, verifies this
    process still owns the redis lock (so only one timer runs per queue)
    and enqueues any periodical task whose next run is due.
    """
    # seconds between guard() wake-ups; the redis lock is renewed each time
    timer_duration = 60
    # schedule table: redis hash field -> (interval in seconds, task to enqueue)
    periodicals = {
        'minutely': dict(delay=60.0, task=queue.minutely),
        'hourly': dict(delay=3600.0, task=queue.hourly),
        'daily': dict(delay=86400.0, task=queue.daily)}

    def __init__(self, redis, queue_name):
        self.redis = redis
        self.queue_name = queue_name

    @property
    def lock_key(self):
        # redis key holding the pid of the process that owns the timer
        return '%s.guard.lock' % self.queue_name

    @property
    def schedules_key(self):
        # redis hash mapping periodical name -> next run (unix timestamp)
        # (sic: 'shedules' -- changing the key would orphan live schedules)
        return '%s.shedules' % self.queue_name

    def guard(self):
        '''
        Check if any of our periodical functions has to be called.
        This will set up a timer to call itself every 60 seconds.
        '''
        if self.get_lock():
            self.run_periodicals()
        self.setup_timer(self.timer_duration, self.guard)

    def run_periodicals(self):
        '''
        Run the periodical functions and schedule the next
        execution times if necessary.
        '''
        hash_name = self.schedules_key
        now = time.time()
        for key_name, periodical in self.periodicals.items():
            log.debug('periodical: %s' % str(periodical))
            # initialise the schedule slot to 0 (== due immediately)
            self.redis.hsetnx(hash_name, key_name, 0)
            next_run = self.redis.hget(hash_name, key_name)
            log.debug('next_run: %s' % next_run)
            if float(next_run) < (now + 1):
                log.debug('run now.')
                periodical['task'].enqueue()
                next_run = float(now + periodical['delay'])
                self.redis.hset(hash_name, key_name, next_run)
        # expire our schedules hash an hour after the next scheduled run
        max_duration = max([p['delay'] for p in self.periodicals.values()])
        expire = max_duration + 3600
        self.redis.expire(hash_name, int(expire))

    def get_lock(self):
        '''
        Return `True` if we have or can set a lock in redis. The lock
        will be set or extended for ``timer_duration + 1`` seconds (from
        the time it is set or renewed) and is valid for the current
        process.
        '''
        redis = self.redis
        key = self.lock_key
        # one second of slack past the next guard() wake-up
        duration = self.timer_duration + 1
        pid = self.pid
        log.debug('get_lock, pid: %s...' % pid)

        # set a new lock if it does not exist
        if redis.setnx(key, pid):
            redis.expire(key, duration)
            log.debug('new')
            return True

        # Get the current lock and check if it is ours:
        current_value = redis.get(key)
        log.debug('current value: %s, type: %s' % (current_value,
                                                   type(current_value)))
        if int(current_value) == pid:
            redis.expire(key, duration)
            #log.debug('extended')
            return True
        log.debug('nope')
        return False

    def setup_timer(self, interval, func):
        # Daemon thread so a pending timer never blocks interpreter exit.
        timer = threading.Timer(interval, func)
        timer.daemon = True
        timer.start()

    def start(self):
        """Remember our pid (used as the redis lock value) and start guarding."""
        self.pid = os.getpid()
        self.guard()
class Timer(AdhocracyCommand):
    '''Schedule periodical jobs.'''
    # NOTE: keep the docstring on a single line -- ``summary`` takes the
    # first line of __doc__, which used to be empty because the docstring
    # started with a newline (cf. Worker/Index, which are one-liners).
    summary = __doc__.split('\n')[0]
    usage = __doc__
    max_args = None
    min_args = None

    def command(self):
        """Start the scheduler loop and block until SIGTERM/SIGINT."""
        self._load_config()
        redis = queue.rq_config.connection
        if not redis:
            log.error('No redis connection available')
            exit(1)
        self.timer = AdhocracyTimer(redis, queue.rq_config.queue_name)
        self.timer.start()  # this will setup a daemonic timer thread
        # Keep the main thread alive; exit cleanly on termination signals.
        signal.signal(signal.SIGTERM, lambda signum, frame: exit(0))
        signal.signal(signal.SIGINT, lambda signum, frame: exit(0))
        signal.pause()
class Worker(AdhocracyCommand):
    '''Run Adhocracy background jobs.'''
    summary = __doc__.split('\n')[0]
    usage = __doc__
    max_args = None
    min_args = None
    redis = None
    queue = None

    def command(self):
        """Attach an rq worker to the configured queue and process jobs."""
        self._load_config()
        # Mark the process as a worker so enqueued tasks run in-process.
        queue.rq_config.in_worker = True
        connection = queue.rq_config.connection
        if not connection:
            log.error('No redis connection available')
            exit(1)
        queue_ = queue.rq_config.queue
        if not queue_:
            log.error('No queue available.')
            exit(1)
        worker = rq.Worker([queue_], connection=connection)
        worker.work()
class Index(AdhocracyCommand):
"""Re-create Adhocracy's search index."""
summary = __doc__.split('\n')[0]
max_args = 999
min_args = None
DROP = 'DROP'
INDEX = 'INDEX'
errors = False
_indexed_classes = None
def get_instances(self, args):
names = []
INSTANCES_KEYWORD = '-I'
remaining_args = args[:]
if INSTANCES_KEYWORD in args:
index = args.index(INSTANCES_KEYWORD)
names = self.args[index + 1:]
remaining_args = args[:index]
instances = []
for name in names:
instance = model.Instance.find(name, include_deleted=True)
if not instance:
print 'Instance "%s" not found.\n' % name
self.errors = True
else:
instances.append(instance)
return (remaining_args, instances)
    def get_classes(self, args):
        """Map content-type names in *args* to indexed model classes.

        Unknown names are reported and flag ``self.errors``.
        """
        classes = []
        for name in args:
            cls = self.indexed_classes.get(name)
            if cls is None:
                print 'Unknown content type "%s"' % name
                self.errors = True
            else:
                classes.append(cls)
        return classes
    def get_actions(self, args):
        """Extract the DROP/INDEX action keywords from *args*.

        Returns ``(remaining_args, actions)``; flags ``self.errors`` when
        no action keyword was given.
        """
        actions = []
        for action in [self.DROP, self.INDEX]:
            if action in args:
                actions.append(action)
                args.remove(action)
        if not actions:
            print 'No actions specified.'
            self.errors = True
        return args, actions
def command(self):
self._load_config()
# b/w compatibity
if not self.args:
self.start([self.INDEX], [], [])
exit(0)
args = self.args[:]
args, instances = self.get_instances(args)
args, actions = self.get_actions(args)
classes = self.get_classes(args)
if self.errors:
exit(1)
else:
self.start(actions, classes, instances |
coreynicholson/youtube-dl | youtube_dl/extractor/npo.py | Python | unlicense | 21,110 | 0.001895 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_HTTPError,
compat_str,
)
from ..utils import (
determine_ext,
ExtractorError,
fix_xml_ampersands,
orderedSet,
parse_duration,
qualities,
strip_jsonp,
unified_strdate,
)
class NPOBaseIE(InfoExtractor):
    """Shared base for NPO extractors: fetches the ida.omroep.nl auth token."""
    def _get_token(self, video_id):
        # The token authorises subsequent stream-info requests for the video.
        return self._download_json(
            'http://ida.omroep.nl/app.php/auth', video_id,
            note='Downloading token')['token']
class NPOIE(NPOBaseIE):
IE_NAME = 'npo'
IE_DESC = 'npo.nl and ntr.nl'
_VALID_URL = r'''(?x)
(?:
npo:|
https?://
(?:www\.)?
(?:
npo\.nl/(?!(?:live|radio)/)(?:[^/]+/){2}|
ntr\.nl/(?:[^/]+/){2,}|
omroepwnl\.nl/video/fragment/[^/]+__|
zapp\.nl/[^/]+/[^/]+/
)
)
(?P<id>[^/?#]+)
'''
_TESTS = [{
'url': 'http://www.npo.nl/nieuwsuur/22-06-2014/VPWON_1220719',
'md5': '4b3f9c429157ec4775f2c9cb7b911016',
'info_dict': {
'id': 'VPWON_1220719',
'ext': 'm4v',
'title': 'Nieuwsuur',
'description': 'Dagelijks tussen tien en elf: nieuws, sport en achtergronden.',
'upload_date': '20140622',
},
}, {
'url': 'http://www.npo.nl/de-mega-mike-mega-thomas-show/27-02-2009/VARA_101191800',
'md5': 'da50a5787dbfc1603c4ad80f31c5120b',
'info_dict': {
'id': 'VARA_101191800',
'ext': 'm4v',
'title': 'De Mega Mike & Mega Thomas show: The best of.',
'description': 'md5:3b74c97fc9d6901d5a665aac0e5400f4',
'upload_date': '20090227',
'duration': 2400,
},
}, {
'url': 'http://www.npo.nl/tegenlicht/25-02-2013/VPWON_1169289',
'md5': 'f8065e4e5a7824068ed3c7e783178f2c',
'info_dict': {
'id': 'VPWON_1169289',
'ext': 'm4v',
'title': 'Tegenlicht: Zwart geld. De toekomst komt uit Afrika',
'description': 'md5:52cf4eefbc96fffcbdc06d024147abea',
'upload_date': '20130225',
'duration': 3000,
} | ,
}, {
'url': 'http://www.npo.nl/de-nieuwe-mens-deel-1/21-07-2010/WO_VPRO_043706',
'info_dict': {
'id': 'WO_VPRO_043706',
'ext': 'm4v',
'title': 'De nieuwe mens - Deel 1',
'description': 'md5:518ae51ba1293ffb80d8d8ce90b74e4b',
| 'duration': 4680,
},
'params': {
'skip_download': True,
}
}, {
# non asf in streams
'url': 'http://www.npo.nl/hoe-gaat-europa-verder-na-parijs/10-01-2015/WO_NOS_762771',
'info_dict': {
'id': 'WO_NOS_762771',
'ext': 'mp4',
'title': 'Hoe gaat Europa verder na Parijs?',
},
'params': {
'skip_download': True,
}
}, {
'url': 'http://www.ntr.nl/Aap-Poot-Pies/27/detail/Aap-poot-pies/VPWON_1233944#content',
'info_dict': {
'id': 'VPWON_1233944',
'ext': 'm4v',
'title': 'Aap, poot, pies',
'description': 'md5:c9c8005d1869ae65b858e82c01a91fde',
'upload_date': '20150508',
'duration': 599,
},
'params': {
'skip_download': True,
}
}, {
'url': 'http://www.omroepwnl.nl/video/fragment/vandaag-de-dag-verkiezingen__POMS_WNL_853698',
'info_dict': {
'id': 'POW_00996502',
'ext': 'm4v',
'title': '''"Dit is wel een 'landslide'..."''',
'description': 'md5:f8d66d537dfb641380226e31ca57b8e8',
'upload_date': '20150508',
'duration': 462,
},
'params': {
'skip_download': True,
}
}, {
# audio
'url': 'http://www.npo.nl/jouw-stad-rotterdam/29-01-2017/RBX_FUNX_6683215/RBX_FUNX_7601437',
'info_dict': {
'id': 'RBX_FUNX_6683215',
'ext': 'mp3',
'title': 'Jouw Stad Rotterdam',
'description': 'md5:db251505244f097717ec59fabc372d9f',
},
'params': {
'skip_download': True,
}
}, {
'url': 'http://www.zapp.nl/de-bzt-show/gemist/KN_1687547',
'only_matching': True,
}, {
'url': 'http://www.zapp.nl/de-bzt-show/filmpjes/POMS_KN_7315118',
'only_matching': True,
}, {
'url': 'http://www.zapp.nl/beste-vrienden-quiz/extra-video-s/WO_NTR_1067990',
'only_matching': True,
}, {
# live stream
'url': 'npo:LI_NL1_4188102',
'only_matching': True,
}, {
'url': 'http://www.npo.nl/radio-gaga/13-06-2017/BNN_101383373',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
return self._get_info(video_id)
def _get_info(self, video_id):
metadata = self._download_json(
'http://e.omroep.nl/metadata/%s' % video_id,
video_id,
# We have to remove the javascript callback
transform_source=strip_jsonp,
)
# For some videos actual video id (prid) is different (e.g. for
# http://www.omroepwnl.nl/video/fragment/vandaag-de-dag-verkiezingen__POMS_WNL_853698
# video id is POMS_WNL_853698 but prid is POW_00996502)
video_id = metadata.get('prid') or video_id
# titel is too generic in some cases so utilize aflevering_titel as well
# when available (e.g. http://tegenlicht.vpro.nl/afleveringen/2014-2015/access-to-africa.html)
title = metadata['titel']
sub_title = metadata.get('aflevering_titel')
if sub_title and sub_title != title:
title += ': %s' % sub_title
token = self._get_token(video_id)
formats = []
urls = set()
quality = qualities(['adaptive', 'wmv_sb', 'h264_sb', 'wmv_bb', 'h264_bb', 'wvc1_std', 'h264_std'])
items = self._download_json(
'http://ida.omroep.nl/app.php/%s' % video_id, video_id,
'Downloading formats JSON', query={
'adaptive': 'yes',
'token': token,
})['items'][0]
for num, item in enumerate(items):
item_url = item.get('url')
if not item_url or item_url in urls:
continue
urls.add(item_url)
format_id = self._search_regex(
r'video/ida/([^/]+)', item_url, 'format id',
default=None)
def add_format_url(format_url):
formats.append({
'url': format_url,
'format_id': format_id,
'quality': quality(format_id),
})
# Example: http://www.npo.nl/de-nieuwe-mens-deel-1/21-07-2010/WO_VPRO_043706
if item.get('contentType') in ('url', 'audio'):
add_format_url(item_url)
continue
try:
stream_info = self._download_json(
item_url + '&type=json', video_id,
'Downloading %s stream JSON'
% item.get('label') or item.get('format') or format_id or num)
except ExtractorError as ee:
if isinstance(ee.cause, compat_HTTPError) and ee.cause.code == 404:
error = (self._parse_json(
ee.cause.read().decode(), video_id,
fatal=False) or {}).get('errorstring')
if error:
raise ExtractorError(error, expected=True)
raise
# Stream URL instead of JSON, example: npo:LI_NL1_4188102
if isinstance(stream_info, compat_str):
if not stream_info.startswith('http'):
co |
neerajvashistha/pa-dude | lib/python2.7/site-packages/sphinx/ext/pngmath.py | Python | mit | 9,187 | 0.000544 | # -*- coding: utf-8 -*-
"""
sphinx.ext.pngmath
~~~~~~~~~~~~~~~~~~
Render math in HTML via dvipng. This extension has been deprecated; please
use sphinx.ext.imgmath instead.
:copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import codecs
import shutil
import tempfile
import posixpath
from os import path
from subprocess import Popen, PIPE
from hashlib import sha1
from six import text_type
from docutils import nodes
import sphinx
from sphinx.errors import SphinxError, ExtensionError
from sphinx.util.png import read_png_depth, write_png_depth
from sphinx.util.osutil import ensuredir, ENOENT, cd
from sphinx.util.pycompat import sys_encoding
from sphinx.ext.mathbase import setup_math as mathbase_setup, wrap_displaymath
class MathExtError(SphinxError):
    """Raised when latex or dvipng fails while rendering a formula."""
    category = 'Math extension error'

    def __init__(self, msg, stderr=None, stdout=None):
        # Append the captured process output so the build log shows why
        # the external tool failed.
        for label, stream in (('stderr', stderr), ('stdout', stdout)):
            if stream:
                msg += '\n[%s]\n' % label + stream.decode(sys_encoding, 'replace')
        SphinxError.__init__(self, msg)
# Static LaTeX preamble wrapped around every rendered math snippet.
DOC_HEAD = r'''
\documentclass[12pt]{article}
\usepackage[utf8x]{inputenc}
\usepackage{amsmath}
\usepackage{amsthm}
\usepackage{amssymb}
\usepackage{amsfonts}
\usepackage{bm}
\pagestyle{empty}
'''
# Plain document body; %s receives the math source.
DOC_BODY = r'''
\begin{document}
%s
\end{document}
'''
# Variant using the "preview" package, which lets dvipng report the
# baseline depth of the rendered image (see render_math).
DOC_BODY_PREVIEW = r'''
\usepackage[active]{preview}
\begin{document}
\begin{preview}
%s
\end{preview}
\end{document}
'''
# Matches dvipng --depth output lines, e.g. b"[1 depth=3]".
depth_re = re.compile(br'\[\d+ depth=(-?\d+)\]')
def render_math(self, math):
    """Render the LaTeX math expression *math* using latex and dvipng.
    Return the filename relative to the built document and the "depth",
    that is, the distance of image bottom and baseline in pixels, if the
    option to use preview_latex is switched on.
    Error handling may seem strange, but follows a pattern: if LaTeX or
    dvipng aren't available, only a warning is generated (since that enables
    people on machines without these programs to at least build the rest
    of the docs successfully). If the programs are there, however, they
    may not fail since that indicates a problem in the math source.
    """
    use_preview = self.builder.config.pngmath_use_preview
    latex = DOC_HEAD + self.builder.config.pngmath_latex_preamble
    latex += (use_preview and DOC_BODY_PREVIEW or DOC_BODY) % math
    # Cache key: the PNG name is derived from the complete LaTeX source, so
    # an identical formula (with identical preamble) is rendered only once.
    shasum = "%s.png" % sha1(latex.encode('utf-8')).hexdigest()
    relfn = posixpath.join(self.builder.imgpath, 'math', shasum)
    outfn = path.join(self.builder.outdir, self.builder.imagedir, 'math', shasum)
    if path.isfile(outfn):
        # Cache hit: reuse the existing image and its stored depth.
        depth = read_png_depth(outfn)
        return relfn, depth
    # if latex or dvipng has failed once, don't bother to try again
    if hasattr(self.builder, '_mathpng_warned_latex') or \
       hasattr(self.builder, '_mathpng_warned_dvipng'):
        return None, None
    # use only one tempdir per build -- the use of a directory is cleaner
    # than using temporary files, since we can clean up everything at once
    # just removing the whole directory (see cleanup_tempdir)
    if not hasattr(self.builder, '_mathpng_tempdir'):
        tempdir = self.builder._mathpng_tempdir = tempfile.mkdtemp()
    else:
        tempdir = self.builder._mathpng_tempdir
    tf = codecs.open(path.join(tempdir, 'math.tex'), 'w', 'utf-8')
    tf.write(latex)
    tf.close()
    # build latex command; old versions of latex don't have the
    # --output-directory option, so we have to manually chdir to the
    # temp dir to run it.
    ltx_args = [self.builder.config.pngmath_latex, '--interaction=nonstopmode']
    # add custom args from the config file
    ltx_args.extend(self.builder.config.pngmath_latex_args)
    ltx_args.append('math.tex')
    with cd(tempdir):
        try:
            p = Popen(ltx_args, stdout=PIPE, stderr=PIPE)
        except OSError as err:
            if err.errno != ENOENT:   # No such file or directory
                raise
            # latex binary missing: warn once, remember, and degrade.
            self.builder.warn('LaTeX command %r cannot be run (needed for math '
                              'display), check the pngmath_latex setting' %
                              self.builder.config.pngmath_latex)
            self.builder._mathpng_warned_latex = True
            return None, None
    stdout, stderr = p.communicate()
    if p.returncode != 0:
        raise MathExtError('latex exited with error', stderr, stdout)
    ensuredir(path.dirname(outfn))
    # use some standard dvipng arguments
    dvipng_args = [self.builder.config.pngmath_dvipng]
    dvipng_args += ['-o', outfn, '-T', 'tight', '-z9']
    # add custom ones from config value
    dvipng_args.extend(self.builder.config.pngmath_dvipng_args)
    if use_preview:
        dvipng_args.append('--depth')
    # last, the input file name
    dvipng_args.append(path.join(tempdir, 'math.dvi'))
    try:
        p = Popen(dvipng_args, stdout=PIPE, stderr=PIPE)
    except OSError as err:
        if err.errno != ENOENT:  # No such file or directory
            raise
        # dvipng binary missing: warn once, remember, and degrade.
        self.builder.warn('dvipng command %r cannot be run (needed for math '
                          'display), check the pngmath_dvipng setting' %
                          self.builder.config.pngmath_dvipng)
        self.builder._mathpng_warned_dvipng = True
        return None, None
    stdout, stderr = p.communicate()
    if p.returncode != 0:
        raise MathExtError('dvipng exited with error', stderr, stdout)
    depth = None
    if use_preview:
        # dvipng --depth prints "[N depth=D]" per page; persist the first
        # depth inside the PNG so cache hits can recover it cheaply.
        for line in stdout.splitlines():
            m = depth_re.match(line)
            if m:
                depth = int(m.group(1))
                write_png_depth(outfn, depth)
                break
    return relfn, depth
def cleanup_tempdir(app, exc):
    """'build-finished' hook: remove the per-build temp dir used by latex.

    Nothing is removed when the build failed (*exc* is set), so the
    intermediate .tex/.dvi files remain available for debugging.
    (Reconstructed: the original lines were mangled in this copy.)
    """
    if exc:
        return
    tempdir = getattr(app.builder, '_mathpng_tempdir', None)
    if tempdir is None:
        # render_math never ran, so there is nothing to clean up.
        return
    try:
        shutil.rmtree(tempdir)
    except Exception:
        # Best effort only -- a leftover temp dir is harmless.
        pass
def get_tooltip(self, node):
    """Return an ``alt="..."`` attribute for *node*, or '' when disabled."""
    if not self.builder.config.pngmath_add_tooltips:
        return ''
    latex = self.encode(node['latex']).strip()
    return ' alt="%s"' % latex
def html_visit_math(self, node):
    """HTML visitor for *inline* math: emit a PNG <img>, or plain text on
    failure.  Always raises SkipNode so docutils does not descend further."""
    try:
        fname, depth = render_math(self, '$'+node['latex']+'$')
    except MathExtError as exc:
        msg = text_type(exc)
        sm = nodes.system_message(msg, type='WARNING', level=2,
                                  backrefs=[], source=node['latex'])
        sm.walkabout(self)
        # BUGFIX: this handler renders *inline* math; the old message said
        # 'display latex' (swapped with html_visit_displaymath).
        self.builder.warn('inline latex %r: ' % node['latex'] + msg)
        raise nodes.SkipNode
    if fname is None:
        # something failed -- use text-only as a bad substitute
        self.body.append('<span class="math">%s</span>' %
                         self.encode(node['latex']).strip())
    else:
        c = ('<img class="math" src="%s"' % fname) + get_tooltip(self, node)
        if depth is not None:
            # Shift the image down so its baseline matches the text's.
            c += ' style="vertical-align: %dpx"' % (-depth)
        self.body.append(c + '/>')
    raise nodes.SkipNode
def html_visit_displaymath(self, node):
    """HTML visitor for *display* (block) math: emit a PNG inside a
    <div class="math">, with an optional equation number."""
    if node['nowrap']:
        latex = node['latex']
    else:
        latex = wrap_displaymath(node['latex'], None,
                                 self.builder.config.math_number_all)
    try:
        fname, depth = render_math(self, latex)
    except MathExtError as exc:
        sm = nodes.system_message(str(exc), type='WARNING', level=2,
                                  backrefs=[], source=node['latex'])
        sm.walkabout(self)
        # BUGFIX: this handler renders *display* math; the old message said
        # 'inline latex' (swapped with html_visit_math).
        self.builder.warn('display latex %r: ' % node['latex'] + str(exc))
        raise nodes.SkipNode
    self.body.append(self.starttag(node, 'div', CLASS='math'))
    self.body.append('<p>')
    if node['number']:
        self.body.append('<span class="eqno">(%s)</span>' % node['number'])
    if fname is None:
        # something failed -- use text-only as a bad substitute
        self.body.append('<span class="math">%s</span></p>\n</div>' %
                         self.encode(node['latex']).strip())
    else:
        self.body.append(('<img src="%s"' % fname) + get_tooltip(self, node) +
                         '/></p>\n</div>')
    raise nodes.SkipNode
|
SummerLW/Perf-Insight-Report | dependency_manager/dependency_manager/manager.py | Python | bsd-3-clause | 7,128 | 0.003788 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from dependency_manager import base_config
from dependency_manager import exceptions
DEFAULT_TYPE = 'default'


class DependencyManager(object):
    """Resolves named file dependencies to local or cloud_storage paths."""

    def __init__(self, configs, supported_config_types=None):
        """Manages file dependencies found locally or in cloud_storage.

        Args:
          configs: A list of instances of BaseConfig or its subclasses,
              passed in decreasing order of precedence.
          supported_config_types: A list of whitelisted config_types.
              No restrictions if None is specified.

        Raises:
          ValueError: If |configs| is not a list of instances of BaseConfig
              or its subclasses.
          UnsupportedConfigFormatError: If supported_config_types is
              specified and configs contains a config not in the supported
              config_types.
        """
        # isinstance (rather than an exact type() comparison) also accepts
        # list subclasses; None and other types are still rejected.
        if not isinstance(configs, list):
            raise ValueError(
                'Must supply a list of config files to DependencyManager')
        # self._lookup_dict maps:
        #   {dependency: {platform: dependency_info, ...}, ...}
        # where dependency and platform are strings and dependency_info is
        # a DependencyInfo instance.
        self._lookup_dict = {}
        self.supported_configs = supported_config_types or []
        for config in configs:
            self._UpdateDependencies(config)

    def FetchPath(self, dependency, platform):
        """Get a path to an executable for |dependency|, downloading as needed.

        A path to a default executable may be returned if a platform
        specific version is not specified in the config(s).

        Args:
          dependency: Name of the desired dependency, as given in the
              config(s) used in this DependencyManager.
          platform: Name of the platform the dependency will run on, often
              of the form 'os_architecture'.

        Returns:
          A path to an executable of |dependency| that will run on
          |platform|, downloaded from cloud storage if needed.

        Raises:
          NoPathFoundError: If no local copy exists and no remote copy
              could be downloaded.  cloud_storage errors (credentials,
              permissions, missing remote file, server errors) propagate
              from GetRemotePath.
        """
        dependency_info = self._GetDependencyInfo(dependency, platform)
        if not dependency_info:
            raise exceptions.NoPathFoundError(dependency, platform)
        path = dependency_info.GetLocalPath()
        if not path or not os.path.exists(path):
            path = dependency_info.GetRemotePath()
            if not path or not os.path.exists(path):
                raise exceptions.NoPathFoundError(dependency, platform)
        return path

    def LocalPath(self, dependency, platform):
        """Get a locally stored executable for |dependency| on |platform|.

        Like FetchPath, but never downloads.

        Raises:
          NoPathFoundError: If no local copy of the executable exists.
        """
        dependency_info = self._GetDependencyInfo(dependency, platform)
        if not dependency_info:
            raise exceptions.NoPathFoundError(dependency, platform)
        local_path = dependency_info.GetLocalPath()
        if not local_path or not os.path.exists(local_path):
            raise exceptions.NoPathFoundError(dependency, platform)
        return local_path

    def _UpdateDependencies(self, config):
        """Merge the dependency information stored in |config| into self.

        Raises:
          ValueError: If |config| is not a BaseConfig (or subclass) instance.
          UnsupportedConfigFormatError: If supported_config_types was
              specified and config's type is not among them.
        """
        if not isinstance(config, base_config.BaseConfig):
            raise ValueError('Must use a BaseConfig or subclass instance with '
                             'the DependencyManager.')
        if (self.supported_configs and
                config.GetConfigType() not in self.supported_configs):
            raise exceptions.UnsupportedConfigFormatError(
                config.GetConfigType(), config.config_path)
        for dep_info in config.IterDependencyInfo():
            dependency = dep_info.dependency
            platform = dep_info.platform
            # Configs were passed in decreasing precedence: the first config
            # to define a (dependency, platform) pair wins, later ones only
            # fill in missing pieces via Update().
            platforms = self._lookup_dict.setdefault(dependency, {})
            if platform not in platforms:
                platforms[platform] = dep_info
            else:
                platforms[platform].Update(dep_info)

    def _GetDependencyInfo(self, dependency, platform):
        """Return info for |dependency| on |platform|, or a default.

        Falls back to the 'default' platform entry when |platform| has no
        specific entry; returns None if neither exists.
        """
        if not self._lookup_dict or dependency not in self._lookup_dict:
            return None
        dependency_dict = self._lookup_dict[dependency]
        device_type = platform
        if device_type not in dependency_dict:
            device_type = DEFAULT_TYPE
        return dependency_dict.get(device_type)
|
hujiaweibujidao/XSolutions | python/APythonTest.py | Python | apache-2.0 | 2,492 | 0.002097 | # # coding=utf-8
# import os
# def tree(top):
# #path,folder list,file list
# for path, names, fnames in os.walk(top):
# for fname in fnames:
# yield os.path.join(path, fname)
#
# for name in tree(os.getcwd()):
# print name
#
# import time
# from functools import wraps
#
# def timethis(func):
# '''
# Decorator that reports the execution time.
# '''
# @wraps(func)
# def wrapper(*args, **kwargs):
# start = time.time()
# result = func(*args, **kwargs)
# end = time.time()
# print(func.__name__, end-start)
# return result
# return wrapper
#
# @timethis
# def countdown(n):
# while n > 0:
# n -= 1
#
# countdown(100000)
#
# class demo(object):
# pass
#
# obj = demo()
#
# print "Class of obj is {0}".format(obj.__class__)
# print "Class of obj is {0}".format(demo.__class__)
# # Class of obj is <class '__main__.demo'>
# | # Class of obj is <type 'type'>
# # print(obj.__metaclass__) #
#
# def temp(x):
# return x+2
#
# print(temp.func_code)
#
# import dis
# print([ord(b) for b in temp.func_code.co_code])
# dis.dis(temp.func_code)
#
# #写一个程序,打印数字1到100,3的倍数打印“Fizz”来替换 | 这个数,5的倍数打印“Buzz”,
# #对于既是3的倍数又是5的倍数的数字打印“FizzBuzz”。
# for x in range(101):
# #4 is string length
# print "fizz"[x%3*4::]+"buzz"[x%5*4::] or x
#
# class decorator(object):
#
# def __init__(self, f):
# print("inside decorator.__init__()")
# # f() # Prove that function definition has completed
# self.f=f
#
# def __call__(self):
# print("inside decorator.__call__() begin")
# self.f()
# print("inside decorator.__call__() end")
#
# @decorator
# def function():
# print("inside function()")
#
# print("Finished decorating function()")
#
# function()
#
# # inside decorator.__init__()
# # Finished decorating function()
# # inside decorator.__call__() begin
# # inside function()
# # inside decorator.__call__() end
import os
def tree(top):
    """Walk *top* recursively, yielding the bare file name of every file."""
    # os.walk yields a (dirpath, dirnames, filenames) triple per directory.
    for _dirpath, _dirnames, filenames in os.walk(top):
        for filename in filenames:
            yield filename
# Demo: print the name of every file under the current working directory.
for name in tree(os.getcwd()):
    print name
# NOTE(review): hard-coded personal path -- os.walk on a missing directory
# simply yields nothing, so on other machines this loop prints nothing.
for name in tree('/Users/hujiawei/Desktop/csu/'):
    print ''
# No extra entry-point behaviour; the demos above run on import as well.
if __name__ == '__main__':
    pass
|
mzdaniel/oh-mainline | vendor/packages/mock/tests/testmagicmethods.py | Python | agpl-3.0 | 10,336 | 0.003289 | # Copyright (C) 2007-2011 Michael Foord & the mock team
# E-mail: fuzzyman AT voidspace DOT org DOT uk
# http://www.voidspace.org.uk/python/mock/
from tests.support import unittest2, inPy3k
try:
unicode
except NameError:
# Python 3
unicode = str
long = int
import inspect
from mock import Mock, MagicMock, _magics
class TestMockingMagicMethods(unittest2.TestCase):
    def testDeletingMagicMethods(self):
        # A magic method assigned onto a plain Mock can be removed with del,
        # restoring the "unsupported protocol" state.
        mock = Mock()
        self.assertFalse(hasattr(mock, '__getitem__'))
        mock.__getitem__ = Mock()
        self.assertTrue(hasattr(mock, '__getitem__'))
        del mock.__getitem__
        self.assertFalse(hasattr(mock, '__getitem__'))
    def testMagicMethodWrapping(self):
        # Assigning a plain function as a magic method wraps it (it is not
        # stored verbatim), and the wrapper supplies `self` implicitly.
        mock = Mock()
        def f(self, name):
            return self, 'fish'
        mock.__getitem__ = f
        self.assertFalse(mock.__getitem__ is f)
        self.assertEqual(mock['foo'], (mock, 'fish'))
        # When you pull the function back of the *instance*
        # the first argument (self) is removed
        def instance_f(name):
            pass
        self.assertEqual(inspect.getargspec(mock.__getitem__), inspect.getargspec(instance_f))
        mock.__getitem__ = mock
        self.assertTrue(mock.__getitem__ is mock)
    def testMagicMethodsIsolatedBetweenMocks(self):
        # Configuring a magic method on one Mock must not leak to others.
        mock1 = Mock()
        mock2 = Mock()
        mock1.__iter__ = Mock(return_value=iter([]))
        self.assertEqual(list(mock1), [])
        self.assertRaises(TypeError, lambda: list(mock2))
    def testRepr(self):
        # repr() falls back to object.__repr__ until __repr__ is configured.
        mock = Mock()
        self.assertEqual(repr(mock), object.__repr__(mock))
        mock.__repr__ = lambda s: 'foo'
        self.assertEqual(repr(mock), 'foo')
    def testStr(self):
        # str() falls back to object.__str__ until __str__ is configured.
        mock = Mock()
        self.assertEqual(str(mock), object.__str__(mock))
        mock.__str__ = lambda s: 'foo'
        self.assertEqual(str(mock), 'foo')
    @unittest2.skipIf(inPy3k, "no unicode in Python 3")
    def testUnicode(self):
        # Python 2 only: unicode() mirrors str() until __unicode__ is set.
        mock = Mock()
        self.assertEqual(unicode(mock), unicode(str(mock)))
        mock.__unicode__ = lambda s: unicode('foo')
        self.assertEqual(unicode(mock), unicode('foo'))
    def testDictMethods(self):
        # Item access raises TypeError until __getitem__/__setitem__/
        # __delitem__ are configured; once set, they delegate to a real dict.
        mock = Mock()
        self.assertRaises(TypeError, lambda: mock['foo'])
        def _del():
            del mock['foo']
        def _set():
            mock['foo'] = 3
        self.assertRaises(TypeError, _del)
        self.assertRaises(TypeError, _set)
        _dict = {}
        def getitem(s, name):
            return _dict[name]
        def setitem(s, name, value):
            _dict[name] = value
        def delitem(s, name):
            del _dict[name]
        mock.__setitem__ = setitem
        mock.__getitem__ = getitem
        mock.__delitem__ = delitem
        # Exceptions from the backing dict (KeyError) propagate unchanged.
        self.assertRaises(KeyError, lambda: mock['foo'])
        mock['foo'] = 'bar'
        self.assertEqual(_dict, {'foo': 'bar'})
        self.assertEqual(mock['foo'], 'bar')
        del mock['foo']
        self.assertEqual(_dict, {})
    def testNumeric(self):
        # __add__, __iadd__ and __radd__ can each be configured separately;
        # unconfigured numeric protocols raise TypeError.
        original = mock = Mock()
        mock.value = 0
        self.assertRaises(TypeError, lambda: mock + 3)
        def add(self, other):
            mock.value += other
            return self
        mock.__add__ = add
        self.assertEqual(mock + 3, mock)
        self.assertEqual(mock.value, 3)
        del mock.__add__
        def iadd(mock):
            mock += 3
        self.assertRaises(TypeError, iadd, mock)
        mock.__iadd__ = add
        mock += 6
        self.assertEqual(mock, original)
        self.assertEqual(mock.value, 9)
        # Reflected add (int + mock) needs __radd__ explicitly.
        self.assertRaises(TypeError, lambda: 3 + mock)
        mock.__radd__ = add
        self.assertEqual(7 + mock, mock)
        self.assertEqual(mock.value, 16)
    def testHash(self):
        # hash() delegates to the class default until __hash__ is replaced.
        mock = Mock()
        # test delegation
        self.assertEqual(hash(mock), Mock.__hash__(mock))
        def _hash(s):
            return 3
        mock.__hash__ = _hash
        self.assertEqual(hash(mock), 3)
    def testNonZero(self):
        # Mocks are truthy by default; the boolean hook differs by version
        # (__nonzero__ on Python 2, __bool__ on Python 3).
        m = Mock()
        self.assertTrue(bool(m))
        nonzero = lambda s: False
        if not inPy3k:
            m.__nonzero__ = nonzero
        else:
            m.__bool__ = nonzero
        self.assertFalse(bool(m))
    def testComparison(self):
        # Ordering comparisons fall back to default object comparison on
        # Python 2 and can be overridden per-operator on both versions.
        # NOTE(review): the "self. assertEqual" spacing below is unusual but
        # valid Python (whitespace around the attribute dot).
        if not inPy3k:
            # incomparable in Python 3
            self. assertEqual(Mock() < 3, object() < 3)
            self. assertEqual(Mock() > 3, object() > 3)
            self. assertEqual(Mock() <= 3, object() <= 3)
            self. assertEqual(Mock() >= 3, object() >= 3)
        mock = Mock()
        def comp(s, o):
            return True
        mock.__lt__ = mock.__gt__ = mock.__le__ = mock.__ge__ = comp
        self. assertTrue(mock < 3)
        self. assertTrue(mock > 3)
        self. assertTrue(mock <= 3)
        self. assertTrue(mock >= 3)
def test_equality(self):
for mock in Mock(), MagicMock():
self.assertEqual(mock == mock, True)
self.assertEqual(mock != mock, False)
self. | assertEqual(mock == object(), False)
self.ass | ertEqual(mock != object(), True)
def eq(self, other):
return other == 3
mock.__eq__ = eq
self.assertTrue(mock == 3)
self.assertFalse(mock == 4)
def ne(self, other):
return other == 3
mock.__ne__ = ne
self.assertTrue(mock != 3)
self.assertFalse(mock != 4)
mock = MagicMock()
mock.__eq__.return_value = True
self.assertEqual(mock == 3, True)
mock.__ne__.return_value = False
self.assertEqual(mock != 3, False)
    def testLenContainsIter(self):
        # len(), iteration and `in` raise TypeError on a plain Mock until
        # the corresponding magic method is configured.
        mock = Mock()
        self.assertRaises(TypeError, len, mock)
        self.assertRaises(TypeError, iter, mock)
        self.assertRaises(TypeError, lambda: 'foo' in mock)
        mock.__len__ = lambda s: 6
        self.assertEqual(len(mock), 6)
        mock.__contains__ = lambda s, o: o == 3
        self.assertTrue(3 in mock)
        self.assertFalse(6 in mock)
        mock.__iter__ = lambda s: iter('foobarbaz')
        self.assertEqual(list(mock), list('foobarbaz'))
    def testMagicMock(self):
        # MagicMock pre-configures every supported magic method (those in
        # _magics), exposing only the version-appropriate boolean hook.
        mock = MagicMock()
        mock.__iter__.return_value = iter([1, 2, 3])
        self.assertEqual(list(mock), [1, 2, 3])
        if inPy3k:
            mock.__bool__.return_value = False
            self.assertFalse(hasattr(mock, '__nonzero__'))
        else:
            mock.__nonzero__.return_value = False
            self.assertFalse(hasattr(mock, '__bool__'))
        self.assertFalse(bool(mock))
        for entry in _magics:
            self.assertTrue(hasattr(mock, entry))
        self.assertFalse(hasattr(mock, '__imaginery__'))
    def testMagicMockDefaults(self):
        # Pin the sensible default return values of MagicMock's magic
        # methods (1 for numeric conversions, empty for containers, etc.).
        mock = MagicMock()
        self.assertEqual(int(mock), 1)
        self.assertEqual(complex(mock), 1j)
        self.assertEqual(float(mock), 1.0)
        self.assertEqual(long(mock), long(1))
        self.assertNotIn(object(), mock)
        self.assertEqual(len(mock), 0)
        self.assertEqual(list(mock), [])
        self.assertEqual(hash(mock), object.__hash__(mock))
        self.assertEqual(str(mock), object.__str__(mock))
        self.assertEqual(unicode(mock), object.__str__(mock))
        self.assertIsInstance(unicode(mock), unicode)
        self.assertTrue(bool(mock))
        if not inPy3k:
            self.assertEqual(oct(mock), '1')
        else:
            # in Python 3 oct and hex use __index__
            # so these tests are for __index__ in py3k
            self.assertEqual(oct(mock), '0o1')
        self.assertEqual(hex(mock), '0x1')
        # how to test __sizeof__ ?
    @unittest2.skipIf(inPy3k, "no __cmp__ in Python 3")
    def testNonDefaultMagicMethods(self):
        # __cmp__ is not pre-configured on MagicMock (Python 2 only), but a
        # plain Mock accepts it by assignment like any other magic method.
        mock = MagicMock()
        self.assertRaises(AttributeError, lambda: mock.__cmp__)
        mock = Mock()
        mock.__cmp__ = lambda s, o: 0
        self.assertEqual(mock, object())
def testMagicMethodsAndSpec(self):
class Iterable(object):
def __iter__(self):
pass
m |
haku86/happyowlweb | happyowlweb/happyowlweb/wsgi.py | Python | mit | 1,570 | 0.001274 | """
WSGI config for happyowlweb project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from | os.path import abspath, dirname
from sys import path
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "jajaja.setting | s"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "happyowlweb.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
sharph/lifx-python | lights_on.py | Python | agpl-3.0 | 71 | 0.014085 | #!/usr/bin/env python3
import lifx
lifx.set_power(lifx.BCAST, T | rue)
| |
RobertoPrevato/flask-three-template | bll/membership/__init__.py | Python | mit | 1,482 | 0.006073 | """
* Copyright 2015, Roberto Prevato roberto.prevato@gmail.com
* https://github.com/RobertoPrevato/Flask-three-template
*
* Licensed under the MIT license:
* http://www.opensou | rce.org/licenses/MIT
"""
from bll.membership.membershipprov | ider import MembershipProvider
def register_membership(app):
    """
    Initialize an application global membership provider.
    NB:
    an alternative, if needed, is to initialize a membership provider for each logical area (e.g. admin, public, etc.);
    which allows to use different database structures to store accounts and sessions for different parts of
    the application and to have different, parallel authentication mechanisms.
    This template includes a global membership provider, because it is a simpler model that suffices in most cases.

    Raises:
      NotImplementedError: if the configured DAL_PROJECT has no
          MembershipStore implementation.
    """
    DAL_PROJECT = app.config["DAL_PROJECT"]
    # initialize an application membership provider
    # NB: an alternative, if needed, is to initialize a membership provider for each area (e.g. admin and public areas)
    membership_store = None
    if DAL_PROJECT == "dalmongo":
        # imported lazily so other DAL backends don't require this package
        from dalmongo.membership.membershipstore import MembershipStore
        membership_store = MembershipStore()
    else:
        # BUGFIX: the original message read "for `x` implemented" (missing
        # "not"); NotImplementedError is still an Exception subclass, so
        # existing `except Exception` callers keep working.
        raise NotImplementedError(
            "MembershipStore for `{}` not implemented".format(DAL_PROJECT))
    # instantiate the membership provider
    provider = MembershipProvider({"store": membership_store})
    # attach to the application
    app.membership = provider
|
derekmoyes/opsy | opsy/plugins/monitoring/main.py | Python | mit | 1,477 | 0.001354 | from flask import Blueprint, render_template, redirect, url_for, current_app
# Blueprint bundling the monitoring UI pages; its templates and static
# assets live alongside this module.
monitoring_main = Blueprint('monitoring_main', __name__,  # pylint: disable=invalid-name
                            template_folder='templates',
                            static_url_path='/static',
                            static_folder='static')
@monitoring_main.context_processor
def inject_data():
    """Expose monitoring config values to every template in this blueprint."""
    monitoring_config = current_app.config['monitoring']
    return {
        'dashboards': monitoring_config['dashboards'],
        'uchiwa_url': monitoring_config['uchiwa_url'],
    }
@monitoring_main.route('/')
def index():
    """Send the monitoring root straight to the events page."""
    events_url = url_for('monitoring_main.events')
    return redirect(events_url)
@monitorin | g_main.route('/events')
def events():
return render_template('events.html', title='Events')
@monitoring_main.route('/checks')
def checks():
    """Render the checks overview page."""
    page_title = 'Checks'
    return render_template('checks.html', title=page_title)
@monitoring_main.route('/clients')
def clients():
    """Render the clients overview page."""
    page_title = 'Clients'
    return render_template('clients.html', title=page_title)
@monitoring_main.route('/clients/<zone>/<client_name>')
def client(zone, client_name):
    """Render the detail page for a single client."""
    context = {'zone': zone, 'client': client_name, 'title': 'Client Details'}
    return render_template('client_details.html', **context)
@monitoring_main.route('/clients/<zone>/<client_name>/events/<check>')
def client_event(zone, client_name, check):
    """Render the detail page for one event (check result) of a client."""
    context = {'zone': zone, 'client': client_name, 'check': check,
               'title': 'Event Details'}
    return render_template('client_event_details.html', **context)
seecr/meresco-distributed | test/failover/matchesversiontest.py | Python | gpl-2.0 | 7,957 | 0.003896 | ## begin license ##
#
# "Meresco Distributed" has components for group management based on "Meresco Components."
#
# Copyright (C) 2018, 2021 Seecr (Seek You Too B.V.) https://seecr.nl
# Copyright (C) 2021 Data Archiving and Network Services https://dans.knaw.nl
# Copyright (C) 2021 SURF https://www.surf.nl
# Copyright (C) 2021 Stichting Kennisnet https://www.kennisnet.nl
# Copyright (C) 2021 The Netherlands Institute for Sound and Vision https://beeldengeluid.nl
#
# This file is part of "Meresco Distributed"
#
# "Meresco Distributed" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Meresco Distributed" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Meresco Distributed"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
from os.path import join, isfile
from uuid import uuid4
from seecr.test import SeecrTestCase, CallTrace
from weightless.core import be, asString, consume, NoneOfTheObserversRespond, retval
from meresco.core import Observable
from meresco.distributed.constants import WRITABLE, READABLE
from meresco.distributed.utils import usrSharePath
from meresco.distributed.failover import MatchesVersion, Proxy, ServiceConfig
from meresco.distributed.failover._matchesversion import betweenVersionCondition
class MatchesVersionTest(SeecrTestCase):
    """Tests for MatchesVersion: messages are only forwarded to observers when
    the configured software version lies in [minVersion, untilVersion)."""

    def setUp(self):
        SeecrTestCase.setUp(self)
        self.matchesVersion = MatchesVersion(minVersion='1', untilVersion='3')
        self.observer = CallTrace('observer', methods=dict(somemessage=lambda: (x for x in ['result'])), emptyGeneratorMethods=['updateConfig'])
        self.top = be((Observable(),
            (self.matchesVersion,
                (self.observer,)
            )
        ))

    def testDoesNotMatchNoConfig(self):
        self.assertEqual('', asString(self.top.all.somemessage()))
        self.assertEqual([], self.observer.calledMethodNames())

    def testDoesNotMatchNoVersion(self):
        consume(self.matchesVersion.updateConfig(config={'foo': 'bar'}))
        self.assertEqual('', asString(self.top.all.somemessage()))
        self.assertEqual(['updateConfig'], self.observer.calledMethodNames())

    def testDoesNotMatch(self):
        consume(self.matchesVersion.updateConfig(**{'software_version': '0.1', 'config': {'foo': 'bar'}}))
        self.assertEqual('', asString(self.top.all.somemessage()))
        self.assertEqual(['updateConfig'], self.observer.calledMethodNames())

    def testDoesMatch(self):
        consume(self.matchesVersion.updateConfig(software_version='2'))
        self.assertEqual('result', asString(self.top.all.somemessage()))
        self.assertEqual(['updateConfig', 'somemessage'], self.observer.calledMethodNames())

    def testDeterminesConfig(self):
        newId = lambda: str(uuid4())
        services = {
            newId(): {'type': 'service1', 'ipAddress': '10.0.0.2', 'infoport': 1234, 'active': True, 'readable': True, 'writable': True, 'data': {'VERSION': '1.5'}},
            newId(): {'type': 'service2', 'ipAddress': '10.0.0.3', 'infoport': 1235, 'active': True, 'readable': True, 'writable': True, 'data': {'VERSION': '1.8'}},
        }
        config = {
            'service1.frontend': {
                'fqdn': 'service1.front.example.org',
                'ipAddress': '1.2.3.4',
            },
            'service2.frontend': {
                'fqdn': 'service2.front.example.org',
                'ipAddress': '1.2.3.5',
            },
        }
        configFile = join(self.tempdir, 'server.conf')
        top = be(
            (Proxy(nginxConfigFile=configFile),
                (MatchesVersion(
                    minVersion='1.4',
                    untilVersion='2.0'),
                    (ServiceConfig(
                        type='service1',
                        minVersion='1.4',
                        untilVersion='2.0',
                        flag=WRITABLE),
                    ),
                ),
                (MatchesVersion(
                    minVersion='1.4',
                    untilVersion='4.0'),
                    (ServiceConfig(
                        type='service2',
                        minVersion='1.4',
                        untilVersion='2.0',
                        flag=READABLE),
                    )
                )
            )
        )
        # software_version 3.0 only matches the second MatchesVersion, so only
        # service2 ends up in the generated nginx config.
        mustUpdate, sleeptime = top.update(software_version='3.0', config=config, services=services, verbose=False)
        self.assertTrue(mustUpdate)
        self.assertEqual(30, sleeptime)
        self.assertTrue(isfile(configFile))
        with open(configFile) as fp:
            self.assertEqualText("""## Generated by meresco.distributed.failover.Proxy
upstream __var_3ff29304e7437997bf4171776e1fe282_service2 {
    server 10.0.0.3:1235;
}

server {
    listen 1.2.3.5:80;
    server_name service2.front.example.org;
    proxy_set_header Host $host;
    proxy_set_header X-Real-IP $remote_addr;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    location / {
        proxy_pass http://__var_3ff29304e7437997bf4171776e1fe282_service2;
    }
    error_page 500 502 503 504 =503 /unavailable.html;
    location /unavailable.html {
        root %s/failover;
    }
    client_max_body_size 0;
}
""" % usrSharePath, fp.read())

    # MatchesVersion is expected to be invoked with 'all', but testing for
    # 'do', 'call' and 'any' invocation just in case.
    def testDoesNotMatchDo(self):
        consume(self.matchesVersion.updateConfig(**{'software_version': '0.1'}))
        self.top.do.somemessage()
        self.assertEqual(['updateConfig'], self.observer.calledMethodNames())

    def testDoesMatchDo(self):
        consume(self.matchesVersion.updateConfig(**{'software_version': '2'}))
        self.top.do.anothermessage()
        self.assertEqual(['updateConfig', 'anothermessage'], self.observer.calledMethodNames())

    def testDoesNotMatchCall(self):
        consume(self.matchesVersion.updateConfig(**{'software_version': '0.1'}))
        try:
            _ = self.top.call.somemessage()
            self.fail()
        except NoneOfTheObserversRespond:
            pass
        self.assertEqual(['updateConfig'], self.observer.calledMethodNames())

    def testDoesMatchCall(self):
        consume(self.matchesVersion.updateConfig(**{'software_version': '2'}))
        _ = self.top.call.somemessage()
        self.assertEqual(['updateConfig', 'somemessage'], self.observer.calledMethodNames())

    def testDoesNotMatchAny(self):
        consume(self.matchesVersion.updateConfig(**{'software_version': '0.1'}))
        try:
            _ = retval(self.top.any.somemessage())
            self.fail()
        except NoneOfTheObserversRespond:
            pass
        self.assertEqual(['updateConfig'], self.observer.calledMethodNames())

    def testDoesMatchAny(self):
        consume(self.matchesVersion.updateConfig(**{'software_version': '2'}))
        _ = retval(self.top.any.somemessage())
        self.assertEqual(['updateConfig', 'somemessage'], self.observer.calledMethodNames())

    def testBetweenVersionCondition(self):
        inbetween = betweenVersionCondition('1.3', '8')
        self.assertTrue(inbetween('1.3'))
        self.assertTrue(inbetween('1.3.x'))
        self.assertTrue(inbetween('7.9'))
        self.assertFalse(inbetween('8.0'))
        self.assertFalse(inbetween('8'))
        self.assertFalse(inbetween('77'))
        self.assertFalse(inbetween('1.2.x'))
|
comsaint/legco-watch | app/raw/processors/library_member.py | Python | mit | 5,346 | 0.002806 | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.timezone import now
import json
import logging
import os
import re
import shutil
import warnings
from raw.models import RawCouncilAgenda, LANG_EN, LANG_CN, RawMember, GENDER_M, GENDER_F
from raw import utils
from raw.processors.base import BaseProcessor, file_wrapper
logger = logging.getLogger('legcowatch')
class LibraryMemberProcessor(BaseProcessor):
"""
Processes the results of a library_member spider crawl.
The crawl results in an item for each member/language bio combination, so each member
will have two items, one for English, one for Chinese.
This will create RawMember items for each member and combine these records
"""
def process(self, *args, **kwargs):
logger.info("Processing file {}".format(self.items_file_path))
counter = 0
for item in file_wrapper(self.items_file_path):
counter += 1
self._process_member(item)
logger.info("{} items processed, {} created, {} updated".format(counter, self._count_created, self._count_updated))
def _process_member(self, item):
uid = self._generate_uid(item)
obj = self._get_member_object(uid)
if obj is None:
logger.warn(u'Could not process member item: {}'.format(item))
return
obj.last_parsed = now()
lang = item[u'language']
if lang == 'e':
# | English only items
keys_to_copy = [u'year_of_birth', u'place_of_birth', u'homepage']
for k in keys_to_copy:
val = item.get(k, | None)
if val is not None:
setattr(obj, k, val.strip())
if item[u'gender'] == u'M':
obj.gender = GENDER_M
else:
obj.gender = GENDER_F
# Copy and rename the photo to the app
# Unless it is the generic photo
if 'photo.jpg' not in item[u'files'][0][u'url']:
try:
source_photo_path = utils.get_file_path(item[u'files'][0][u'path'])
new_photo_path = 'member_photos/{}.jpg'.format(uid)
new_photo_abspath = os.path.abspath(os.path.join('.', 'raw', 'static', 'member_photos', '{}.jpg'.format(uid)))
# This should be moved up in the process, since we don't need to check if the directory exsts
# for each photo
if not os.path.exists(os.path.dirname(new_photo_abspath)):
os.makedirs(os.path.dirname(new_photo_abspath))
if not os.path.exists(new_photo_abspath) and os.path.exists(source_photo_path):
shutil.copyfile(source_photo_path, new_photo_abspath)
obj.photo_file = new_photo_path
except RuntimeError:
# Photo didn't download for some reason
logger.warn(u'Photo for {} did not download properly to path'.format(uid, item[u'files'][0][u'path']))
else:
# Clear old photos
obj.photo_file = ''
obj.crawled_from = item[u'source_url']
if self.job:
obj.last_crawled = self.job.completed
# All other items
keys_to_copy = [u'name', u'title', u'honours']
for k in keys_to_copy:
target = u'{}_{}'.format(k, lang)
val = item.get(k, None)
if val is not None:
# in some cases, the name field contains double-space ' ' instead of ' '
# which fails the NameMatcher later on
# e.g. 'LEE Wing-tat' instead of desired 'LEE Wing-tat'
# since we cannot modify the web page, we replace it here
val = val.replace(u' ',u' ')
setattr(obj, target, val.strip())
json_objects_to_copy = [u'service', u'education', u'occupation']
for k in json_objects_to_copy:
target = u'{}_{}'.format(k, lang)
val = item.get(k, None)
if val is not None:
setattr(obj, target, json.dumps(val))
obj.save()
def _get_member_object(self, uid):
try:
obj = RawMember.objects.get(uid=uid)
self._count_updated += 1
except RawMember.DoesNotExist:
obj = RawMember(uid=uid)
self._count_created += 1
except RawMember.MultipleObjectsReturned:
warnings.warn("Found more than one item with raw id {}".format(uid), RuntimeWarning)
obj = None
return obj
def _generate_uid(self, item):
"""
Generate a uid for members
The library database already has an internal ID for each member
We can use these for now, until we can think of a better one
ex: member-<library_id>
"""
pattern = ur'member_detail.aspx\?id=(\d+)'
url = item.get('source_url', None)
if url is None:
logger.warn('Could not generate uid, no source url')
match = re.search(pattern, url)
if match is None:
logger.warn('Could not generate uid, url did not match: {}'.format(url))
uid = match.group(1)
return 'member-{}'.format(uid)
|
mkaiserpm/emonpython | emontranslator.py | Python | mit | 2,232 | 0.010305 | #!/usr/bin/python
'''
Created on 01.05.2017
@author: mario
Emontranslator
Receive messages from serial/uart
Generate JSON Emon Input Messages
Insert via EMON API / APIKEY to emoncms on locahost (running on pi)
'''
import serial
import httplib
import time
# Primary emoncms server (running locally on the Pi) and its write API key.
domain = "localhost"
emoncmspath = "emoncms"
apikey = "2eba96e51f6b41534f52110ad063b0c8"
# Secondary emoncms server that receives a mirrored copy of every reading.
domain2 ="piboxmet.local"
apikey2 = "1ed78821a7e18f9b1b41ab30c3ffad0b"
nodeid = 10
conn = httplib.HTTPConnection(domain)
conn2 = httplib.HTTPConnection(domain2)
# Set this to the serial port of your emontx and baud rate, 9600 is standard emontx baud rate
ser = serial.Serial('/dev/ttyS0', 9600)
def parseLine(linestr):
    """Parse one reading line received from the emonTx over serial.

    Expected format: "<nodeid> <temp*100> <humid*100> <voltage*100>".
    Returns a (nodeid, temp, humid, voltage) tuple with the three readings
    scaled back to real units; nodeid is None (and the readings 0) when the
    line is a CRC error, empty, or malformed.
    """
    nodeid = None
    temp = 0
    humid = 0
    voltage = 0
    if "BAD-CRC" not in linestr and len(linestr) > 2:
        data = linestr.split(" ")
        # print() with a single argument behaves identically on Python 2 and 3.
        print(linestr)
        print(data)
        # Guard against short/garbled lines instead of raising IndexError.
        if len(data) >= 4:
            nodeid = int(data[0])
            temp = float(data[1]) / 100.
            humid = float(data[2]) / 100.
            voltage = float(data[3]) / 100.
    return nodeid, temp, humid, voltage
# Main loop: poll the serial port once per second and forward each valid
# reading to both emoncms servers via their HTTP input APIs.
while 1:
    try:
        # Read in line of readings from serial / uart
        linestr = ser.readline()
        linestr = linestr.rstrip()
        #print linestr
        nodeid,temp,humid,voltage=parseLine(linestr)
        if nodeid:
            # Build the emoncms JSON-style payload from the scaled readings.
            params = ("{temp:%.2f,humid:%.2f,voltage:%.2f}"%(temp,humid,voltage))
            #print params
            print "nodeid:"+str(nodeid)
            # Send to emoncms
            conn.connect()
            conn.request("GET", "/"+emoncmspath+"/input/post.json?&node="+str(nodeid)+"&json="+params+"&apikey="+apikey)
            response = conn.getresponse()
            print response.read()
            # Mirror the same reading to the second emoncms instance.
            conn2.connect()
            conn2.request("GET", "/"+emoncmspath+"/input/post.json?&node="+str(nodeid)+"&json="+params+"&apikey="+apikey2)
            response2 = conn2.getresponse()
            print response2.read()
    except KeyboardInterrupt:
        # Let Ctrl-C terminate the process.
        raise
    except Exception as e:
        # Best-effort daemon loop: report the error and keep polling.
        print e.__doc__
        print e.message
        pass
    time.sleep(1)
|
sfam/netdisco | netdisco/tellstick.py | Python | mit | 1,815 | 0.000551 | """Tellstick device discovery."""
import socket
import threading
from datetime import timedelta
DISCOVERY_PORT = 30303
DISCOVERY_ADDRESS = '<broadcast>'
DISCOVERY_PAYLOAD = b"D"
DISCOVERY_TIMEOUT = timedelta(seconds=5)
class Tellstick(object):
    """Base class to discover Tellstick devices on the local network via a
    UDP broadcast probe."""

    def __init__(self):
        """Initialize the Tellstick discovery."""
        self.entries = []
        self._lock = threading.RLock()

    def scan(self):
        """Scan the network (serialized through the instance lock)."""
        with self._lock:
            self.update()

    def all(self):
        """Scan and return all found entries."""
        self.scan()
        return self.entries

    def update(self):
        """Scan network for Tellstick devices and refresh ``self.entries``."""
        entries = []
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            sock.settimeout(DISCOVERY_TIMEOUT.seconds)
            sock.sendto(DISCOVERY_PAYLOAD, (DISCOVERY_ADDRESS, DISCOVERY_PORT))

            while True:
                try:
                    data, (address, _) = sock.recvfrom(1024)
                    entry = data.decode("ascii").split(":")
                    # expecting product, mac, activation code, version
                    if len(entry) != 4:
                        continue
                    entries.append((address,) + tuple(entry))
                except socket.timeout:
                    break
        finally:
            # Always release the socket, even on unexpected errors
            # (the original leaked it when recvfrom raised anything
            # other than socket.timeout).
            sock.close()

        self.entries = entries
def main():
    """Command-line smoke test: run one discovery scan and print the result."""
    from pprint import pprint

    scanner = Tellstick()
    pprint("Scanning for Tellstick devices..")
    scanner.update()
    pprint(scanner.entries)
if __name__ == "__main__":
    # Allow running this module directly for a quick manual scan.
    main()
|
leafnode/npp_markdown_script | lib/markdown/__main__.py | Python | bsd-3-clause | 3,376 | 0.004443 | """
COMMAND-LINE SPECIFIC STUFF
=============================================================================
"""
import markdown
import sys
import optparse
import logging
from logging import DEBUG, INFO, CRITICAL
logger = logging.getLogger('MARKDOWN')
def parse_options():
    """
    Define and parse `optparse` options for command-line usage.

    Returns a (options_dict, verbosity) pair, where options_dict holds the
    keyword arguments expected by `markdown.markdownFromFile`.
    """
    usage = """%prog [options] [INPUTFILE]
       (STDIN is assumed if no INPUTFILE is given)"""
    desc = "A Python implementation of John Gruber's Markdown. " \
           "http://www.freewisdom.org/projects/python-markdown/"
    ver = "%%prog %s" % markdown.version

    parser = optparse.OptionParser(usage=usage, description=desc, version=ver)
    parser.add_option("-f", "--file", dest="filename", default=sys.stdout,
                      help="Write output to OUTPUT_FILE. Defaults to STDOUT.",
                      metavar="OUTPUT_FILE")
    parser.add_option("-e", "--encoding", dest="encoding",
                      help="Encoding for input and output files.",)
    parser.add_option("-q", "--quiet", default=CRITICAL,
                      action="store_const", const=CRITICAL+10, dest="verbose",
                      help="Suppress all warnings.")
    parser.add_option("-v", "--verbose",
                      action="store_const", const=INFO, dest="verbose",
                      help="Print all warnings.")
    parser.add_option("-s", "--safe", dest="safe", default=False,
                      metavar="SAFE_MODE",
                      help="'replace', 'remove' or 'escape' HTML tags in input")
    parser.add_option("-o", "--output_format", dest="output_format",
                      default='xhtml1', metavar="OUTPUT_FORMAT",
                      help="'xhtml1' (default), 'html4' or 'html5'.")
    parser.add_option("--noisy",
                      action="store_const", const=DEBUG, dest="verbose",
                      help="Print debug messages.")
    parser.add_option("-x", "--extension", action="append", dest="extensions",
                      help="Load extension EXTENSION.", metavar="EXTENSION")
    parser.add_option("-n", "--no_lazy_ol", dest="lazy_ol",
                      action='store_false', default=True,
                      help="Observe number of first item of ordered lists.")

    (options, args) = parser.parse_args()

    # Positional INPUTFILE is optional; fall back to STDIN.
    if len(args) == 0:
        input_file = sys.stdin
    else:
        input_file = args[0]

    if not options.extensions:
        options.extensions = []

    return {'input': input_file,
            'output': options.filename,
            'safe_mode': options.safe,
            'extensions': options.extensions,
            'encoding': options.encoding,
            'output_format': options.output_format,
            'lazy_ol': options.lazy_ol}, options.verbose
def run():
    """Entry point: parse CLI options, configure logging, convert the input."""
    options, logging_level = parse_options()
    if not options:
        sys.exit(2)

    # Emit warnings at the verbosity the user asked for.
    logger.setLevel(logging_level)
    logger.addHandler(logging.StreamHandler())

    markdown.markdownFromFile(**options)
if __name__ == '__main__':
    # Support running module as a commandline command.
    # Python 2.5 & 2.6 do: `python -m markdown.__main__ [options] [args]`.
    # Python 2.7 & 3.x do: `python -m markdown [options] [args]`.
    run()
|
NinjaMSP/crossbar | crossbar/adapter/mqtt/test/interop.py | Python | agpl-3.0 | 6,672 | 0.002398 | #####################################################################################
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Unless a separate license agreement exists between you and Crossbar.io GmbH (e.g.
# you have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
"""
Server interop tests, making sure Crossbar's MQTT adapter responds the same as
other MQTT servers.
"""
from __future__ import print_function
import click
import attr
import sys
from collections import deque
from texttable import Texttable
from twisted.internet.protocol import Protocol, ClientFactory
from crossbar.adapter.mqtt.protocol import MQTTClientParser
@attr.s
class Frame(object):
    # One step of a replay transcript: `send` is True for data we write to the
    # broker, False for data we expect to read back; `data` is the payload
    # (raw bytes, an object with .serialise(), or a list of alternatives).
    send = attr.ib()
    data = attr.ib()
class ConnectionLoss(object):
    # Transcript terminator: at this point the server is expected to drop the
    # connection (checked in ReplayProtocol.connectionLost).
    send = False
    data = b""
@attr.s
class Result(object):
    # Outcome of one interop test: test name, pass/fail flag, failure reason
    # (None/empty on success) and the decoded client-side transcript.
    name = attr.ib()
    success = attr.ib()
    reason = attr.ib()
    transcript = attr.ib()
@click.command()
@click.option("--host")
@click.option("--port")
def run(host, port):
    """Run every interop test against host:port and print a results table."""
    port = int(port)

    # Collect all test_* callables from the interop_tests module.
    from . import interop_tests
    test_names = [x for x in dir(interop_tests) if x.startswith("test_")]

    tests = [getattr(interop_tests, test_name) for test_name in test_names]

    results = []
    with click.progressbar(tests, label="Running interop tests...") as _tests:
        for test in _tests:
            results.append(test(host, port))

    # Flatten the Result objects into printable table rows.
    fmt_results = []
    for r in results:
        fmt_results.append((r.name,
                            "True" if r.success else "False", r.reason if r.reason else "", r.transcript))

    t = Texttable()
    t.set_cols_width([20, 10, 80, 60])
    rows = [["Name", "Successful", "Reason", "Client Transcript"]]
    rows.extend(fmt_results)
    t.add_rows(rows)
    # NOTE(review): writes to sys.__stdout__, presumably to bypass any
    # stdout capturing done by click/twisted -- confirm before changing.
    print(t.draw(), file=sys.__stdout__)

    # Exit code is the number of failed tests (0 on full success).
    failures = []
    for x in results:
        if not x.success:
            failures.append(False)

    if failures:
        sys.exit(len(failures))
    sys.exit(0)
class ReplayProtocol(Protocol):
    """Replays a scripted transcript of Frames against an MQTT server,
    failing the test on any deviation from the expected exchange."""

    def __init__(self, factory):
        self.factory = factory
        # Working copy of the transcript; consumed from the left as we go.
        self._record = deque(self.factory.record)
        self._waiting_for_nothing = None
        self._client = MQTTClientParser()

    def connectionMade(self):
        # If the transcript starts with data to send, send it immediately.
        if self._record[0].send:
            to_send = self._record.popleft()
            if isinstance(to_send.data, bytes):
                self.transport.write(to_send.data)
            else:
                self.transport.write(to_send.data.serialise())

    def dataReceived(self, data):
        # Any traffic resets the test watchdog.
        self.factory._timer.reset(7)
        got_data = self._client.data_received(data)
        self.factory.client_transcript.extend(got_data)

        if self._waiting_for_nothing:
            # We scheduled a "expect silence" check; an empty synthetic call
            # confirms it, real data means the server spoke when it shouldn't.
            if data == b"":
                got_data.append(b"")
                self._waiting_for_nothing = None
            else:
                self.factory.reason = "Got unexpected data " + repr(got_data)
                self.factory.success = False
                self.factory.reactor.stop()
                return

        if len(self._record) > 0 and got_data:
            for x in got_data:
                reading = self._record.popleft()
                if x == reading.data:
                    pass
                elif isinstance(reading.data, list) and x in reading.data:
                    # Several packets were acceptable here; tick this one off.
                    reading.data.remove(x)
                else:
                    self.factory.success = False
                    self.factory.reason = (x, reading.data)
                    self.factory.reactor.stop()
                    return

                if len(self._record) > 0:
                    # Flush any frames we are scripted to send next.
                    while len(self._record) > 0 and self._record[0].send:
                        to_send = self._record.popleft()
                        if isinstance(to_send.data, bytes):
                            self.transport.write(to_send.data)
                        else:
                            self.transport.write(to_send.data.serialise())

                if isinstance(reading.data, list):
                    if reading.data:
                        # Alternatives remain; keep the frame at the head.
                        self._record.appendleft(reading)

            if len(self._record) > 0:
                # Then if we are supposed to wait...
                if isinstance(self._record[0], Frame) and self._record[0].send is False and self._record[0].data == b"":
                    def wait():
                        self.dataReceived(b"")
                    self._waiting_for_nothing = self.factory.reactor.callLater(2, wait)
                    return

    def connectionLost(self, reason):
        if self.factory.reactor.running:
            # Disconnect is a pass only if the transcript expected it here.
            if self._record and isinstance(self._record[0], ConnectionLoss):
                self.factory.success = True
            else:
                self.factory.success = False
                self.factory.reason = "Premature disconnection"
            self.factory.reactor.stop()
@attr.s
class ReplayClientFactory(ClientFactory):
    # reactor drives the test run; record is the Frame/ConnectionLoss
    # transcript handed to each ReplayProtocol instance.
    reactor = attr.ib()
    record = attr.ib()
    # Outcome fields, filled in by the protocol as the replay proceeds.
    success = attr.ib(default=None)
    reason = attr.ib(default=None)
    protocol = ReplayProtocol
    noisy = False

    def buildProtocol(self, addr):
        self.client_transcript = []
        p = self.protocol(self)

        def disconnect():
            # Watchdog: abort the whole test if nothing happens for 7 seconds.
            self.reason = "Timeout (remaining assertions were " + repr(p._record) + ")"
            self.reactor.stop()

        self._timer = self.reactor.callLater(7, disconnect)
        return p
if __name__ == "__main__":
    # Allow running the interop suite directly: --host and --port select the broker.
    run()
|
badloop/SickRage | sickrage/show/Show.py | Python | gpl-3.0 | 5,537 | 0.002167 | # This file is part of SickRage.
#
# URL: https://www.sickrage.tv
# Git: https://github.com/SickRage/SickRage.git
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import sickbeard
from datetime import date
from sickbeard.common import Quality, SKIPPED, WANTED
from sickbeard.db import DBConnection
from sickbeard.helpers import findCertainShow
from sickrage.helper.exceptions import CantRefreshShowException, CantRemoveShowException, ex
from sickrage.helper.exceptions import MultipleShowObjectsException
class Show:
    """Namespace of static helpers for deleting, pausing, refreshing and
    summarizing shows."""
    def __init__(self):
        pass

    @staticmethod
    def delete(indexer_id, remove_files=False):
        """
        Try to delete a show

        :param indexer_id: The unique id of the show to delete
        :param remove_files: ``True`` to remove the files associated with the show, ``False`` otherwise
        :return: A tuple containing:
         - an error message if the show could not be deleted, ``None`` otherwise
         - the show object that was deleted, if it exists, ``None`` otherwise
        """
        error, show = Show._validate_indexer_id(indexer_id)

        if error is not None:
            return error, show

        try:
            sickbeard.showQueueScheduler.action.removeShow(show, bool(remove_files))
        except CantRemoveShowException as exception:
            return ex(exception), show

        return None, show

    @staticmethod
    def overall_stats():
        """Return aggregate episode/show counters for the whole library."""
        db = DBConnection()
        shows = sickbeard.showList
        # NOTE(review): airdate is compared against str(ordinal) below --
        # presumably both sides are the same type in the DB; confirm.
        today = str(date.today().toordinal())

        downloaded_status = Quality.DOWNLOADED + Quality.ARCHIVED
        snatched_status = Quality.SNATCHED + Quality.SNATCHED_PROPER
        total_status = [SKIPPED, WANTED]

        results = db.select(
            'SELECT airdate, status '
            'FROM tv_episodes '
            'WHERE season > 0 '
            'AND episode > 0 '
            'AND airdate > 1'
        )

        stats = {
            'episodes': {
                'downloaded': 0,
                'snatched': 0,
                'total': 0,
            },
            'shows': {
                'active': len([show for show in shows if show.paused == 0 and show.status == 'Continuing']),
                'total': len(shows),
            },
        }

        for result in results:
            if result['status'] in downloaded_status:
                stats['episodes']['downloaded'] += 1
                stats['episodes']['total'] += 1
            elif result['status'] in snatched_status:
                stats['episodes']['snatched'] += 1
                stats['episodes']['total'] += 1
            elif result['airdate'] <= today and result['status'] in total_status:
                stats['episodes']['total'] += 1

        return stats

    @staticmethod
    def pause(indexer_id, pause=None):
        """
        Change the pause state of a show

        :param indexer_id: The unique id of the show to update
        :param pause: ``True`` to pause the show, ``False`` to resume the show, ``None`` to toggle the pause state
        :return: A tuple containing:
         - an error message if the pause state could not be changed, ``None`` otherwise
         - the show object that was updated, if it exists, ``None`` otherwise
        """
        error, show = Show._validate_indexer_id(indexer_id)

        if error is not None:
            return error, show

        if pause is None:
            show.paused = not show.paused
        else:
            show.paused = pause

        show.saveToDB()

        return None, show

    @staticmethod
    def refresh(indexer_id):
        """
        Try to refresh a show

        :param indexer_id: The unique id of the show to refresh
        :return: A tuple containing:
         - an error message if the show could not be refreshed, ``None`` otherwise
         - the show object that was refreshed, if it exists, ``None`` otherwise
        """
        error, show = Show._validate_indexer_id(indexer_id)

        if error is not None:
            return error, show

        try:
            sickbeard.showQueueScheduler.action.refreshShow(show)
        except CantRefreshShowException as exception:
            return ex(exception), show

        return None, show

    @staticmethod
    def _validate_indexer_id(indexer_id):
        """
        Check that the provided indexer_id is valid and corresponds with a known show

        :param indexer_id: The indexer id to check
        :return: A tuple containing:
         - an error message if the indexer id is not correct, ``None`` otherwise
         - the show object corresponding to ``indexer_id`` if it exists, ``None`` otherwise
        """
        if indexer_id is None:
            return 'Invalid show ID', None

        try:
            show = findCertainShow(sickbeard.showList, int(indexer_id))
        except MultipleShowObjectsException:
            return 'Unable to find the specified show', None

        return None, show
|
edoburu/django-parler-rest | testproj/migrations/0001_initial.py | Python | apache-2.0 | 1,819 | 0.003299 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Initial schema: a Country master table plus its per-language
    # translation table (django-parler layout).

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Country',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('country_code', models.CharField(unique=True, max_length=2, verbose_name='country code', db_index=True)),
            ],
            options={
                'verbose_name': 'country',
                'verbose_name_plural': 'countries',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='CountryTranslation',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('language_code', models.CharField(max_length=15, verbose_name='Language', db_index=True)),
                ('name', models.CharField(max_length=200, verbose_name='name')),
                ('url', models.URLField(verbose_name='webpage', blank=True)),
                ('master', models.ForeignKey(related_name='translations', editable=False, to='testproj.Country', null=True, on_delete=models.CASCADE)),
            ],
            options={
                'managed': True,
                'db_table': 'testproj_country_translation',
                'db_tablespace': '',
                'default_permissions': (),
                'verbose_name': 'country Translation',
            },
            bases=(models.Model,),
        ),
        migrations.AlterUniqueTogether(
            name='countrytranslation',
            unique_together=set([('language_code', 'master')]),
        ),
    ]
|
google-research/tensor2robot | utils/train_eval_test_utils.py | Python | apache-2.0 | 5,835 | 0.005656 | # coding=utf-8
# Copyright 2022 The Tensor2Robot Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as python3
"""Utility functions for train_eval tests for new models."""
import errno
import logging
import os
from typing import Callable, Optional, Text, List
import gin
from tensor2robot.utils import train_eval
import tensorflow.compat.v1 as tf
# File patterns a successful training run is expected to leave in model_dir.
DEFAULT_TRAIN_FILENAME_PATTERNS = [
    'operative_config-0.gin', 'model.ckpt-0.data-*', 'model.ckpt-0.meta',
    'model.ckpt-0.index', 'checkpoint', 'graph.pbtxt'
]
# File patterns a successful evaluation run is expected to leave in model_dir.
DEFAULT_EVAL_FILENAME_PATTERNS = ['eval/events.*']
def assert_output_files(
    test_case,
    model_dir,
    expected_output_filename_patterns: Optional[List[Text]] = None):
  """Verify that the expected output files are generated.

  Args:
    test_case: The instance of the test used to assert that the output files
      are generated.
    model_dir: The path where the model should be stored.
    expected_output_filename_patterns: All patterns of files which should exist
      after train_and_eval, train, or eval. If None, the default expected
      filename patterns are used.
  """
  if expected_output_filename_patterns is None:
    expected_output_filename_patterns = (
        DEFAULT_TRAIN_FILENAME_PATTERNS + DEFAULT_EVAL_FILENAME_PATTERNS)

  # Check that expected files have been written.
  for pattern in expected_output_filename_patterns:
    filename_pattern = os.path.join(model_dir, pattern)
    tf.logging.info('file_pattern: %s', filename_pattern)
    filenames = tf.io.gfile.glob(filename_pattern)
    tf.logging.info('filenames: %s', filenames)
    filenames_dir = tf.io.gfile.listdir(model_dir)
    tf.logging.info('filenames_dir: %s', filenames_dir)
    test_case.assertNotEmpty(
        filenames, msg='No files found with pattern "%s"' % filename_pattern)
    for filename in filenames:
      with tf.io.gfile.GFile(filename) as f:
        test_case.assertGreater(f.size(), 0, msg='%s is empty' % filename)
def test_train_eval_gin(test_case,
model_dir,
full_gin_path,
max_train_steps,
eval_steps,
gin_overwrites_fn = None,
assert_train_output_files = True,
assert_eval_output_files = True):
"""Train and eval a runnable gin config.
Until we have a proper gen_rule to create individual targets for every gin
file automatically, gin files can be tested using the pattern below.
Please, use 'test_train_eval_gin' as the test function name such that it
is easy to convert these tests as soon as the gen_rule is available.
@parameterized.parameters(
('first.gin',),
('second.gin',),
('third.gin',),
)
def test_train_eval_gin(self, gin_file):
full_gin_path = os.path.join(FLAGS.test_srcdir, BASE_GIN_PATH, gin_file)
model_dir = os.path.join(FLAGS.test_tmpdir, 'test_train_eval_gin', gin_file)
train_eval_test_utils.test_train_eval_gin(
test_case=self,
model_dir=model_dir,
full_gin_path=full_gin_path,
max_train_steps=MAX_TRAIN_STEPS,
eval_steps=EVAL_STEPS)
Args:
test_case: The instance of the test used to assert that the output files are
generated.
model_dir: The path where the model should be stored.
full_gin_path: The path of the gin file which parameterizes train_eval.
max_train_steps: The maximum number of training steps, should be small since
this is just for testing.
eval_steps: The number of eval steps, should be small since this is just for
testing.
gin_overwrites_fn: Optional function which binds gin parameters to
overwrite.
assert_train_output_files: If True, the expected output files of the
training run are checked, otherwise this check is skipped. If only
evaluation is performed this should be set to False.
assert_eval_output_files: If True, the output expected files of the
evaluation run are checked, otherwise this check is skipped. If only
training is performed this should be set to False. Note, if
assert_train_output_files is set to False the model_dir is not deleted
in order to load the model from training.
"""
# We clear all prior parameters set by gin to ensure that we can call this
# function sequentially for all parameterized tests.
gin.clear_config(clear_constants=True)
gin.parse_config_file(full_gin_path, print_includes_and_imports=True)
gin.bind_parameter('train_eval_model.model_dir', model_dir)
if gin_overwrites_fn is not None:
gin_overwrites_fn()
# Make sure that the model dir is empty. This is important for running
# tests locally.
if tf.io.gfile.exists(model_dir) and assert_train_output_files:
tf.io.gfile.rmtree(model_dir)
train_eval.train_eval_model(
model_dir=model_dir,
max_train_steps=max_train_steps,
eval_steps=eval_steps,
create_exporters_fn=None)
if assert_train_output_files:
assert_output_files(
test_case=test_case,
model_dir=model_dir,
expected_output_filename_patterns=DEFAULT_TRAIN_FILENAME_PATTERNS)
if assert_eval_output_files:
assert_output_files(
test_case=test_case,
model_dir=model_dir,
expected_output_filename_patterns=DEFAULT_EVAL_FILENAME_PATTERNS)
|
fayf/pyload | module/plugins/hoster/SpeedyshareCom.py | Python | gpl-3.0 | 1,259 | 0.009531 | # -*- coding: utf-8 -*-
#
# Test links:
# http://speedy.sh/ep2qY/Zapp-Brannigan.jpg
import re
import urlparse
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class SpeedyshareCom(SimpleHoster):
__name__ = "SpeedyshareCom"
__type__ = "hoster"
__version__ = "0.06"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?(speedyshare\.com|speedy\.sh)/\w+'
__config__ = [("use_premium", "bool", "Use premium account if available", True)]
__description__ = """Speedyshare.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("zapp-brannigan", "fuerst.reinje@web.de")]
NAME_PATTERN = r'class=downloadfilename>(?P<N>.*)</span></td>'
SIZE_PATTERN = r'class=sizetagtext>(?P<S>.*) (?P<U>[kKmM]?[iI]?[bB]?)</div>'
OFFLINE_PATTERN = r'class=downloadfilenamenotfound>.*</span>'
LINK_FREE_PATTERN = r'<a href=\'(.*)\'><img src=/gf/slowdownload\.png alt=\'Slow Download\' border=0'
def setup(self):
self.multiDL = False
self.chunk_limit = 1
def handle_fre | e(self, pyfile):
m = re.search(self.LINK_FREE_PATTERN, se | lf.html)
if m is None:
self.link = m.group(1)
getInfo = create_getInfo(SpeedyshareCom)
|
brave/muon | script/bump-version.py | Python | mit | 1,507 | 0.013271 | #!/usr/bin/env python
import os
import re
import sys
from lib.config import SOURCE_ROOT, get_electron_version, get_chrome_version
from lib.util import execute, parse_version, scoped_cwd
def main():
if len(sys.argv) != 2 or sys.argv[1] == '-h':
print 'Usage: bump-version.py [<version> | major | minor | patch]'
return 1
option = sys.argv[1]
increments = ['major', 'minor', 'patch']
if option in increments:
version = get_electron_version()
versions = parse_version(version.split('-')[0])
versions = increase_version(versions, increments.index(option))
chrome = get_chrome_version()
else:
versions = parse_version(option)
chrome = versions[3]
version = '.'.join(versions[:3])
version = vers | ion + '+' + chrome
with scoped_cwd(SOURCE_ROOT):
update_package_json(version)
tag_version(version)
def increase_version(versions, index):
for i in range(index + 1, 3):
versions[i] = '0'
versions[index] = str(int(versions[index]) + 1)
return versions
def update_package_json(version):
package_json = 'package.json'
with open(package_json | , 'r') as f:
lines = f.readlines()
for i in range(0, len(lines)):
line = lines[i];
if 'version' in line:
lines[i] = ' "version": "{0}",\n'.format(version)
break
with open(package_json, 'w') as f:
f.write(''.join(lines))
def tag_version(version):
execute(['git', 'commit', '-a', '-m', 'Bump v{0}'.format(version)])
if __name__ == '__main__':
sys.exit(main())
|
ttindell2/openshift-ansible | roles/openshift_health_checker/openshift_checks/logging/logging_index_time.py | Python | apache-2.0 | 4,995 | 0.003003 | """
Check for ensuring logs from pods can be queried in a reasonable amount of time.
"""
import json
import time
from uuid import uuid4
from openshift_checks import OpenShiftCheckException
from openshift_checks.logging.logging import LoggingCheck
ES_CMD_TIMEOUT_SECONDS = 30
class LoggingIndexTime(LoggingCheck):
"""Check that pod logs are aggregated and indexed in ElasticSearch within a reasonable amount of time."""
name = "logging_index_time"
tags = ["health", "logging"]
def run(self):
"""Add log entry by making unique request to Kibana. Check for unique entry in the ElasticSearch pod logs."""
try:
log_index_timeout = int(
self.get_var("openshift_check_logging_index_timeout_seconds", default=ES_CMD_TIMEOUT_SECONDS)
)
except ValueError:
raise OpenShiftCheckException(
'InvalidTimeout',
'Invalid value provided for "openshift_check_logging_index_timeout_seconds". '
'Value must be an integer representing an amount in seconds.'
)
running_component_pods = dict()
# get all component pods
for component, n | ame in (['kibana', 'Kibana'], ['es', 'Elasticsearch']):
pods = self.get_pods_for_component(component)
running_pods = self.running_pods(pods)
if not running_pods:
raise OpenShiftCheckException(
| component + 'NoRunningPods',
'No {} pods in the "Running" state were found.'
'At least one pod is required in order to perform this check.'.format(name)
)
running_component_pods[component] = running_pods
uuid = self.curl_kibana_with_uuid(running_component_pods["kibana"][0])
self.wait_until_cmd_or_err(running_component_pods["es"][0], uuid, log_index_timeout)
return {}
def wait_until_cmd_or_err(self, es_pod, uuid, timeout_secs):
"""Retry an Elasticsearch query every second until query success, or a defined
length of time has passed."""
deadline = time.time() + timeout_secs
interval = 1
while not self.query_es_from_es(es_pod, uuid):
if time.time() + interval > deadline:
raise OpenShiftCheckException(
"NoMatchFound",
"expecting match in Elasticsearch for message with uuid {}, "
"but no matches were found after {}s.".format(uuid, timeout_secs)
)
time.sleep(interval)
def curl_kibana_with_uuid(self, kibana_pod):
"""curl Kibana with a unique uuid."""
uuid = self.generate_uuid()
pod_name = kibana_pod["metadata"]["name"]
exec_cmd = "exec {pod_name} -c kibana -- curl --max-time 30 -s http://localhost:5601/{uuid}"
exec_cmd = exec_cmd.format(pod_name=pod_name, uuid=uuid)
error_str = self.exec_oc(exec_cmd, [])
try:
error_code = json.loads(error_str)["statusCode"]
except (KeyError, ValueError):
raise OpenShiftCheckException(
'kibanaInvalidResponse',
'invalid response returned from Kibana request:\n'
'Command: {}\nResponse: {}'.format(exec_cmd, error_str)
)
if error_code != 404:
raise OpenShiftCheckException(
'kibanaInvalidReturnCode',
'invalid error code returned from Kibana request.\n'
'Expecting error code "404", but got "{}" instead.'.format(error_code)
)
return uuid
def query_es_from_es(self, es_pod, uuid):
"""curl the Elasticsearch pod and look for a unique uuid in its logs."""
pod_name = es_pod["metadata"]["name"]
exec_cmd = (
"exec {pod_name} -- curl --max-time 30 -s -f "
"--cacert /etc/elasticsearch/secret/admin-ca "
"--cert /etc/elasticsearch/secret/admin-cert "
"--key /etc/elasticsearch/secret/admin-key "
"https://logging-es:9200/project.{namespace}*/_count?q=message:{uuid}"
)
exec_cmd = exec_cmd.format(pod_name=pod_name, namespace=self.logging_namespace(), uuid=uuid)
result = self.exec_oc(exec_cmd, [])
try:
count = json.loads(result)["count"]
except (KeyError, ValueError):
raise OpenShiftCheckException(
'esInvalidResponse',
'Invalid response from Elasticsearch query:\n'
' {}\n'
'Response was:\n{}'.format(exec_cmd, result)
)
return count
@staticmethod
def running_pods(pods):
"""Filter pods that are running."""
return [pod for pod in pods if pod['status']['phase'] == 'Running']
@staticmethod
def generate_uuid():
"""Wrap uuid generator. Allows for testing with expected values."""
return str(uuid4())
|
roofit-dev/parallel-roofit-scripts | profiling/vincemark/analyze_g.py | Python | apache-2.0 | 13,421 | 0.005663 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author: Patrick Bos
# @Date: 2016-11-16 16:23:55
# @Last Modified by: E. G. Patrick Bos
# @Last Modified time: 2017-09-05 17:27:17
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
from pathlib import Path
import itertools
import load_timing
pd.set_option("display.width", None)
def savefig(factorplot, fp):
try:
g.savefig(fp)
print("saved figure using pathlib.Path, apparently mpl is now pep 519 compatible! https://github.com/matplotlib/matplotlib/pull/8481")
except TypeError:
g.savefig(fp.__str__())
"""
cd ~/projects/apcocsm/code/profiling/workbench && rsync --progress --include='*/' --include='*/*/' --include='timing*.json' --exclude='*' -zavr nikhef:project_atlas/apcocsm_code/profiling/workbench/vincemark_outputAugust09 ./ && cd -
"""
basepath = Path.home() / 'projects/apcocsm/code/profiling/workbench/vincemark_outputAugust09'
savefig_dn = basepath / 'analysis'
savefig_dn.mkdir(parents=True, exist_ok=True)
#### LOAD DATA FROM FILES
fpgloblist = [basepath.glob('%i.allier.nikhef.nl/*.json' % i)
for i in range(18607668, 18607805)]
# for i in itertools.chain(range(18445438, 18445581),
# range(18366732, 18367027))]
drop_meta = ['parallel_interleave', 'seed', 'print_level', 'timing_flag',
'optConst', 'workspace_filepath', 'time_num_ints']
skip_on_match = ['timing_RRMPFE_serverloop_p*.json', # skip timing_flag 8 output (contains no data)
]
if Path('df_numints.hdf').exists():
skip_on_match.append('timings_numInts.json')
dfs_sp, dfs_mp_sl, dfs_mp_ma = load_timing.load_dfs_coresplit(fpgloblist, skip_on_match=skip_on_match, drop_meta=drop_meta)
# #### TOTAL TIMINGS (flag 1)
df_totals_real = pd.concat([dfs_sp['full_minimize'], dfs_mp_ma['full_minimize']])
# combine cpu and wall timings into one time_s column and add a cpu/wall column
df_totals_wall = df_totals_real[df_totals_real.walltime_s.notnull()].drop("cputime_s", axis=1).rename_axis({"walltime_s": "time_s"}, axis="columns")
df_totals_cpu = df_totals_real[df_totals_real.cputime_s.notnull()].drop("walltime_s", axis=1).rename_axis({"cputime_s": "time_s"}, axis="columns")
df_totals_wall['cpu/wall'] = 'wall'
df_totals_cpu['cpu/wall'] = 'cpu'
df_totals_really = pd.concat([df_totals_wall, df_totals_cpu])
# ### ADD IDEAL TIMING BASED ON SINGLE CORE RUNS
df_totals_ideal = load_timing.estimate_ideal_timing(df_totals_really, groupby=['N_events', 'segment',
'N_chans', 'N_nuisance_parameters', 'N_bins', 'cpu/wall'],
time_col='time_s')
df_totals = load_timing.combine_ideal_and_real(df_totals_really, df_totals_ideal)
# remove summed timings, they show nothing new
df_totals = df_totals[df_totals.segment != 'migrad+hesse+minos']
# combine timing_type and cpu/wall
df_totals['cpu|wall / real|ideal'] = df_totals['cpu/wall'].astype(str) + '/' + df_totals.timing_type.astype(str)
# # add combination of two categories
# df_totals['timeNIs/Nevents'] = df_totals.time_num_ints.astype(str) + '/' + df_totals.N_events.astype(str)
# df_totals['timeNIs/Nbins'] = df_totals.time_num_ints.astype(str) + '/' + df_totals.N_bins.astype(str)
# df_totals['timeNIs/Nnps'] = df_totals.time_num_ints.astype(str) + '/' + df_totals.N_nuisance_parameters.astype(str)
# df_totals['timeNIs/Nchans'] = df_totals.time_num_ints.astype(str) + '/' + df_totals.N_chans.astype(str)
#### ANALYSIS
# full timings
# g = sns | .factorplot(x='num_cpu', y='walltime_s', col='N_bins', hue='timing_type', row='segment', estimator=np.min, data=df_totals, legend_out | =False, sharey='row')
# plt.subplots_adjust(top=0.93)
# g.fig.suptitle(f'total wallclock timing of migrad, hesse and minos')
# savefig(g, savefig_dn / f'total_timing.png')
plot_stuff = input("press ENTER to plot stuff, type n and press ENTER to not plot stuff. ")
if plot_stuff != "n":
g = sns.factorplot(x='N_bins', y='time_s', col='num_cpu', hue='cpu|wall / real|ideal', row='segment', estimator=np.min, data=df_totals, legend_out=False, sharey='row', order=range(1, 1001))
plt.subplots_adjust(top=0.93)
g.fig.suptitle(f'total timings of migrad, hesse and minos')
savefig(g, savefig_dn / f'total_timing_vs_bins.png')
g = sns.factorplot(x='N_chans', y='time_s', col='num_cpu', hue='cpu|wall / real|ideal', row='segment', estimator=np.min, data=df_totals, legend_out=False, sharey='row')
plt.subplots_adjust(top=0.93)
g.fig.suptitle(f'total timings of migrad, hesse and minos')
savefig(g, savefig_dn / f'total_timing_vs_chans.png')
# Use the 1 channel 100 bins 1 nps runs as a special case, since these should scale linearly (i.e. no costs, no benefits)
subset = df_totals[(df_totals.N_chans == 1) & (df_totals.N_bins == 100) & (df_totals.N_nuisance_parameters == 1)]
g = sns.factorplot(x='num_cpu', y='time_s', hue='cpu|wall / real|ideal', row='segment', data=subset, legend_out=False)
plt.subplots_adjust(top=0.93)
g.fig.suptitle(f'total timings for only the 1 channel 100 bins 1 nps runs')
savefig(g, savefig_dn / f'total_timing_vs_1chan100bins1nps.png')
# make a plot per unique combination of parameters (looping is too complicated, since the combination space is sparse)
# # https://stackoverflow.com/a/35268906/1199693
# # for name, group in df_totals.groupby([]):
# for chans in df_totals.N_chans.unique():
# for events in df_totals.N_events.unique():
# for nps in df_totals.N_nuisance_parameters.unique():
# data = df_totals[(df_totals.N_chans == chans) & (df_totals.N_events == events) & (df_totals.N_nuisance_parameters == nps)]
# if len(data) > 0:
# g = sns.factorplot(x='num_cpu', y='walltime_s', col='N_bins', hue='timing_type', row='segment', estimator=np.min, data=data, legend_out=False, sharey='row')
# plt.subplots_adjust(top=0.93)
# g.fig.suptitle(f'total wallclock timing of migrad, hesse and minos --- N_channels = {chans}, N_events = {events}, N_nps = {nps}')
# savefig(g, savefig_dn / f'total_timing_chan{chans}_event{events}_np{nps}.png')
print("Something is not going right with the numerical integral added iteration columns... are they structured the way I thought at all?")
raise SystemExit
#### NUMERICAL INTEGRAL TIMINGS
if not Path('df_numints.hdf').exists():
df_numints = dfs_mp_sl['numInts']
df_numints.to_hdf('df_numints.hdf', 'vincemark_a_numint_timings')
else:
print("loading numerical integral timings from HDF file...")
df_numints = pd.read_hdf('df_numints.hdf', 'vincemark_a_numint_timings')
print("...done")
load_timing.add_iteration_column(df_numints)
df_numints_min_by_iteration = df_numints.groupby('iteration').min()
df_numints_max_by_iteration = df_numints.groupby('iteration').max()
"""
#### RooRealMPFE TIMINGS
### MPFE evaluate @ client (single core) (flags 5 and 6)
mpfe_eval = pd.concat([v for k, v in dfs_mp_ma.items() if 'wall_RRMPFE_evaluate_client' in k] +
[v for k, v in dfs_mp_ma.items() if 'cpu_RRMPFE_evaluate_client' in k])
### add MPFE evaluate full timings (flag 4)
mpfe_eval_full = pd.concat([v for k, v in dfs_mp_ma.items() if 'RRMPFE_evaluate_full' in k])
mpfe_eval_full.rename(columns={'RRMPFE_evaluate_wall_s': 'time s'}, inplace=True)
mpfe_eval_full['cpu/wall'] = 'wall+INLINE'
mpfe_eval_full['segment'] = 'all'
mpfe_eval = mpfe_eval.append(mpfe_eval_full)
### total time per run (== per pid, but the other columns are also grouped-by to prevent from summing over them)
mpfe_eval_total = mpfe_eval.groupby(['pid', 'N_events', 'num_cpu', 'cpu/wall', 'segment', 'force_num_int'], as_index=False).sum()
#### ADD mpfe_eval COLUMN OF CPU_ID, ***PROBABLY***, WHICH SEEMS TO EXPLAIN DIFFERENT TIMINGS QUITE WELL
mpfe_eval_cpu_split = pd.DataFrame(columns=mpfe_eval.columns)
for num_cpu in range(2, 9):
mpfe_eval_num_cpu = mpfe_eval[(mpfe_eval.segment == 'all') * (mpfe_eval.num_cpu == num_cpu)]
mpfe_eval_num_cpu['cpu_id'] = None
|
python27/NetworkControllability | NetworkControllability/strutral_controllability.py | Python | bsd-2-clause | 6,085 | 0.009696 | """
structral controllability measure, driver nodes
"""
# Copyright (C) 2014 by
# Xin-Feng Li <silfer.lee@gmail.com>
# All rights reserved
# BSD license
import networkx as nx
import matplotlib.pyplot as plt
__author__ = """Xin-Feng Li (silfer.lee@gmail.com)"""
def get_driver_nodes(DG):
'''Return the driver nodes number and driver nodes from a DiGraph DG
Basic Idea:
Given a graph DG, create a new undirected bipartite graph, BG
suppose DG has n nodes, the the BG has 2*n nodes, the first n nodes [0, n)
form the left parts of BG, the [n, 2*n) nodes form the right part of BG,
for each edge form DG, say, 1-->3, add edges in BG 1---(3+n)
then call the maximum matching algorithm find the matched nodes
All the unmatched nodes are the driver nodes we are looking for
Parameters
----------
DG: networkx.DiGraph, directed grap | h, node number start from 0
Returns
-------
driver node num: the number of | driver nodes
driver nodes: the driver nodes we are looking for
Notes:
-------
The index of nodes in DG must start from 0, 1, 2, 3...
References:
-----------
[1] Yang-Yu Liu, Jean-Jacques Slotine, Albert L. Barabasi. Controllability
of complex networks. Nature, 2011.
'''
assert(nx.is_directed(DG))
nodeNum = DG.number_of_nodes()
edgeNum = DG.number_of_edges()
# convert to a bipartite graph
G = nx.Graph()
left_nodes = ['a'+str(node) for node in G.nodes()]
right_nodes = ['b'+str(node) for node in G.nodes()]
G.add_nodes_from(left_nodes)
G.add_nodes_from(right_nodes)
for edge in DG.edges():
da = 'a' + str(edge[0])
db = 'b' + str(edge[1])
G.add_edge(da, db)
assert(nx.is_bipartite(G))
# maximum matching algorithm
matched_edges = nx.maximal_matching(G)
# find all the matched and unmatched nodes
matched_nodes = [int(edge[0][1:]) if edge[0][0] == 'b' else int(edge[1][1:]) for edge in matched_edges]
unmatched_nodes = [node for node in DG.nodes() if node not in matched_nodes]
unmatched_nodes_num = len(unmatched_nodes)
# perfect matching
isPerfect = False
if unmatched_nodes_num == 0:
print '>>> Perfect Match Found ! <<<'
isPerfect = True
unmatched_nodes_num = 1
unmatched_nodes = [0]
return (isPerfect, unmatched_nodes_num, unmatched_nodes)
###################################################
#
# The follwoing functions come from github
#
####################################################
def to_bipartite(G):
"""Converts Directed graph G to an undirected bipartite graph
H."""
if not G.is_directed():
raise nx.NetworkXError("G must be a directed Graph")
if G.is_multigraph():
H = nx.MultiGraph()
else:
H = nx.Graph()
for n in G:
H.add_node((n,'+'))
H.add_node((n,'-'))
for (u,v) in G.edges_iter():
H.add_edge((u,'+'),(v,'-'))
return H
def control_nodes(G):
H = to_bipartite(G)
M = nx.max_weight_matching(H,True)
matched = set(v for (v,sign) in M.values() if sign == '-') | \
set(u for (u,sign) in M.keys() if sign == '-')
n_D = set(G) - matched
if len(n_D) == 0:
return [G.nodes_iter().next()]
else:
return list(n_D)
def controllability(G):
return len(control_nodes(G))/float(len(G))
# test this function
if __name__ == "__main__":
DG = nx.DiGraph()
DG_edges = [(0,2), (0,3), (0,4), (0,5), (1,4), (1,5)]
DG.add_edges_from(DG_edges)
#n, nodes = get_driver_nodes(DG)
nodes = control_nodes(DG)
n = len(nodes)
print "\n"
print "node num:", n
print "nodes:", nodes
# test from Nature paper, Figure1 c
# Expected Result: 1
# Matched Edges: (1, 2) (2, 3) (3, 4)
G2 = nx.DiGraph()
G2.add_nodes_from([0,1,2,3])
G2.add_edge(1-1,2-1)
G2.add_edge(2-1,3-1)
G2.add_edge(3-1,4-1)
#n, nodes = get_driver_nodes(G2)
nodes = control_nodes(G2)
n = len(nodes)
print "\n"
print "G2 nodes:", G2.nodes();
print "node num:", n
print "nodes:", nodes
# test from Nature paper, Figure1 f
# Expected Result 1: (1, 2, 3)
# Matched Edges 1: (1, 4)
# Expected Result 2: (1, 3, 4)
# Matched Edges 2: (1, 2)
# Expected Result 3: (1, 2, 4)
# Matched Edges 3: (1, 3)
G3 = nx.DiGraph()
G3.add_nodes_from([0,1,2,3])
G3.add_edge(1-1,2-1)
G3.add_edge(1-1,3-1)
G3.add_edge(1-1,4-1)
#n, nodes = get_driver_nodes(G3)
nodes = control_nodes(G3)
n = len(nodes)
print "\n"
print "G3 nodes:", G3.nodes();
print "node num:", n
print "nodes:", nodes
# test from Nature paper, Figure1 i
# Expected Results 1: (1, 2, 3, 4)
# Matched Edges 1: (1, 5) (2, 6)
# Expected Results 2: (1, 3, 4, 5)
# Matched Edges 2: (1, 2) (2, 6)
# Expected Results 3: (1, 2, 4, 5)
# Matched Edges 3: (1, 3) (2, 6)
# Expected Results 4: (1, 2, 3, 5)
# Matched Edges 4: (1, 4) (2, 6)
G4 = nx.DiGraph()
G4.add_nodes_from([0,1,2,3,4,5])
G4.add_edge(1-1,2-1)
G4.add_edge(1-1,3-1)
G4.add_edge(1-1,4-1)
G4.add_edge(1-1,5-1)
G4.add_edge(1-1,6-1)
G4.add_edge(2-1,6-1)
#n, nodes = get_driver_nodes(G4)
nodes = control_nodes(G4)
n = len(nodes)
print "\n"
print "G4 nodes:", G4.nodes();
print "node num:", n
print "nodes:", nodes
# test compelte graph
# Expeced Results: (0) OR (1) OR (2) OR (3)
G5 = nx.DiGraph()
G5.add_nodes_from([0, 1, 2, 3])
G5.add_edge(1-1, 2-1)
G5.add_edge(1-1, 3-1)
G5.add_edge(1-1, 4-1)
G5.add_edge(2-1, 1-1)
G5.add_edge(2-1, 3-1)
G5.add_edge(2-1, 4-1)
G5.add_edge(3-1, 1-1)
G5.add_edge(3-1, 2-1)
G5.add_edge(3-1, 4-1)
G5.add_edge(4-1, 1-1)
G5.add_edge(4-1, 2-1)
G5.add_edge(4-1, 3-1)
#n, nodes = get_driver_nodes(G5)
nodes = control_nodes(G5)
n = len(nodes)
print "\n"
print "G5 nodes:", G5.nodes();
print "node num:", n
print "nodes:", nodes |
CMPUT404W17/FoundBook | fb/tests.py | Python | mit | 12,320 | 0.008929 | from django.test import TestCase
from fb.models import Author, Post, Comment, Config
from django.contrib.auth.models import User
from fb.services.post_service import PostService
from django.test.utils import setup_test_environment
import json
from BeautifulSoup import BeautifulSoup
class ClientTest(TestCase):
def setUp(self):
setup_test_environment()
self.user = User.objects.create_superuser(
username='temp_user', email='testman@test.fake.gov', password='qwerty123')
c = Config(key="PUBLIC_API_ACCESS", value=False)
c.save()
self.user.author = Author(verified=True) # we need a verified author to bootstrap any testing
self.user.author.save()
response = self.client.get('/', follow=True)
self.assertEqual(response.status_code, 200)
parsed = BeautifulSoup(response.content)
self.assertEqual(parsed.title.string, 'Foundbook - Login')
response = self.client.post('/login/', {"username": "tester", "password": "qwerty123"})
self.assertEqual(response.status_code, 401)
response = self.client.post('/login/', {"username": "temp_user", "password": "qwerty123"})
self.assertEqual(response.status_code, 302) # we're currently redirecting on login success
def test_making_and_updating_post(self):
response = self.client.get('/')
parsed = BeautifulSoup(response.content)
self.assertEqual(parsed.title.string, 'Foundbook')
response = self.client.get('/author/posts/', HTTP_HOST='localhost:8000')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json()['posts'], [])
myPost = {
"post": {
"title": "this is a title", "description": "this is the description",
"content": "Here is the post content", "contentType": "text/plain",
"visibility": "PUBLIC", "unlisted": "False", "source": "localhost:8000", "origin": "localhost:8000"}}
stringifiedPost = json.dumps(myPost)
resp = self.client.post('/posts/', stringifiedPost, content_type='application/json', **{'HTTP_HOST': 'localhost:8000'})
self.assertEqual(response.status_code, 200)
response = self.client.get('/author/posts/',HTTP_HOST='localhost:8000')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json()['posts'][0]['content'], "Here is the post content")
#updating post (this is in this test because we know the post is by this author)
post_id = response.json()['posts'][0]['id']
response = self.client.put('/posts/'+str(post_id)+'/', json.dumps({"post":{"title":"edit a post","description":"sdfsdf","content":"sdfsdf"}}),HTTP_HOST='localhost:8000', content_type='application/json')
self.assertEqual(response.status_code, 200)
class PostTestCase(TestCase):
def setUp(self):
setup_test_environment()
self.user = User.objects.create_superuser(
username='me', email='me@me.com', password='abadpassword')
c = Config(key="PUBLIC_API_ACCESS", value=False)
c.save()
self.user.author = Author(verified=True) # we need a verified author to bootstrap any testing
self.user.author.save()
users = [
#{"username": 'me', "email": 'me@me.com', "password": 'abadpassword'},
{"username": 'notme', "email": 'notme@notme.com', "password": 'abadpassword'},
{"username": 'friend', "email": 'friend@friendly.com', "password": 'abadpassword'},
{"username": 'fof', "email": 'fof@friendly.com', "password": 'abadpassword'},
]
self.users = [User(**a) for a in users]
c = Config(key="PUBLIC_API_ACCESS", value=False)
c.save()
for u in self.users:
u.save()
u.author = Author(verified=True)
u.author.save()
self.users.insert(0,self.user)
# add friendships
self.users[2].author.friends.add(self.users[0].author, self.users[3].author)
self.users[2].author.save()
posts = [
{"title": "MyPost 1", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "r", "unlisted": False,
"author_id": self.users[0].author.id},
{"title": "MyPost unlisted 2", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "r", "unlisted": True,
"author_id": self.users[0].author.id},
{"title": "random public 3", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "p", "unlisted": False,
"author_id": self.users[1].author.id},
{"title": "random server only 4", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "s", "unlisted": False,
"author_id": self.users[1].author.id},
{"title": "random friends 5", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "f", "unlisted": False,
"author_id": self.users[1].author.id},
{"title": "random fof 6", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "a", "unlisted": False,
"author_id": self.users[1].author.id},
{"title": "random private 7", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "r", "unlisted": False,
"author_id": self.users[1].author.id},
{"title": "random public unlisted 8", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "p", "unlisted": True,
"author_id": self.users[1].author.id},
{"title": "random private viewer 9", "source": "http://", "origin": "http://" | ,
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "r", "unlisted": False,
"author_id": self.users[1].author.id},
{"title": "friend only 10", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "f", "unlisted": False,
" | author_id": self.users[2].author.id},
{"title": "friend private 11", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "r", "unlisted": False,
"author_id": self.users[2].author.id},
{"title": "friend only 12", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "f", "unlisted": False,
"author_id": self.users[2].author.id},
{"title": "friend only 13", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "a", "unlisted": False,
"author_id": self.users[3].author.id},
{"title": "friend only 14", "source": "http://", "origin": "http://",
"contentType": "text/plain", "description": "",
"content": "dont care", "visibility": "a", "unlisted": False,
"author_id": self.users[2].author.id},
]
self.posts = [Post(**p) for p in posts]
for p in self.posts:
p.save()
self.posts[8].viewers.add(self.users[0].author)
self.posts[8].save()
self.service = PostService(context(self.users[0] |
Shaps/ansible | test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/net_logging.py | Python | gpl-3.0 | 2,503 | 0.001199 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
|
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["deprecated"],
"supported_by": "network",
}
DOCUMENTATION = """module: net_logging
author: Ganesh Nalawade (@ganeshrn)
short_description: Manage logging on network devices
description:
- This module provides declarative management of logging on network devices.
deprecated:
removed_in: '2.13' |
alternative: Use platform-specific "[netos]_logging" module
why: Updated modules released with more functionality
extends_documentation_fragment:
- ansible.netcommon.network_agnostic
options:
dest:
description:
- Destination of the logs.
choices:
- console
- host
name:
description:
- If value of C(dest) is I(host) it indicates file-name the host name to be notified.
facility:
description:
- Set logging facility.
level:
description:
- Set logging severity levels.
aggregate:
description: List of logging definitions.
purge:
description:
- Purge logging not defined in the I(aggregate) parameter.
default: false
state:
description:
- State of the logging configuration.
default: present
choices:
- present
- absent
"""
EXAMPLES = """
- name: configure console logging
net_logging:
dest: console
facility: any
level: critical
- name: remove console logging configuration
net_logging:
dest: console
state: absent
- name: configure host logging
net_logging:
dest: host
name: 192.0.2.1
facility: kernel
level: critical
- name: Configure file logging using aggregate
net_logging:
dest: file
aggregate:
- name: test-1
facility: pfe
level: critical
- name: test-2
facility: kernel
level: emergency
- name: Delete file logging using aggregate
net_logging:
dest: file
aggregate:
- name: test-1
facility: pfe
level: critical
- name: test-2
facility: kernel
level: emergency
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always, except for the platforms that use Netconf transport to manage the device.
type: list
sample:
- logging console critical
"""
|
pepitogithub/PythonScripts | Letras.py | Python | gpl-2.0 | 550 | 0.049091 | class Letras:
"""
- Contenedor de letras (no incluye la enie).
- Tiene tres set():
- vocales.
- consonantes.
- letras.
- Tener en cuenta que al ser Set(), no tienen orden,
por lo | que de ser necesario, se deben castear a list()
y ordenar el nuevo objeto con el metodo conveniente.
"""
vocales = set(["a","e","i","o","u"]) |
consonantes = set(["b","c","d","f","g","h","j","k","l","m","n","p","q","r","s","t","v","w","x","y","z"])
letras = vocales.union(consonantes) |
PyQwt/PyQwt4 | qt3examples/MultiDemo.py | Python | gpl-2.0 | 4,825 | 0.001865 | #!/usr/bin/env python
# Plot of Numeric & numarray arrays and lists & tuples of Python floats.
import sys
from qt import *
from Qwt4.Qwt import *
def drange(start, stop, step):
    """Return evenly spaced floats from *start* towards *stop* (exclusive).

    The number of points is ``int(round((stop - start) / step))``, a float
    analogue of ``range``.  Each value is computed as ``start + i * step``
    to avoid accumulating floating-point error.
    """
    start, stop, step = float(start), float(stop), float(step)
    size = int(round((stop - start) / step))
    # `range` instead of the Python-2-only `xrange`: identical behaviour on
    # Python 2, and the function now also works on Python 3.
    return [start + i * step for i in range(size)]
# drange()
def lorentzian(x):
    """Lorentzian line shape of unit height centred at x = 5."""
    offset = x - 5.0
    return 1.0 / (1.0 + offset * offset)
class MultiDemo(QWidget):
def __init__(self, *args):
QWidget.__init__(self, *args)
grid = QGridLayout(self, 2, 2)
# try to create a plot widget for NumPy arrays
try:
# import does_not_exist
import numpy
numpy_plot = QwtPlot('Plot -- NumPy arrays', self)
numpy_plot.plotLayout().setCanvasMargin(0)
numpy_plot.plotLayout().setAlignCanvasToScales(1)
numpy_x = numpy.arange(0.0, 10.0, 0.01)
numpy_y = lorentzian(numpy_x)
# insert a curve, make it red and copy the arrays
key = numpy_plot.insertCurve('y = lorentzian(x)')
numpy_plot.setCurvePen(key, QPen(Qt.red))
numpy_plot.setCurveData(key, numpy_x, numpy_y)
grid.addWidget(numpy_plot, 0, 0)
numpy_plot.replot()
except ImportError, message:
print "%s: %s" % (ImportError, message)
print "In | stall NumPy to plot NumPy arrays"
except TypeError, message:
print "%s: %s" % (TypeError, message)
print "Rebuild PyQwt to plot NumPy arrays"
self.removeChild(numpy_plot)
# try to create a plot widget for Numeric arrays
try:
# i | mport does_not_exist
import Numeric
numeric_plot = QwtPlot('Plot -- Numeric arrays', self)
numeric_plot.plotLayout().setCanvasMargin(0)
numeric_plot.plotLayout().setAlignCanvasToScales(1)
numeric_x = Numeric.arange(0.0, 10.0, 0.01)
numeric_y = lorentzian(numeric_x)
# insert a curve, make it red and copy the arrays
key = numeric_plot.insertCurve('y = lorentzian(x)')
numeric_plot.setCurvePen(key, QPen(Qt.red))
numeric_plot.setCurveData(key, numeric_x, numeric_y)
grid.addWidget(numeric_plot, 0, 1)
numeric_plot.replot()
except ImportError, message:
print "%s: %s" % (ImportError, message)
print "Install Numeric to plot Numeric arrays"
except TypeError, message:
print "%s: %s" % (TypeError, message)
print "Rebuild PyQwt to plot Numeric arrays"
self.removeChild(numeric_plot)
# try to create a plot widget for numarray arrays
try:
# import does_not_exist
import numarray
numarray_plot = QwtPlot('Plot -- numarray arrays', self)
numarray_plot.plotLayout().setCanvasMargin(0)
numarray_plot.plotLayout().setAlignCanvasToScales(1)
numarray_x = numarray.arange(0.0, 10.0, 0.01)
numarray_y = lorentzian(numarray_x)
# insert a curve, make it red and copy the arrays
key = numarray_plot.insertCurve('y = lorentzian(x)')
numarray_plot.setCurvePen(key, QPen(Qt.red))
numarray_plot.setCurveData(key, numarray_x, numarray_y)
grid.addWidget(numarray_plot, 1, 0)
numarray_plot.replot()
except ImportError, message:
print "%s: %s" % (ImportError, message)
print "Install numarray to plot numarray arrays"
except TypeError, message:
print "%s: %s" % (TypeError, message)
print "Rebuild PyQwt to plot numarray arrays"
self.removeChild(numarray_plot)
# create a plot widget for lists of Python floats
list_plot = QwtPlot('Plot -- List of Python floats', self)
list_plot.plotLayout().setCanvasMargin(0)
list_plot.plotLayout().setAlignCanvasToScales(1)
list_x = drange(0.0, 10.0, 0.01)
list_y = map(lorentzian, list_x)
# insert a curve, make it red and copy the lists
key = list_plot.insertCurve('y = lorentzian(x)')
list_plot.setCurvePen(key, QPen(Qt.red))
list_plot.setCurveData(key, list_x, list_y)
grid.addWidget(list_plot, 1, 1)
list_plot.replot()
# __init__()
# class MultiDemo
def main(args):
    """Create the demo window, install it as the main widget and run the
    Qt event loop until the application exits."""
    application = QApplication(args)
    window = make()
    application.setMainWidget(window)
    sys.exit(application.exec_loop())
# main()
def make():
    """Instantiate, size and show the demo widget; return it so the caller
    can keep a reference alive."""
    widget = MultiDemo()
    widget.resize(400, 600)
    widget.show()
    return widget
# Admire!
# Run the demo when executed as a script.
if __name__ == '__main__':
    main(sys.argv)
# Local Variables: ***
# mode: python ***
# End: ***
|
adaptive-learning/proso-apps | proso_concepts/models.py | Python | mit | 14,974 | 0.002872 | from collections import defaultdict
from datetime import datetime, timedelta
from django.contrib.auth.models import User
from django.db import models
from django.db.models import Q, Count, Sum, Max, Min
from django.db.models.signals import pre_save
from django.dispatch import receiver
from hashlib import sha1
from proso.dict import group_keys_by_value_lists
from proso.django.cache import cache_pure
from proso.list import flatten
from proso_common.models import get_config
from proso_models.models import Answer, Item, get_environment, get_mastery_trashold, get_predictive_model, get_time_for_knowledge_overview
from time import time as time_lib
import json
import logging
LOGGER = logging.getLogger('django.request')
class TagManager(models.Manager):
    """Manager for Tag objects able to eagerly load related concepts."""

    def prepare_related(self):
        """Return a queryset with the related concepts prefetched."""
        queryset = self.prefetch_related('concepts')
        return queryset
class Tag(models.Model):
    """
    Arbitrary tag for concepts.
    """

    # Tag identity: a (type, value) pair, localised per language (see Meta).
    type = models.CharField(max_length=50)
    value = models.CharField(max_length=200)
    lang = models.CharField(max_length=2)
    # Human-readable names for the machine identifiers above.
    type_name = models.CharField(max_length=100)
    value_name = models.CharField(max_length=100)

    objects = TagManager()

    class Meta:
        unique_together = ("type", "value", "lang")

    def to_json(self, nested=False):
        """Serialize the tag to a plain dict; unless nested, also include
        the concepts carrying this tag (nested to avoid infinite recursion)."""
        data = {
            "id": self.pk,
            "object_type": "tag",
            "type": self.type,
            "value": self.value,
            "lang": self.lang,
            "type_name": self.type_name,
            "value_name": self.value_name,
        }
        if not nested:
            data["concepts"] = [concept.to_json(nested=True) for concept in self.concepts.all()]
        return data

    def __str__(self):
        return "{}: {}".format(self.type, self.value)
class ConceptManager(models.Manager):
    """Manager for Concept providing (cached) concept<->item mappings and
    detection of concepts whose cached user statistics are stale."""

    def prepare_related(self):
        """Return a queryset with tags and actions prefetched."""
        return self.prefetch_related('tags', 'actions')

    @cache_pure()
    def get_concept_item_mapping(self, concepts=None, lang=None):
        """
        Get mapping of concepts to items belonging to concept.

        Args:
            concepts (list of Concept): Defaults to None meaning all concepts
            lang (str): language of concepts, if None use language of concepts

        Returns:
            dict: concept (int) -> list of item ids (int)
        """
        if concepts is None:
            concepts = self.filter(active=True)
            if lang is not None:
                concepts = concepts.filter(lang=lang)
        if lang is None:
            # All given concepts must share a single language, otherwise the
            # item queries below would be evaluated against mixed languages.
            languages = set([concept.lang for concept in concepts])
            if len(languages) > 1:
                raise Exception('Concepts has multiple languages')
            lang = list(languages)[0]
        item_lists = Item.objects.filter_all_reachable_leaves_many([json.loads(concept.query)
                                                                    for concept in concepts], lang)
        return dict(zip([c.pk for c in concepts], item_lists))

    @cache_pure()
    def get_item_concept_mapping(self, lang):
        """ Get mapping of items_ids to concepts containing these items

        Args:
            lang (str): language of concepts

        Returns:
            dict: item (int) -> set of concepts (int)
        """
        concepts = self.filter(active=True, lang=lang)
        return group_keys_by_value_lists(Concept.objects.get_concept_item_mapping(concepts, lang))

    def get_concepts_to_recalculate(self, users, lang, concepts=None):
        """
        Get concepts whose cached stats are outdated and have to be recalculated.

        Args:
            users (list of users or user): users whose user stats we are interesting in
            lang (str): language of used concepts
            concepts (Optional[list of concepts]): list of primary keys of concepts or concepts
                Defaults to None meaning all concepts.

        Returns:
            dict: user -> set of concepts (int) - in case of list of users
            list of stats (str) - in case of one user
        """
        only_one_user = False
        if not isinstance(users, list):
            only_one_user = True
            users = [users]

        mapping = self.get_item_concept_mapping(lang)
        current_user_stats = defaultdict(lambda: {})
        user_stats_qs = UserStat.objects.filter(user__in=users, stat="answer_count")  # we need only one type
        if concepts is not None:
            user_stats_qs = user_stats_qs.filter(concept__in=concepts)
        for user_stat in user_stats_qs:
            current_user_stats[user_stat.user_id][user_stat.concept_id] = user_stat

        concepts_to_recalculate = defaultdict(lambda: set())
        for user, item, time in Answer.objects.filter(user__in=users)\
                .values_list("user_id", "item").annotate(Max("time")):
            if item not in mapping:
                # in reality this should be a corner case, so it is efficient to not filter Answers
                continue    # item is not in concept
            time_expiration_lower_bound = get_config('proso_models', 'knowledge_overview.time_shift_hours', default=4)
            time_expiration_factor = get_config('proso_models', 'knowledge_overview.time_expiration_factor', default=2)
            for concept in mapping[item]:
                if user in current_user_stats and concept in current_user_stats[user] \
                        and current_user_stats[user][concept].time > time:
                    if not self.has_time_expired(current_user_stats[user][concept].time, time,
                                                 time_expiration_lower_bound, time_expiration_factor):
                        continue    # cache is up to date
                # BUG FIX: the original tested membership against the class
                # `Concept` instead of the `concepts` argument, so the filter
                # never matched when pk values were passed in.
                if concepts is None or concept in ([c.pk for c in concepts] if type(concepts[0]) == Concept else concepts):
                    concepts_to_recalculate[user].add(concept)

        if only_one_user:
            return concepts_to_recalculate[users[0]]
        return concepts_to_recalculate

    def has_time_expired(self, cache_time, last_answer_time, lower_bound, expiration_factor):
        """Decide whether a cached stat computed at *cache_time* is stale
        relative to the last answer time and the configured expiration."""
        cache_timedelta = cache_time - last_answer_time
        if cache_timedelta > timedelta(days=365):
            return False
        if cache_timedelta < timedelta(hours=lower_bound):
            return False
        return cache_timedelta < expiration_factor * (datetime.now() - cache_time)
class Concept(models.Model):
    """
    Model concepts for open learner model
    """

    # Short hash of the defining query (see create_identifier);
    # unique together with lang.
    identifier = models.CharField(max_length=20, blank=True)
    # Serialized (JSON) item-selection query defining the concept.
    query = models.TextField()
    name = models.CharField(max_length=200)
    lang = models.CharField(max_length=2)
    tags = models.ManyToManyField(Tag, related_name="concepts", blank=True)
    # Inactive concepts are excluded from the manager's mappings.
    active = models.BooleanField(default=True)

    objects = ConceptManager()

    class Meta:
        unique_together = ("identifier", "lang")

    def to_json(self, nested=False):
        """Serialize the concept; unless nested, include tags and actions."""
        data = {
            "id": self.pk,
            "object_type": "concept",
            "identifier": self.identifier,
            "name": self.name,
            "query": self.query,
            "lang": self.lang,
        }
        if not nested:
            data["tags"] = [tag.to_json(nested=True) for tag in self.tags.all()]
            data["actions"] = [action.to_json(nested=True) for action in self.actions.all()]
        return data

    @staticmethod
    def create_identifier(query):
        """
        Create identifier of concept

        Args:
            query (str): query defining concept

        Returns:
            str: identifier of length 20
        """
        return sha1(query.encode()).hexdigest()[:20]

    def __str__(self):
        return self.name

    def __repr__(self):
        return "{}-{}".format(self.identifier, self.lang)
class ActionManager(models.Manager):
    """Manager for Action objects able to eagerly load the owning concept."""

    def prepare_related(self):
        """Return a queryset with the parent concept selected in one query."""
        queryset = self.select_related('concept')
        return queryset
class Action(models.Model):
"""
Actions which can be done with concept
"""
concept = models.ForeignKey(Concept, related_name="actions")
identifier = models.CharField(max_length=50)
name = models.CharField(max_length=200)
url = models.CharField(max_length=200)
objects = ActionManager()
def t |
OpenMined/PySyft | packages/syft/src/syft/lib/misc/__init__.py | Python | apache-2.0 | 6,217 | 0.002091 | # stdlib
from collections import defaultdict
import sys
from typing import Any as TypeAny
from typing import Callable
from typing import Dict
from typing import KeysView
from typing import List as TypeList
from typing import Set
# third party
from cachetools import cached
from cachetools.keys import hashkey
# relative
from ...ast import add_classes
from ...ast import add_methods
from ...ast import add_modules
from ...ast import globals
from ...logger import traceback_and_raise
from .union import lazy_pairing
def get_cache() -> Dict:
    """Return a fresh, empty dict to serve as a per-function memo cache."""
    return {}
@cached(cache=get_cache(), key=lambda path, lib_ast: hashkey(path))
def solve_ast_type_functions(path: str, lib_ast: globals.Globals) -> KeysView:
    """Walk dotted *path* (e.g. "torch.Tensor") down *lib_ast* and return the
    attribute names registered at that node (cached per path)."""
    node = lib_ast
    for component in path.split("."):
        node = getattr(node, component)
    return node.attrs.keys()
def get_allowed_functions(
    lib_ast: globals.Globals, union_types: TypeList[str]
) -> Dict[str, bool]:
    """
    Compute which function names may appear on a union type.

    A name is allowed when it is registered on the AST for every union member
    that defines it; a name present on a real type but missing from that
    type's AST entry is explicitly disallowed, so calling it through the
    union would be unsafe.

    Args:
        lib_ast (Globals): the AST on which we want to generate the union pointer.
        union_types (List[str]): the qualnames of the types on which we want a union.

    Returns:
        allowed_functions (dict): maps function name (str) -> allowed (bool).
    """
    allowed: Dict[str, bool] = defaultdict(lambda: True)

    def real_type_attrs(path: str) -> Set[str]:
        components = path.split(".")
        class_name = components[-1]
        # TODO: a better way. Look at https://github.com/OpenMined/PySyft/issues/5249
        # `torch.return_types` cannot be imported / found in sys.modules
        # directly, so fetch it off the already-imported torch module.
        if components[-2] == "return_types":
            module = getattr(sys.modules["torch"], "return_types")
        else:
            module = sys.modules[".".join(components[:-1])]
        return set(dir(getattr(module, class_name)))

    for type_path in union_types:
        on_real_type = real_type_attrs(type_path)
        on_ast = solve_ast_type_functions(type_path, lib_ast)
        rejected = on_real_type - on_ast
        # Touch every AST-registered name so it exists in the mapping
        # (the &= True is a no-op on the value itself).
        for name in on_ast:
            allowed[name] &= True
        # Anything on the real type but not on the AST is forbidden.
        for name in rejected:
            allowed[name] = False
    return allowed
def create_union_ast(
    lib_ast: globals.Globals, client: TypeAny = None
) -> globals.Globals:
    """Build a Globals AST exposing the union classes from `lazy_pairing`.

    For every union class, register each allowed function (per
    `get_allowed_functions`) as a dispatching method that forwards to the
    concrete instance at call time, then create pointer/send/storable
    conveniences for each registered class.

    Args:
        lib_ast: the library AST used to decide which functions are allowed.
        client: optional client forwarded to the new Globals instance.

    Returns:
        The populated Globals AST for the union types.
    """
    ast = globals.Globals(client)

    modules = ["syft", "syft.lib", "syft.lib.misc", "syft.lib.misc.union"]
    classes = []
    methods = []
    for klass in lazy_pairing.keys():
        classes.append(
            (
                f"syft.lib.misc.union.{klass.__name__}",
                f"syft.lib.misc.union.{klass.__name__}",
                klass,
            )
        )
        union_types = lazy_pairing[klass]
        allowed_functions = get_allowed_functions(lib_ast, union_types)
        # (reconstructed line: iterate name -> allowed flag)
        for target_method, allowed in allowed_functions.items():
            if not allowed:
                continue

            # Factory functions bind `target_method` via the parameter so each
            # generated callable keeps its own name (avoids the late-binding
            # closure pitfall).  NOTE(review): `klass` in the error messages is
            # still late-bound to the loop variable — presumably acceptable
            # since it is only used for diagnostics; confirm.
            def generate_func(target_method: str) -> Callable:
                def func(self: TypeAny, *args: TypeAny, **kwargs: TypeAny) -> TypeAny:
                    func = getattr(self, target_method, None)
                    if func:
                        return func(*args, **kwargs)
                    else:
                        traceback_and_raise(
                            ValueError(
                                f"Can't call {target_method} on {klass} with the instance type of {type(self)}"
                            )
                        )

                return func

            def generate_attribute(target_attribute: str) -> TypeAny:
                def prop_get(self: TypeAny) -> TypeAny:
                    prop = getattr(self, target_attribute, None)
                    if prop is not None:
                        return prop
                    else:
                        # NOTE(review): this ValueError is constructed but never
                        # raised, so a missing attribute silently yields None.
                        # Left unchanged: raising here would also fire for
                        # attributes that are legitimately None.
                        ValueError(
                            f"Can't call {target_attribute} on {klass} with the instance type of {type(self)}"
                        )

                def prop_set(self: TypeAny, value: TypeAny) -> TypeAny:
                    setattr(self, target_attribute, value)

                return property(prop_get, prop_set)

            # TODO: Support dynamic properties for types in AST
            # torch.Tensor.grad and torch.Tensor.data are not in the class
            # Issue: https://github.com/OpenMined/PySyft/issues/5338
            if target_method == "grad" and "Tensor" in klass.__name__:
                setattr(klass, target_method, generate_attribute(target_method))
                methods.append(
                    (
                        f"syft.lib.misc.union.{klass.__name__}.{target_method}",
                        "torch.Tensor",
                    )
                )
                continue
            elif target_method == "data" and "Tensor" in klass.__name__:
                setattr(klass, target_method, generate_attribute(target_method))
            else:
                setattr(klass, target_method, generate_func(target_method))
            methods.append(
                (
                    f"syft.lib.misc.union.{klass.__name__}.{target_method}",
                    "syft.lib.python.Any",
                )
            )

    add_modules(ast, modules)
    add_classes(ast, classes)
    add_methods(ast, methods)

    for ast_klass in ast.classes:
        ast_klass.create_pointer_class()
        ast_klass.create_send_method()
        ast_klass.create_storable_object_attr_convenience_methods()

    return ast
|
Lekensteyn/Solaar | lib/logitech_receiver/settings.py | Python | gpl-2.0 | 12,856 | 0.025124 | # -*- python-mode -*-
# -*- coding: UTF-8 -*-
## Copyright (C) 2012-2013 Daniel Pavel
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License along
## with this program; if not, write to the Free Software Foundation, Inc.,
## 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from __future__ import absolute_import, division, print_function, unicode_literals
from logging import getLogger, DEBUG as _DEBUG
_log = getLogger(__name__)
del getLogger
from copy import copy as _copy
import math
from .common import (
NamedInt as _NamedInt,
NamedInts as _NamedInts,
bytes2int as _bytes2int,
int2bytes as _int2bytes,
)
#
#
#
KIND = _NamedInts(toggle=0x01, choice=0x02, range=0x04)
class Setting(object):
    """A setting descriptor.
    Needs to be instantiated for each specific device.

    A Setting combines a read/write strategy (`_rw`, register- or
    feature-based) with a validator that converts between raw reply bytes
    and setting values, and optionally persists the last value through
    `persister`.
    """
    __slots__ = ('name', 'label', 'description', 'kind', 'persister', 'device_kind',
                 '_rw', '_validator', '_device', '_value')

    def __init__(self, name, rw, validator, kind=None, label=None, description=None, device_kind=None):
        assert name
        self.name = name
        self.label = label or name
        self.description = description
        self.device_kind = device_kind

        self._rw = rw
        self._validator = validator

        # An explicit kind must be compatible with what the validator supports.
        assert kind is None or kind & validator.kind != 0
        self.kind = kind or validator.kind
        self.persister = None

    def __call__(self, device):
        """Bind this descriptor to *device*, returning a per-device copy."""
        assert not hasattr(self, '_value')
        assert self.device_kind is None or device.kind in self.device_kind

        p = device.protocol
        if p == 1.0:
            # HID++ 1.0 devices do not support features
            assert self._rw.kind == RegisterRW.kind
        elif p >= 2.0:
            # HID++ 2.0 devices do not support registers
            assert self._rw.kind == FeatureRW.kind

        o = _copy(self)
        o._value = None
        o._device = device
        return o

    @property
    def choices(self):
        # Only meaningful for choice-kind validators; None otherwise.
        assert hasattr(self, '_value')
        assert hasattr(self, '_device')
        return self._validator.choices if self._validator.kind & KIND.choice else None

    @property
    def range(self):
        # (min, max) for range-kind validators; implicitly None otherwise.
        assert hasattr(self, '_value')
        assert hasattr(self, '_device')
        if self._validator.kind == KIND.range:
            return (self._validator.min_value, self._validator.max_value)

    def read(self, cached=True):
        """Return the setting's value, preferring the cached/persisted value
        when *cached* is true, otherwise querying the device if online."""
        assert hasattr(self, '_value')
        assert hasattr(self, '_device')

        if self._value is None and self.persister:
            # We haven't read a value from the device yet,
            # maybe we have something in the configuration.
            self._value = self.persister.get(self.name)

        if cached and self._value is not None:
            if self.persister and self.name not in self.persister:
                # If this is a new device (or a new setting for an old device),
                # make sure to save its current value for the next time.
                self.persister[self.name] = self._value
            return self._value

        if self._device.online:
            reply = self._rw.read(self._device)
            if reply:
                self._value = self._validator.validate_read(reply)
            if self.persister and self.name not in self.persister:
                # Don't update the persister if it already has a value,
                # otherwise the first read might overwrite the value we wanted.
                self.persister[self.name] = self._value
            return self._value

    def write(self, value):
        """Write *value* to the device; returns the value on success, or
        None when the device write failed."""
        assert hasattr(self, '_value')
        assert hasattr(self, '_device')
        assert value is not None

        if _log.isEnabledFor(_DEBUG):
            _log.debug("%s: write %r to %s", self.name, value, self._device)

        if self._device.online:
            # Remember the value we're trying to set, even if the write fails.
            # This way even if the device is offline or some other error occurs,
            # the last value we've tried to write is remembered in the configuration.
            self._value = value
            if self.persister:
                self.persister[self.name] = value

            current_value = None
            if self._validator.needs_current_value:
                # the validator needs the current value, possibly to merge flag values
                current_value = self._rw.read(self._device)

            data_bytes = self._validator.prepare_write(value, current_value)
            if data_bytes is not None:
                if _log.isEnabledFor(_DEBUG):
                    _log.debug("%s: prepare write(%s) => %r", self.name, value, data_bytes)

                reply = self._rw.write(self._device, data_bytes)
                if not reply:
                    # tell whomever is calling that the write failed
                    return None

            return value

    def apply(self):
        """Re-apply the current (cached or persisted) value to the device."""
        assert hasattr(self, '_value')
        assert hasattr(self, '_device')

        if _log.isEnabledFor(_DEBUG):
            _log.debug("%s: apply %s (%s)", self.name, self._value, self._device)

        value = self.read()
        if value is not None:
            self.write(value)

    def __str__(self):
        if hasattr(self, '_value'):
            assert hasattr(self, '_device')
            return '<Setting([%s:%s] %s:%s=%s)>' % (self._rw.kind, self._validator.kind, self._device.codename, self.name, self._value)
        return '<Setting([%s:%s] %s)>' % (self._rw.kind, self._validator.kind, self.name)

    __unicode__ = __repr__ = __str__
#
# read/write low-level operators
#
class RegisterRW(object):
    """Read/write strategy backed by a HID++ 1.0 register."""
    __slots__ = ('register', )

    # Strategy tag checked by Setting.__call__ against the device protocol.
    kind = _NamedInt(0x01, 'register')

    def __init__(self, register):
        assert isinstance(register, int)
        self.register = register

    def read(self, device):
        # Delegate to the device's register read; reply format is device-specific.
        return device.read_register(self.register)

    def write(self, device, data_bytes):
        # Delegate to the device's register write with the prepared raw bytes.
        return device.write_register(self.register, data_bytes)
class FeatureRW(object):
    """Read/write strategy backed by a HID++ 2.0 feature."""
    __slots__ = ('feature', 'read_fnid', 'write_fnid')

    # Strategy tag checked by Setting.__call__ against the device protocol.
    kind = _NamedInt(0x02, 'feature')
    # Default function ids for the feature's read and write requests.
    default_read_fnid = 0x00
    default_write_fnid = 0x10

    def __init__(self, feature, read_fnid=default_read_fnid, write_fnid=default_write_fnid):
        assert isinstance(feature, _NamedInt)
        self.feature = feature
        self.read_fnid = read_fnid
        self.write_fnid = write_fnid

    def read(self, device):
        assert self.feature is not None
        return device.feature_request(self.feature, self.read_fnid)

    def write(self, device, data_bytes):
        assert self.feature is not None
        return device.feature_request(self.feature, self.write_fnid, data_bytes)
#
# value validators
# handle the conversion from read bytes, to setting value, and back
#
class BooleanValidator(object):
__slots__ = ('true_value', 'false_value', 'mask', 'needs_current_value')
kind = KIND.toggle
default_true = 0x01
default_false = 0x00
# mask specifies all the affected bits in the value
default_mask = 0xFF
def __init__(self, true_value=default_true, false_value=default_false, mask=default_mask):
if isinstance(true_value, int):
assert isinstance(false_value, int)
if mask is None:
mask = self.default_mask
else:
assert isinstance(mask, int)
assert true_value & false_value == 0
assert true_value & mask == true_value
assert false_value & mask == false_value
self.needs_current_value = (mask != self.default_mask)
elif isinstance(true_value, bytes):
if false_value is None or false_value == self.default_false:
false_value = b'\x00' * len(true_value)
else:
assert isinstance(false_value, bytes)
if mask is None or mask == self.default_mask:
mask = b'\xFF' * len(true_value)
else:
assert isinstance(mask, bytes)
assert len(mask) == len(true_value) == len(false_value)
tv = _bytes2int(true_value)
fv = _bytes2int(false_value)
mv = _bytes2int(mask)
assert tv & fv == 0
assert tv & mv == tv
assert fv & mv == fv
self.needs_current_value = any(m != b'\xFF' for m in mask)
else:
raise Exception("invalid mask '%r', type %s" % (mask, type(mask)))
self.true_value = true_value
self.false_value = false_value
self.mask = mask
def validate_read(self, reply_bytes):
if isinstance(self.mask, int):
reply_value = ord(reply_bytes[:1]) & self.mask
if _log.isEnabledFor(_DEBUG):
_log.debug("BooleanValidator: validate read %r => %02X", reply_bytes, re |
FrodeSolheim/fs-uae-launcher | launcher/settings/audio_settings_page.py | Python | gpl-2.0 | 1,061 | 0 | import fsui
from fswidgets.widget import Widget
from launcher.i18n import gettext
from launcher.option import Option
from launcher.settings.settings_page import SettingsPage
from system.prefs.components.notworking import PrefsNotWorkingWarningPanel
class AudioSettingsPage(SettingsPage):
    """Preferences page exposing the launcher's audio-related options."""

    def __init__(self, parent: Widget) -> None:
        super().__init__(parent)
        PrefsNotWorkingWarningPanel(parent=self)
        self.layout.add_spacer(20)
        icon = fsui.Icon("audio-settings", "pkg:workspace")
        # Call kept for translation string extraction; the result is unused.
        gettext("Audio Settings")
        title = gettext("Audio")
        subtitle = ""
        self.add_header(icon, title, subtitle)

        self.add_option("volume")
        self.add_option("stereo_separation")

        self.add_section(gettext("Floppy Drive Sound Emulation"))
        self.add_option("floppy_drive_volume")
        self.add_option(Option.FLOPPY_DRIVE_VOLUME_EMPTY)

        self.add_section(gettext("Advanced Audio Options"))
        self.add_option("audio_frequency")
        self.add_option("audio_buffer_target_size")
|
SuperDARNCanada/borealis | experiments/testing_archive/test_rxfreq_not_num.py | Python | gpl-3.0 | 1,611 | 0.005587 | #!/usr/bin/python
# write an experiment that raises an exception
import sys
import os
BOREALISPATH = os.environ['BOREALISPATH']
sys.path.append(BOREALISPATH)
import experiments.superdarn_common_fields as scf
from experiment_prototype.experiment_prototype import ExperimentPrototype
from utils.experiment_options.experimentoptions import ExperimentOptions as eo
class TestExperiment(ExperimentPrototype):
    """Negative-test experiment: 'rxfreq' is deliberately a string instead
    of a number, so experiment validation is expected to reject it."""

    def __init__(self):
        cpid = 1
        super(TestExperiment, self).__init__(cpid)

        # Beam order depends on scan direction for this radar.
        if scf.IS_FORWARD_RADAR:
            beams_to_use = scf.STD_16_FORWARD_BEAM_ORDER
        else:
            beams_to_use = scf.STD_16_REVERSE_BEAM_ORDER

        # Number of ranges depends on the site.
        if scf.opts.site_id in ["cly", "rkn", "inv"]:
            num_ranges = scf.POLARDARN_NUM_RANGES
        if scf.opts.site_id in ["sas", "pgr"]:
            num_ranges = scf.STD_NUM_RANGES

        slice_1 = {  # slice_id = 0, there is only one slice.
            "pulse_sequence": scf.SEQUENCE_7P,
            "tau_spacing": scf.TAU_SPACING_7P,
            "pulse_len": scf.PULSE_LEN_45KM,
            "num_ranges": num_ranges,
            "first_range": scf.STD_FIRST_RANGE,
            "intt": 3500,  # duration of an integration, in ms
            "beam_angle": scf.STD_16_BEAM_ANGLE,
            "beam_order": beams_to_use,
            "scanbound": [i * 3.5 for i in range(len(beams_to_use))],  # 1 min scan
            "rxfreq": '12005',  # not an int or float -- intentionally invalid
            "acf": True,
            "xcf": True,  # cross-correlation processing
            "acfint": True,  # interferometer acfs
        }
        self.add_slice(slice_1)
|
wisechengyi/pants | src/python/pants/option/enclosing_scopes_test.py | Python | apache-2.0 | 3,058 | 0.003924 | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import unittest
from pants.option.parser_hierarchy import InvalidScopeError, all_enclosing_scopes, enclosing_scope
from pants.option.scope import GLOBAL_SCOPE
class TestEnclosingScopeTraversal(unittest.TestCase):
    """Unit tests for enclosing_scope / all_enclosing_scopes traversal."""

    def test_enclosing_scope(self) -> None:
        """The enclosing scope of any non-nested scope should be the global scope,
        and the enclosing scope of a nested scope should be the scope without its
        final component."""
        self.assertEqual(GLOBAL_SCOPE, enclosing_scope(GLOBAL_SCOPE))
        self.assertEqual(GLOBAL_SCOPE, enclosing_scope("scope"))
        self.assertEqual("base", enclosing_scope("base.subscope"))

    def test_all_enclosing_scopes(self) -> None:
        """`all_enclosing_scopes` should repeatedly apply `enclosing_scope` to any
        valid single- or multiple-component scope.

        `all_enclosing_scopes` should not yield the global scope if
        `allow_global=False`.
        """
        global_closure = list(all_enclosing_scopes(GLOBAL_SCOPE, allow_global=True))
        self.assertEqual(global_closure, [GLOBAL_SCOPE])

        global_closure_excluded = list(all_enclosing_scopes(GLOBAL_SCOPE, allow_global=False))
        self.assertEqual(global_closure_excluded, [])

        base_scope = "scope"
        base_scope_closure = list(all_enclosing_scopes(base_scope))
        self.assertEqual(base_scope_closure, [base_scope, GLOBAL_SCOPE])

        subscope = "subscope"
        compound_scope = f"{base_scope}.{subscope}"
        compound_scope_closure = list(all_enclosing_scopes(compound_scope))
        self.assertEqual(compound_scope_closure, [compound_scope, base_scope, GLOBAL_SCOPE])

        compound_scope_closure_no_global = list(
            all_enclosing_scopes(compound_scope, allow_global=False)
        )
        self.assertEqual(compound_scope_closure_no_global, [compound_scope, base_scope])

    def test_valid_invalid_scope(self) -> None:
        """Scopes with dashes or underscores are treated as a single component,
        and scopes with empty components raise an InvalidScopeError."""
        base_dashed_scope = "base-scope"
        self.assertEqual(enclosing_scope(base_dashed_scope), GLOBAL_SCOPE)

        subscope_underscore = "sub_scope"
        self.assertEqual(enclosing_scope(subscope_underscore), GLOBAL_SCOPE)

        compound_scope = f"{base_dashed_scope}.{subscope_underscore}"
        self.assertEqual(enclosing_scope(compound_scope), base_dashed_scope)

        self.assertEqual(
            list(all_enclosing_scopes(compound_scope)),
            [compound_scope, base_dashed_scope, GLOBAL_SCOPE,],
        )

        invalid_scope = "a.b..c.d"
        with self.assertRaises(InvalidScopeError):
            enclosing_scope(invalid_scope)
        with self.assertRaises(InvalidScopeError):
            # need to bounce to list to get it to error since this is a generator
            list(all_enclosing_scopes(invalid_scope))
|
sharadagarwal/autorest | AutoRest/Generators/Python/Python.Tests/Expected/AcceptanceTests/Http/autoresthttpinfrastructuretestservice/operations/http_server_failure.py | Python | mit | 7,501 | 0.000533 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class HttpServerFailure(object):
"""HttpServerFailure operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
"""
    def __init__(self, client, config, serializer, deserializer):
        # Keep references to the shared service client, its configuration,
        # and the (de)serialization helpers used by every operation below.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer

        self.config = config
def head501(
self, custom_headers=None, raw=False, **operation_config):
"""
Return 501 status code - should be represented in the client as an
error
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`Error <fixtures.acceptancetestshttp.models.Error>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/http/failure/server/501'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
| request = self._client.head(url, query_parameters)
response = self._cli | ent.send(request, header_parameters, **operation_config)
if response.status_code < 200 or response.status_code >= 300:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get501(
self, custom_headers=None, raw=False, **operation_config):
"""
Return 501 status code - should be represented in the client as an
error
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`Error <fixtures.acceptancetestshttp.models.Error>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/http/failure/server/501'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code < 200 or response.status_code >= 300:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def post505(
        self, boolean_value=None, custom_headers=None, raw=False, **operation_config):
    """Return 505 status code - should be represented in the client as an
    error.

    :param boolean_value: Simple boolean value true
    :type boolean_value: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`Error <fixtures.acceptancetestshttp.models.Error>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    """
    # Build the request pieces: URL, empty query string, JSON headers.
    url = '/http/failure/server/505'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the optional boolean payload (None means "no body").
    body_content = None
    if boolean_value is not None:
        body_content = self._serialize.body(boolean_value, 'bool')

    # Issue the POST; any non-2xx status is surfaced as an ErrorException.
    request = self._client.post(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if not (200 <= response.status_code < 300):
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def delete505(
        self, boolean_value=None, custom_headers=None, raw=False, **operation_config):
    """Return 505 status code - should be represented in the client as an
    error.

    :param boolean_value: Simple boolean value true
    :type boolean_value: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`Error <fixtures.acceptancetestshttp.models.Error>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    """
    # Build the request pieces: URL, empty query string, JSON headers.
    url = '/http/failure/server/505'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the optional boolean payload (None means "no body").
    body_content = None
    if boolean_value is not None:
        body_content = self._serialize.body(boolean_value, 'bool')

    # Issue the DELETE; any non-2xx status is surfaced as an ErrorException.
    request = self._client.delete(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if not (200 <= response.status_code < 300):
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
|
whypro/IBATI | ibati/models/backup.py | Python | mpl-2.0 | 350 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from ..extensions import db
class Backup(db.Model):
    """A stored backup archive: creation timestamp, zip path and size.

    Fixed: two lines of this model were garbled by stray " | " artifacts
    (``cl | ass`` and ``size | =``).
    """

    __tablename__ = 'ibati_backup'

    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True)
    # Timestamp string (14 chars) -- presumably 'YYYYMMDDhhmmss'; confirm
    # against the writer of this table.
    date_str = db.Column(db.String(14), nullable=False)
    # Path/name of the backup zip file.
    zip_file = db.Column(db.String(255))
    # Size of the archive -- presumably in bytes; verify against caller.
    size = db.Column(db.Integer)
|
zhangnian/fastapi | fastapi/utils/error_handlers.py | Python | mit | 404 | 0.002475 | from flask import jsonify
def error_404_handler(error):
    """Return a JSON 404 response using the API's standard error envelope.

    Fixed: the payload literal was garbled by a stray " | " artifact.
    """
    resp = jsonify({'code': -1, 'msg': 'not found', 'data': None})
    resp.status_code = 404
    return resp
def error_429_handler(error):
    """Return a JSON 429 (rate-limited) response in the API's error envelope."""
    payload = {'code': -1, 'msg': 'to many requests', 'data': None}
    response = jsonify(payload)
    response.status_code = 429
    return response
def error_handler(error):
    """Serialize a custom API exception (via its ``to_dict()``) into JSON."""
    return jsonify(error.to_dict())
ShyamSS-95/Bolt | bolt/lib/nonlinear/nonlinear_solver.py | Python | gpl-3.0 | 29,921 | 0.010227 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This is the module where the main solver object for the
nonlinear solver of bolt is defined. This solver object
stores the details of the system defined under physical_system,
and is evolved using the methods of this module.
The solver has the option of using 2 different methods:
- A semi-lagrangian scheme based on Cheng-Knorr(1978) which
uses advective interpolation.(non-conservative)
- The interpolation schemes available are
linear and cubic spline.
- Finite volume scheme(conservative):
- Riemann solvers available are the local Lax-Friedrichs and 1st order
upwind scheme.
- The reconstruction schemes available are minmod, PPM, and WENO5
"""
# Importing dependencies:
import arrayfire as af
import numpy as np
import petsc4py, sys
petsc4py.init(sys.argv)
from petsc4py import PETSc
import socket
# Importing solver libraries:
from . import communicate
from . import boundaries
from . import timestep
from .file_io import dump
from .file_io import load
from .utils.bandwidth_test import bandwidth_test
from .utils.print_with_indent import indent
from .utils.performance_timings import print_table
from .utils.broadcasted_primitive_operations import multiply
from .compute_moments import compute_moments as compute_moments_imported
from .fields.fields import fields_solver
class nonlinear_solver(object):
"""
An instance of this class' attributes contains methods which are used
in evolving the system declared under physical system nonlinearly. The
state of the system then may be determined from the attributes of the
system such as the distribution function and electromagnetic fields.
Relevant physical information is obtained by coarse graining this system
by taking moments of the distribution function. This is performed by the
compute_moments() method.
"""
def __init__(self, physical_system, performance_test_flag = False):
"""
Constructor for the nonlinear_solver object. It takes the physical
system object as an argument and uses it in intialization and
evolution of the system in consideration.
Additionally, a performance test flag is also passed which when true,
stores time which is consumed by each of the major solver routines.
This proves particularly useful in analyzing performance bottlenecks
and obtaining benchmarks.
Parameters:
-----------
physical_system: The defined physical system object which holds
all the simulation information such as the initial
conditions, and the domain info is passed as an
argument in defining an instance of the
nonlinear_solver. This system is then evolved, and
monitored using the various methods under the
nonlinear_solver class.
"""
self.physical_system = physical_system
# Holding Domain Info:
self.q1_start, self.q1_end = physical_system.q1_start,\
physical_system.q1_end
self.q2_start, self.q2_end = physical_system.q2_start,\
physical_system.q2_end
self.p1_start, self.p1_end = physical_system.p1_start,\
physical_system.p1_end
self.p2_start, self.p2_end = physical_system.p2_start,\
physical_system.p2_end
self.p3_start, self.p3_end = physical_system.p3_start,\
physical_system.p3_end
# Holding Domain Resolution:
self.N_q1, self.dq1 = physical_system.N_q1, physical_system.dq1
self.N_q2, self.dq2 = physical_system.N_q2, physical_system.dq2
self.N_p1, self.dp1 = physical_system.N_p1, physical_system.dp1
self.N_p2, self.dp2 = physical_system.N_p2, physical_system.dp2
self.N_p3, self.dp3 = physical_system.N_p3, physical_system.dp3
# Getting number of ghost zones, and the boundary
# conditions that are utilized:
N_g_q = self.N_ghost_q = physical_system.N_ghost_q
N_g_p = self.N_ghost_p = physical_system.N_ghost_p
self.boundary_conditions = physical_system.boundary_conditions
# Declaring the communicator:
self._comm = PETSc.COMM_WORLD.tompi4py()
if(self.physical_system.params.num_devices>1):
af.set_device(self._comm.rank%self.physical_system.params.num_devices)
# Getting number of species:
self.N_species = len(physical_system.params.mass)
# Having the mass and charge along axis 1:
self.physical_system.params.mass = \
af.cast(af.moddims(af.to_array(physical_system.params.mass),
1, self.N_species
),
af.Dtype.f64
)
self.physical_system.params.charge = \
af.cast(af.moddims(af.to_array(physical_system.params.charge),
1, self.N_species
),
af.Dtype.f64
)
PETSc.Sys.Print('\nBackend Details for Nonlinear Solver:')
# Printing the backend details for each rank/device/node:
PETSc.Sys.syncPrint(indent('Rank ' + str(self._comm.rank) + ' of ' + str(self._comm.size-1)))
PETSc.Sys.syncPrint(indent('On Node: '+ socket.gethostname()))
PETSc.Sys.syncPrint(indent('Device Details:'))
PETSc.Sys.syncPrint(indent(af.info_str(), 2))
PETSc.Sys.syncPrint(indent('Device Bandwidth = ' + str(bandwidth_test(100)) + ' GB / sec'))
PETSc.Sys.syncPrint()
PETSc.Sys.syncFlush()
self.performance_test_flag = performance_test_flag
# Initializing variables which are used to time the components of the solver:
if(performance_test_flag == True):
self.time_ts = 0
self.time_interp2 = 0
self.time_sourcets = 0
self.time_fvm_solver = 0
self.time_reconstruct = 0
self.time_riemann = 0
self.time_fieldstep = 0
self.time_interp3 = 0
self.time_apply_bcs_f = 0
self.time_communicate_f = 0
petsc_bc_in_q1 = 'ghosted'
petsc_bc_in_q2 = 'ghosted'
# Only for periodic boundary conditions or shearing-box boundary conditions
# do the boundary conditions passed to the DA need to be changed. PETSc
# automatically handles the application of periodic boundary conditions when
# running in parallel. For shearing box boundary conditions, an interpolation
# operation needs to be applied on top of the periodic boundary conditions.
# In all other cases, ghosted boundaries are used.
if( self.boundary_conditions.in_q1_left == 'periodic'
or self.boundary_conditions.in_q1_left == 'shearing-box'
):
petsc_bc_in_q1 = 'periodic'
if( self.boundary_conditions.in_q2_bottom == 'periodic'
or self.boundary_conditions.in_q2_bottom == 'shearing-box'
):
petsc_bc_in_q2 = 'periodic'
if(self.boundary_conditions.in_q1_left == 'periodic'):
try:
assert(self.boundary_conditions.in_q1_right == 'periodic')
except:
raise Exception('Periodic boundary conditions need to be applied to \
both the boundaries of a particular axis'
)
if(self.boundary_conditions.in_q1_left == 'shearing-box'):
try:
assert(self.boundary_conditions.in_q1_right == 'shearing-box')
except:
raise Exception('Shearing box boundary conditions need to be applied to \
both the boundaries of a particular axis'
)
if(self.bound |
Nettacker/Nettacker | lib/payload/scanner/ics_honeypot/changes_percentage.py | Python | gpl-3.0 | 1,608 | 0.004975 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import json
HONEYPOT_CHANGES_PERCENTAGE = 11
def files_check():
    """Open the two files named on the command line and decode them as JSON.

    Exits with a message if either file cannot be read.  Returns a list of
    the two decoded documents, in argv order.
    """
    contents = []
    # Read both files first (same error ordering as before), then parse.
    for path in (sys.argv[1], sys.argv[2]):
        try:
            contents.append(open(path, "rb").read())
        except Exception as _:
            sys.exit(print("cannot open the file, {0}".format(path)))
    return [json.loads(blob) for blob in contents]
def percentage(data1, data2):
    """Return the percentage of whitespace-separated words of *data1* that
    differ from the word at the same position in *data2*.

    NOTE(review): words of *data1* past the end of *data2* are counted as
    unchanged -- this mirrors the original behaviour; confirm it is intended.

    :param data1: reference text
    :param data2: text compared word-by-word against *data1*
    :return: changed-word ratio as a float in [0.0, 100.0]
    """
    words1 = data1.rsplit()
    # Hoisted out of the loop: the original re-split data2 on every iteration.
    words2 = data2.rsplit()
    unchanged = 0
    for position, word in enumerate(words1):
        # Narrowed from a bare except: only "data2 is shorter" is expected here.
        if position >= len(words2) or word == words2[position]:
            unchanged += 1
    changed = len(words1) - unchanged
    if changed == 0:
        # The original raised ZeroDivisionError when nothing differed
        # (it computed len / (len - n) with len == n).
        return 0.0
    return 100.0 * changed / len(words1)
# Fixed: two lines of this entry point were garbled by stray " | " artifacts
# ('"__ | main__"' and '| PERCENTAGE').
if __name__ == "__main__":
    if len(sys.argv) != 3:
        sys.exit(print("usage: python {0} file1.json file2.json".format(sys.argv[0])))
    file1, file2 = files_check()
    # Compare every host of the first scan against its entry in the second.
    for target_selected in file1:
        NOT_FIND_FLAG = True  # NOTE(review): set but never read -- kept as-is
        for target_find in file2:
            if target_selected["host"] == target_find["host"]:
                PERCENTAGE = percentage(target_selected["I20100_RESPONSE"], target_find["I20100_RESPONSE"])
                print("HOST:{0}\tCHANGE PERCENTAGE:{1}%\tDEFAULT CONFIG:{2}\tI30100 TRAP:{3}".format(
                    target_selected["host"], PERCENTAGE, target_selected["DEFAULT_SIGNATURES"] or
                    target_selected["DEFAULT_PRODUCTS"],
                    target_selected["\x01I30100\n"]))
KevinSeghetti/survey | survey/checklist/migrations/0012_auto_20150928_1443.py | Python | gpl-3.0 | 503 | 0.001988 | # -*- coding: utf-8 -*-
# Fixed: the __future__ import line was garbled by a stray " | " artifact.
from __future__ import unicode_literals

from django.db import models, migrations
def fix_notes_field(apps, schema_editor):
    """Blank out the ``notes`` field on every existing Question row."""
    # Use the historical model from the migration state, not a direct import.
    question_model = apps.get_model("checklist", "Question")
    for question in question_model.objects.all():
        question.notes = ""
        question.save()
class Migration(migrations.Migration):
    """Data migration: reset ``Question.notes`` to an empty string.

    Fixed: the RunPython callable name was garbled ("fix_notes_fi | eld").
    """

    dependencies = [
        ('checklist', '0011_auto_20150928_1437'),
    ]

    operations = [
        migrations.RunPython(fix_notes_field),
    ]
|
meego-tablet-ux/meego-app-browser | tools/site_compare/scrapers/ie/__init__.py | Python | bsd-3-clause | 735 | 0.012245 | #!/usr/bin/python2.4
#
# Copyright 2007 Google Inc. All Rights Reserved.
"""Selects the appropriate scraper for Internet Explorer."""
__author__ = 'jhaas@google.com (Jonathan Haas)'
def GetScraper(version):
  """Returns the scraper module for the given version.

  Args:
    version: version string of IE, or None for most recent

  Returns:
    scrape module for given version
  """
  # Pychecker will warn that the parameter is unused; we only
  # support one version of IE at this time
  # We only have one version of the IE scraper for now
  return __import__("ie7", globals(), locals(), [''])
# if invoked rather than imported, test
if __nam | e__ == "__main__":
version = "7.0.5370.1"
print GetScraper(version).version
|
editeodoro/Bbarolo | pyBBarolo/_version.py | Python | gpl-2.0 | 32 | 0 | __version__ = | version = '1 | .2.3'
|
huzq/scikit-learn | sklearn/feature_extraction/tests/test_dict_vectorizer.py | Python | bsd-3-clause | 7,189 | 0.000835 | # Authors: Lars Buitinck
# Dan Blanchard <dblanchard@ets.org>
# License: BSD 3 clause
from random import Random
import numpy as np
import scipy.sparse as sp
from numpy.testing import assert_array_equal
from numpy.testing import assert_allclose
import pytest
from sklearn.feature_extraction import DictVectorizer
from sklearn.feature_selection import SelectKBest, chi2
@pytest.mark.parametrize("sparse", (True, False))
@pytest.mark.parametrize("dtype", (int, np.float32, np.int16))
@pytest.mark.parametrize("sort", (True, False))
@pytest.mark.parametrize("iterable", (True, False))
def test_dictvectorizer(sparse, dtype, sort, iterable):
    # Round-trip check over the full parameter grid: fit_transform,
    # inverse_transform and transform must agree for sparse/dense output,
    # several dtypes, sorted/unsorted feature names, and list/iterator input.
    D = [{"foo": 1, "bar": 3}, {"bar": 4, "baz": 2}, {"bar": 1, "quux": 1, "quuux": 2}]

    v = DictVectorizer(sparse=sparse, dtype=dtype, sort=sort)
    X = v.fit_transform(iter(D) if iterable else D)

    assert sp.issparse(X) == sparse
    assert X.shape == (3, 5)
    assert X.sum() == 14
    assert v.inverse_transform(X) == D

    if sparse:
        # CSR matrices can't be compared for equality
        assert_array_equal(X.A, v.transform(iter(D) if iterable else D).A)
    else:
        assert_array_equal(X, v.transform(iter(D) if iterable else D))

    if sort:
        assert v.feature_names_ == sorted(v.feature_names_)
def test_feature_selection():
    # make two feature dicts with two useful features and a bunch of useless
    # ones, in terms of chi2
    d1 = dict([("useless%d" % i, 10) for i in range(20)], useful1=1, useful2=20)
    d2 = dict([("useless%d" % i, 10) for i in range(20)], useful1=20, useful2=1)

    for indices in (True, False):
        v = DictVectorizer().fit([d1, d2])
        X = v.transform([d1, d2])
        sel = SelectKBest(chi2, k=2).fit(X, [0, 1])

        # restrict() must accept both a boolean support mask and an index
        # array, pruning the learned vocabulary to the selected features.
        v.restrict(sel.get_support(indices=indices), indices=indices)
        assert v.get_feature_names() == ["useful1", "useful2"]
def test_one_of_k():
    # String-valued features are one-hot expanded ("version=1"), while
    # numeric values are passed through as-is.
    D_in = [
        {"version": "1", "ham": 2},
        {"version": "2", "spam": 0.3},
        {"version=3": True, "spam": -1},
    ]
    v = DictVectorizer()
    X = v.fit_transform(D_in)
    assert X.shape == (3, 5)

    D_out = v.inverse_transform(X)
    assert D_out[0] == {"version=1": 1, "ham": 2}

    names = v.get_feature_names()
    # the categorical column is expanded, so the bare name must disappear
    assert "version=2" in names
    assert "version" not in names
def test_iterable_value():
    """Iterables of strings are multi-hot encoded with occurrence counts.

    Fixed: two lines of this test were garbled by stray "| " artifacts
    (one X_expected row and the D_out assignment).
    """
    D_names = ["ham", "spam", "version=1", "version=2", "version=3"]
    X_expected = [
        [2.0, 0.0, 2.0, 1.0, 0.0],
        [0.0, 0.3, 0.0, 1.0, 0.0],
        [0.0, -1.0, 0.0, 0.0, 1.0],
    ]
    D_in = [
        {"version": ["1", "2", "1"], "ham": 2},
        {"version": "2", "spam": 0.3},
        {"version=3": True, "spam": -1},
    ]
    v = DictVectorizer()
    X = v.fit_transform(D_in)
    X = X.toarray()
    assert_array_equal(X, X_expected)

    D_out = v.inverse_transform(X)
    # repeated entries ("1" twice) show up as a count of 2
    assert D_out[0] == {"version=1": 2, "version=2": 1, "ham": 2}

    names = v.get_feature_names()
    assert names == D_names
def test_iterable_not_string_error():
    # Iterable feature values may only contain strings; anything else
    # (here an int mixed into a list) must raise a descriptive TypeError.
    error_value = (
        "Unsupported type <class 'int'> in iterable value. "
        "Only iterables of string are supported."
    )
    D2 = [{"foo": "1", "bar": "2"}, {"foo": "3", "baz": "1"}, {"foo": [1, "three"]}]
    v = DictVectorizer(sparse=False)
    with pytest.raises(TypeError) as error:
        v.fit(D2)
    assert str(error.value) == error_value
def test_mapping_error():
    # Nested mappings are not a supported value type and must be rejected
    # with an explicit error message.
    error_value = (
        "Unsupported value type <class 'dict'> "
        "for foo: {'one': 1, 'three': 3}.\n"
        "Mapping objects are not supported."
    )
    D2 = [
        {"foo": "1", "bar": "2"},
        {"foo": "3", "baz": "1"},
        {"foo": {"one": 1, "three": 3}},
    ]
    v = DictVectorizer(sparse=False)
    with pytest.raises(TypeError) as error:
        v.fit(D2)
    assert str(error.value) == error_value
def test_unseen_or_no_features():
    # Transforming unseen features (or an empty dict) must yield an
    # all-zero row; transforming an empty *list* of samples is an error.
    D = [{"camelot": 0, "spamalot": 1}]
    for sparse in [True, False]:
        v = DictVectorizer(sparse=sparse).fit(D)

        X = v.transform({"push the pram a lot": 2})
        if sparse:
            X = X.toarray()
        assert_array_equal(X, np.zeros((1, 2)))

        X = v.transform({})
        if sparse:
            X = X.toarray()
        assert_array_equal(X, np.zeros((1, 2)))

        try:
            v.transform([])
        except ValueError as e:
            assert "empty" in str(e)
def test_deterministic_vocabulary():
    # Generate equal dictionaries with different memory layouts
    items = [("%03d" % i, i) for i in range(1000)]
    rng = Random(42)
    d_sorted = dict(items)
    rng.shuffle(items)
    d_shuffled = dict(items)

    # check that the memory layout does not impact the resulting vocabulary
    # (i.e. the learned mapping must not depend on dict insertion order)
    v_1 = DictVectorizer().fit([d_sorted])
    v_2 = DictVectorizer().fit([d_shuffled])

    assert v_1.vocabulary_ == v_2.vocabulary_
def test_n_features_in():
    # For vectorizers, n_features_in_ does not make sense and does not exist.
    dv = DictVectorizer()
    assert not hasattr(dv, "n_features_in_")
    d = [{"foo": 1, "bar": 2}, {"foo": 3, "baz": 1}]
    dv.fit(d)
    # fitting must not create the attribute either
    assert not hasattr(dv, "n_features_in_")
def test_dictvectorizer_dense_sparse_equivalence():
    """Check the equivalence between between sparse and dense DictVectorizer.

    Non-regression test for:
    https://github.com/scikit-learn/scikit-learn/issues/19978
    """
    movie_entry_fit = [
        {"category": ["thriller", "drama"], "year": 2003},
        {"category": ["animation", "family"], "year": 2011},
        {"year": 1974},
    ]
    # transform input includes a feature never seen during fit
    movie_entry_transform = [{"category": ["thriller"], "unseen_feature": "3"}]
    dense_vectorizer = DictVectorizer(sparse=False)
    sparse_vectorizer = DictVectorizer(sparse=True)

    dense_vector_fit = dense_vectorizer.fit_transform(movie_entry_fit)
    sparse_vector_fit = sparse_vectorizer.fit_transform(movie_entry_fit)

    assert not sp.issparse(dense_vector_fit)
    assert sp.issparse(sparse_vector_fit)

    assert_allclose(dense_vector_fit, sparse_vector_fit.toarray())

    dense_vector_transform = dense_vectorizer.transform(movie_entry_transform)
    sparse_vector_transform = sparse_vectorizer.transform(movie_entry_transform)

    assert not sp.issparse(dense_vector_transform)
    assert sp.issparse(sparse_vector_transform)

    assert_allclose(dense_vector_transform, sparse_vector_transform.toarray())

    # inverse_transform must also agree between the two modes
    dense_inverse_transform = dense_vectorizer.inverse_transform(dense_vector_transform)
    sparse_inverse_transform = sparse_vectorizer.inverse_transform(
        sparse_vector_transform
    )

    expected_inverse = [{"category=thriller": 1.0}]
    assert dense_inverse_transform == expected_inverse
    assert sparse_inverse_transform == expected_inverse
def test_dict_vectorizer_unsupported_value_type():
    """Check that we raise an error when the value associated to a feature
    is not supported.

    Non-regression test for:
    https://github.com/scikit-learn/scikit-learn/issues/19489
    """

    # arbitrary object with no supported conversion
    class A:
        pass

    vectorizer = DictVectorizer(sparse=True)
    X = [{"foo": A()}]
    err_msg = "Unsupported value Type"
    with pytest.raises(TypeError, match=err_msg):
        vectorizer.fit_transform(X)
|
lizardsystem/lizard-wbconfiguration | lizard_wbconfiguration/migrations/0033_auto__chg_field_areaconfiguration_nutc_inc_2__chg_field_areaconfigurat.py | Python | gpl-3.0 | 75,395 | 0.005756 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'AreaConfiguration.nutc_inc_2'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'nutc_inc_2', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.nutc_inc_3'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'nutc_inc_3', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.nutc_inc_1'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'nutc_inc_1', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.min_concentr_phopshate_seepage'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'min_concentr_phopshate_seepage', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.nutc_inc_4'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'nutc_inc_4', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.nutc_min_3'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'nutc_min_3', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.max_outtake'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'max_outtake', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.surface'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'surface', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.marge_bov'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'marge_bov', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.min_concentr_nitrogyn_seepage'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'min_concentr_nitrogyn_seepage', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.min_concentr_so4_seepage'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'min_concentr_so4_seepage', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.init_water_level'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'init_water_level', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.kwel'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'kwel', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.min_concentr_nitrogyn_precipitation' |
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'min_concentr_nitrogyn_precipitation', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.nutc_min_2'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'nutc_min_2', self.gf('django.db.models.fields.DecimalField')(null=True, m | ax_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.ini_con_cl'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'ini_con_cl', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.wegz'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'wegz', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.nutc_min_4'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'nutc_min_4', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.zomerp'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'zomerp', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.incr_concentr_phosphate_seepage'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'incr_concentr_phosphate_seepage', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.incr_concentr_nitrogyn_seepage'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'incr_concentr_nitrogyn_seepage', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.incr_concentr_so4_precipitation'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'incr_concentr_so4_precipitation', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.incr_concentr_so4_seepage'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'incr_concentr_so4_seepage', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.concentr_chloride_seepage'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'concentr_chloride_seepage', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.min_concentr_so4_precipitation'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'min_concentr_so4_precipitation', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.winterp'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'winterp', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.max_intake'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'max_intake', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.bottom_height'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'bottom_height', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.lentep'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'lentep', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.incr_concentr_nitrogyn_precipitation'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'incr_concentr_nitrogyn_precipitation', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.incr_concentr_phosphate_precipitation'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'incr_concentr_phosphate_precipitation', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.concentr_chloride_precipitation'
db.alter_column('lizard_wbconfiguration_areaconfiguration', 'concentr_chloride_precipitation', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=5))
# Changing field 'AreaConfiguration.min_concentr_phospha |
KWierso/treeherder | treeherder/etl/jobs.py | Python | mpl-2.0 | 18,563 | 0.000593 | import copy
import logging
import os
import time
from datetime import datetime
from hashlib import sha1
import newrelic.agent
from django.core.exceptions import ObjectDoesNotExist
from django.db.utils import IntegrityError
from past.builtins import long
from treeherder.etl.artifact import (serialize_artifact_json_blobs,
store_job_artifacts)
from treeherder.etl.common import get_guid_root
from treeherder.model.models import (BuildPlatform,
FailureClassification,
Job,
JobGroup,
JobLog,
JobType,
Machine,
MachinePlatform,
Option,
OptionCollection,
Product,
Push,
ReferenceDataSignatures,
TaskclusterMetadata)
logger = logging.getLogger(__name__)
def _get_number(s):
    """Coerce *s* to a (long) integer; unparseable or None input yields 0."""
    try:
        value = long(s)
    except (ValueError, TypeError):
        value = 0
    return value
def _remove_existing_jobs(data):
    """
    Remove jobs from data where we already have them in the same state.

    1. split the incoming jobs into pending, running and complete.
    2. fetch the ``job_guids`` from the db that are in the same state as they
       are in ``data``.
    3. build a new list of jobs in ``new_data`` that are not already in
       the db and pass that back. It could end up empty at that point.
    """
    new_data = []

    guids = [datum['job']['job_guid'] for datum in data]
    # Map each known guid to its current DB state with one bulk query.
    state_map = {
        guid: state for (guid, state) in Job.objects.filter(
            guid__in=guids).values_list('guid', 'state')
    }

    for datum in data:
        job = datum['job']
        if not state_map.get(job['job_guid']):
            # Guid not in the DB yet: definitely a new job.
            new_data.append(datum)
        else:
            # should not transition from running to pending,
            # or completed to any other state
            current_state = state_map[job['job_guid']]
            if current_state == 'completed' or (
                job['state'] == 'pending' and
                current_state == 'running'):
                continue
            new_data.append(datum)

    return new_data
def _load_job(repository, job_datum, push_id):
"""
Load a job into the treeherder database
If the job is a ``retry`` the ``job_guid`` will have a special
suffix on it. But the matching ``pending``/``running`` job will not.
So we append the suffixed ``job_guid`` to ``retry_job_guids``
so that we can update the job_id_lookup later with the non-suffixed
``job_guid`` (root ``job_guid``). Then we can find the right
``pending``/``running`` job and update it with this ``retry`` job.
"""
build_platform, _ = BuildPlatform.objects.get_or_create(
os_name=job_datum.get('build_platform', {}).get('os_name', 'unknown'),
platform=job_datum.get('build_platform', {}).get('platform', 'unknown'),
architecture=job_datum.get('build_platform', {}).get('architecture',
'unknown'))
machine_platform, _ = MachinePlatform.objects.get_or_create(
os_name=job_datum.get('machine_platform', {}).get('os_name', 'unknown'),
platform=job_datum.get('machine_platform', {}).get('platform', 'unknown'),
architecture=job_datum.get('machine_platform', {}).get('architecture',
'unknown'))
option_names = job_datum.get('option_collection', [])
option_collection_hash = OptionCollection.calculate_hash(
option_names)
if not OptionCollection.objects.filter(
option_collection_hash=option_collection_hash).exists():
# in the unlikely event that we haven't seen this set of options
# before, add the appropriate database rows
options = []
for option_name in option_names:
option, _ = Option.objects.get_or_create(name=option_name)
options.append(option)
for option in options:
OptionCollection.objects.create(
option_collection_hash=option_collection_hash,
option=option)
machine, _ = Machine.objects.get_or_create(
name=job_datum.get('machine', 'unknown'))
job_type, _ = JobType.objects.get_or_create(
symbol=job_datum.get('job_symbol') or 'unknown',
name=job_datum.get('name') or 'unknown')
job_group, _ = JobGroup.objects.get_or_create(
name=job_datum.get('group_name') or 'unknown',
symbol=job_datum.get('group_symbol') or 'unknown')
product_name = job_datum.get('product_name', 'unknown')
if not product_name.strip():
product_name = 'unknown'
product, _ = Product.objects.get_or_create(name=product_name)
job_guid = job_datum['job_guid']
job_guid = job_guid[0:50]
who = job_datum.get('who') or 'unknown'
who = who[0:50]
reason = job_datum.get('reason') or 'unknown'
reason = reason[0:125]
state = job_datum.get('state') or 'unknown'
state = state[0:25]
build_system_type = job_datum.get('build_system_type', 'buildbot')
reference_data_name = job_datum.get('reference_data_name', None)
default_failure_classification = FailureClassification.objects.get(
name='not classified')
sh = sha1()
sh.update(''.join(
map(str,
[build_system_type, repository.name, build_platform.os_name,
build_platform.platform, build_platform.architecture,
machine_platform.os_name, machine_platform.platform,
machine_platform.architecture,
job_group.name, job_group.symbol, job_type.name,
job_type.symbol, option_collection_hash,
reference_data_name])).encode('utf-8'))
signature_hash = sh.hexdigest()
# Should be the buildername in the case of buildbot (if not provided
# default to using the signature hash)
if not reference_data_name:
reference_data_name = signature_hash
signature, _ = ReferenceDataSignatures.objects.get_or_create(
name=reference_data_name,
signature=signature_hash,
build_system_type=build_system_type,
repository=repository.name, defaults={
'first_submission_timestamp': time.time(),
'build_os_name': build_platform.os_name,
'build_platform': build_platform.platform,
'build_architecture': build_platform.architecture,
'machine_os_name': machine_platform.os_name,
'machine_platform': machine_platform.platform,
'machine_architecture': machine_platform.architecture,
'job_group_name': job_group.name,
'job_group_symbol': job_group.symbol,
'job_type_name': job_type.name,
'job_type_symbol': job_type.symbol,
'option_collection_hash': option_collection_hash
})
tier = job_datum.get('tier') or 1
result = job_datum.get('result', 'unknown')
submit_time = datetime.fromtimestamp(
_get_number(job_datum.get('submit_timestamp')))
start_time = datetime.fromtimestamp(
_get_number(job_d | atum.get('start_timestamp')))
end_time = datetime.fromtimestamp(
_get_number(job_datum.get('end_timestamp')))
# first, try to create the job with the given guid (if it doesn't
# exist yet)
job_guid_root = get_guid_root(job_guid)
if not Job.objects.filter(guid__in=[job_guid, job_guid_root]).exists():
# This could theoretically already have been created by another process
# that is running updates sim | ultaneously. So just attempt to create
# it, but allow it to skip if it's the same guid. The odds are
# extremely high that this is a pending and running job that came in
# quick succession and are being processed by two different workers.
|
tridge/MAVProxy | MAVProxy/modules/mavproxy_rally.py | Python | gpl-3.0 | 13,720 | 0.0043 | """
MAVProxy rally module
"""
from pymavlink import mavwp
from pymavlink import mavutil
import time, os, platform
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.lib import mp_util
if mp_util.has_wxpython:
from MAVProxy.modules.lib.mp_menu import *
class RallyModule(mp_module.MPModule):
def __init__(self, mpstate):
super(RallyModule, self).__init__(mpstate, "rally", "rally point control", public = True)
self.rallyloader_by_sysid = {}
self.add_command('rally', self.cmd_rally, "rally point control", ["<add|clear|land|list|move|remove|>",
"<load|save> (FILENAME)"])
self.have_list = False
self.abort_alt = 50
self.abort_first_send_time = 0
self.abort_previous_send_time = 0
self.abort_ack_received = True
self.menu_added_console = False
self.menu_added_map = False
if mp_util.has_wxpython:
self.menu = MPMenuSubMenu('Rally',
items=[MPMenuItem('Clear', 'Clear', '# rally clear'),
MPMenuItem('List', 'List', '# rally list'),
MPMenuItem('Load', 'Load', '# rally load ',
handler=MPMenuCallFileDialog(flags=('open',),
title='Rally Load',
wildcard='RallyPoints(*.txt,*.rally,*.ral)|*.txt;*.rally;*.ral')),
MPMenuItem('Save', 'Save', '# rally save ',
handler=MPMenuCallFileDialog(flags=('save', 'overwrite_prompt'),
title='Rally Save',
wildcard='RallyPoints(*.txt,*.rally,*.ral)|*.txt;*.rally;*.ral')),
MPMenuItem('Add', 'Add', '# rally add ',
handler=MPMenuCallTextDialog(title='Rally Altitude (m)',
default=100))])
@property
def rallyloader(self):
'''rally loader by system ID'''
if not self.target_system in self.rallyloader_by_sysid:
self.rallyloader_by_sysid[self.target_system] = mavwp.MAVRallyLoader(self.settings.target_system,
self.settings.target_component)
return self.rallyloader_by_sysid[self.target_system]
def last_change(self):
'''return time of last changes made to rally points'''
return self.rallyloader.last_change
def rally_count(self):
'''return number of waypoints'''
return self.rallyloader.rally_count()
def rally_point(self, i):
'''return instance of mavutil.mavlink.MAVLink_rally_point_message'''
return self.rallyloader.rally_point(i)
def set_last_change(self, time):
'''can be used to cause map redraws'''
self.rallyloader.last_change = time
def idle_task(self):
'''called on idle'''
if self.module('console') is not None:
if not self.menu_added_console:
self.menu_added_console = True
self.module('console').add_menu(self.menu)
else:
self.menu_added_console = False
if self.module('map') is not None:
if not self.menu_added_map:
self.menu_added_map = True
self.module('map').add_menu(self.menu)
else:
self.menu_added_map = False
'''handle abort command; it is critical that the AP to receive it'''
if self.abort_ack_received is False:
#only send abort every second (be insistent, but don't spam)
if (time.time() - self.abort_previous_send_time > 1):
self.master.mav.command_long_send(self.settings.target_system,
self.settings.target_component,
mavutil.mavlink.MAV_CMD_DO_GO_AROUND,
0, int(self.abort_alt), 0, 0, 0, 0, 0, 0,)
self.abort_previous_send_time = time.time()
#try to get an ACK from the plane:
if self.abort_first_send_time == 0:
self.abort_first_send_time = time.time()
elif time.time() - self.abort_first_send_time > 10: #give up after 10 seconds
print("Unable to send abort command!\n")
self.abort_ack_received = True
def cmd_rally_add(self, args):
'''handle rally add'''
if len(args) < 1:
alt = self.settings.rallyalt
else:
alt = float(args[0])
if len(args) < 2:
break_alt = self.settings.rally_breakalt
else:
break_alt = float(args[1])
|
if len(args) < 3:
flag = self.settings.rally_flags
else:
flag = int(args[2])
#currently only supporting autoland values:
#True (nonzero) and False (zero)
| if (flag != 0):
flag = 2
if not self.have_list:
print("Please list rally points first")
return
if (self.rallyloader.rally_count() > 4):
print("Only 5 rally points possible per flight plan.")
return
latlon = self.mpstate.click_location
if latlon is None:
print("No map click position available")
return
land_hdg = 0.0
self.rallyloader.create_and_append_rally_point(latlon[0] * 1e7, latlon[1] * 1e7, alt, break_alt, land_hdg, flag)
self.send_rally_points()
print("Added Rally point at %s %f %f, autoland: %s" % (str(latlon), alt, break_alt, bool(flag & 2)))
def cmd_rally_alt(self, args):
'''handle rally alt change'''
if (len(args) < 2):
print("Usage: rally alt RALLYNUM newAlt <newBreakAlt>")
return
if not self.have_list:
print("Please list rally points first")
return
idx = int(args[0])
if idx <= 0 or idx > self.rallyloader.rally_count():
print("Invalid rally point number %u" % idx)
return
new_alt = int(args[1])
new_break_alt = None
if (len(args) > 2):
new_break_alt = int(args[2])
self.rallyloader.set_alt(idx, new_alt, new_break_alt)
self.send_rally_point(idx-1)
self.fetch_rally_point(idx-1)
self.rallyloader.reindex()
def cmd_rally_move(self, args):
'''handle rally move'''
if len(args) < 1:
print("Usage: rally move RALLYNUM")
return
if not self.have_list:
print("Please list rally points first")
return
idx = int(args[0])
if idx <= 0 or idx > self.rallyloader.rally_count():
print("Invalid rally point number %u" % idx)
return
rpoint = self.rallyloader.rally_point(idx-1)
latlon = self.mpstate.click_location
if latlon is None:
print("No map click position available")
return
oldpos = (rpoint.lat*1e-7, rpoint.lng*1e-7)
self.rallyloader.move(idx, latlon[0], latlon[1])
self.send_rally_point(idx-1)
p = self.fetch_rally_point(idx-1)
if p.lat != int(latlon[0]*1e7) or p.lng != int(latlon[1]*1e7):
print("Rally move failed")
return
self.rallyloader.reindex()
print("Moved rally point from %s to %s at %fm" % (str(oldpos), str(latlon), rpoint.alt))
def cmd_rally(self, args):
'''rally point commands'''
#TODO: add_land arg
if len(args) < 1:
self.print_usage()
return
elif args[0] == "add":
self.cmd_rally_add(args[1:])
elif args[0] == "move":
self.cmd_rally_move(ar |
lileeyao/acm | dp/hard/longest_substring_without_repeating_characters.py | Python | gpl-2.0 | 520 | 0.001923 | class Solution:
# @return an integer
def lengthOfLon | gestSubstring(self, s):
lookup = {}
maxlen = 0
start = 0
for i in xrange(len(s)):
if s[i] not in lookup:
lookup[s[i]] = i
else:
maxlen = max(maxlen, i - start)
start = max(start, lookup[s[i]] + 1)
lookup[s[i]] = i
return max(maxlen, len(s)-start)
s = Sol | ution()
print s.lengthOfLongestSubstring("wlrbbmbmasdas")
|
sumeet/stopwatch | tests.py | Python | mit | 2,021 | 0 | import time
import unittest
import stopwatch
class MockSystemClock(object):
"""Represents a system clock with time starting at `0` and incremented
whenever `sleep()` is called.
Meant to replace the `time()` and `sleep()` functions in the `time` module.
>>> clock = MockSystemClock()
>>> clock.time()
0
>>> clock.sleep(1)
>>> clock.time()
1
"""
def __init__(self):
"""Initialize the current system time to `0`."""
self._system_time = 0
def time(self):
"""Return the current system time."""
return self._system_time
d | ef sleep(self, seconds):
"""Increment the system time by `seconds`."""
self._system_time += seconds
class StopwatchTestCase(unittest.TestCase):
def setUp(self):
"""Monkey patch `time.time()` and `time.sleep()` to point to the
corresponding methods on a new `MockSystemClock` instance.
"""
| self._time_time = time.time
self._time_sleep = time.sleep
mock_system_clock = MockSystemClock()
time.time = mock_system_clock.time
time.sleep = mock_system_clock.sleep
def tearDown(self):
"""Restore the `time` module."""
time.time = self._time_time
time.sleep = self._time_sleep
def test_stopwatch_as_object(self):
"""Test using a `Stopwatch` as a regular object."""
sw = stopwatch.Stopwatch()
sw.start()
self.assertEqual(0, sw.time_elapsed)
time.sleep(1)
self.assertEqual(1, sw.time_elapsed)
sw.stop()
self.assertEqual(1, sw.total_run_time)
def test_stopwatch_as_context_manager(self):
"""Test using a `Stopwatch` as a context manager."""
with stopwatch.Stopwatch() as sw:
sw.start()
self.assertEqual(0, sw.time_elapsed)
time.sleep(1)
self.assertEqual(1, sw.time_elapsed)
self.assertEqual(1, sw.total_run_time)
if __name__ == '__main__':
unittest.main()
|
Damgaard/account-deleter | deleter/tests.py | Python | gpl-3.0 | 1,050 | 0 | # Copyright (C) 2013 Andreas Damgaard Pedersen
#
# This program is free software: you | can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the G | NU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This file demonstrates writing tests using the unittest module.
These will pass when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""Test that 1 + 1 always equals 2."""
self.assertEqual(1 + 1, 2)
|
AlpineNow/python-alpine-api | alpine/workspace.py | Python | mit | 18,353 | 0.003106 | from __future__ import absolute_import
from .exception import *
from .alpineobject import AlpineObject
from .user import User
import json
class Workspace(AlpineObject):
"""
A class for interacting with workspaces. The top-level methods deal with workspace properties. The subclass
`Member` can be used to interact with member lists.
"""
member = None
@property
def stage(self):
return self.Stage()
@property
def memberRole(self):
return self.MemberRole()
def __init__(self, base_url, session, token):
super(Workspace, self).__init__(base_url, session, token)
self.member = self.Member(base_url, session, token)
def create(self, workspace_name, public=False, summary=None):
"""
Creates a workspace. Will fail if the workspace name already exists.
:param str workspace_name: Unique workspace name.
:param bool public: Allow the workspace to be viewable by non-members and non-admins.
:param str summary: Description of new workspace.
:return: Created workspace information or error message.
:rtype: dict
Example::
>>> session.workspace.create(workspace_name = "Public Data ETL", public = True)
"""
url = "{0}/workspaces".format(self.base_url)
url = self._add_token_to_url(url)
str_public = "false"
if public:
str_public = "true"
self.session.headers.update({"Content-Type": "application/x-www-form-urlencoded"})
payload = {"name": workspace_name,
"public": str_public,
"summary": summary}
response = self.session.post(url, data=payload, verify=False)
self.logger.debug("Received response code {0} with reason {1}...".format(response.status_code, response.reason))
try:
return response.json()['response']
except:
return response.json()
def delete(self, workspace_id):
"""
Attempts to delete the given workspace. Will fail if the workspace does not exist.
:param str workspace_id: ID of the workspace to be deleted.
:return: None.
:rtype: NoneType
:exception WorkspaceNotFoundException: The workspace does not exist.
Example::
>>> session.workspace.delete(workspace_id = 1671)
"""
try:
self.get(workspace_id)
url = "{0}/workspaces/{1}".format(self.base_url, workspace_id)
url = self._add_token_to_url(url)
self.logger.debug("Deleting workspace with ID: <{0}>".format(workspace_id))
response = self.session.delete(url)
self.logger.debug("Received response code {0} with reason {1}"
.format(response.status_code, response.reason))
if response.status_code == 200:
self.logger.debug("Workspace successfully deleted.")
else:
raise InvalidResponseCodeException("Response code invalid, the expected response code is {0}, "
"the actual response code is {1}".format(200, response.status_code))
return None
except WorkspaceNotFoundException as err:
self.logger.debug("Workspace not found, error {0}".format(err))
def get_list(self, user_id=None, active=None, per_page=50):
"""
Gets a list of metadata for each workspace. If a user ID is provided, only workspaces that the user \
is a member of will be returned.
:param str user_id: ID of the user.
:param bool active: Return only active workspaces (optional). True will only return the active spaces.
:param int per_page: Maximum number to fetch with each API call.
:return: List of workspace metadata.
:rtype: list of dict
:exception UserNotFoundException: The user does not exist.
Example::
>>> my_workspaces = session.workspace.get_list(user_id = my_user_id)
>>> len(my_workspaces)
8
"""
if active is True:
active_state = "true"
else:
active_state = None
workspace_list = None
url = "{0}/workspaces".format(self.base_url)
url = self._add_token_to_url(url)
self.session.headers.update({"Content-Type": "application/json"})
payload = {"user_id": user_id,
"active": active_state,
"per_page": per_page,
}
page_current = 0
while True:
payload['page'] = page_current + 1
r = self.session.get(url, params=payload, verify=False)
workspace_list_response = r.json()
page_total = workspace_list_response['pagination']['total']
page_current = workspace_list_response['pagination']['page']
if workspace_list:
workspace_list.extend(workspace_list_response['response'])
else:
workspace_list = workspace_list_response['response']
if page_total == page_current:
break
return workspace_list
def get(self, workspace_id):
"""
Gets a workspace's metadata.
:param str workspace_id: Unique workspace name.
:return: Selected workspace's data
:rtype: dict
:exception WorkspaceNotFoundException: The workspace does not exist.
Example::
>>> session.workspace.get(workspace_id = 1761)
"""
url = "{0}/workspaces/{1}".format(self.base_url, workspace_id)
url = self._add_token_to_url(url)
self.session.headers.update({"Content-Type": "application/json"})
r = self.session.get(url, verify=False)
workspace_response = r.json()
try:
if workspace_response['response']:
self.logger.debug("Found workspace ID: <{0}> in list".format(workspace_id))
return workspace_response['response']
else:
raise WorkspaceNotFoundException("Workspace ID: <{0}> not found".format(workspace_id))
except Exception as err:
raise WorkspaceNotFoundException("Workspace ID: <{0}> not found".format(workspace_id))
def get_id(self, workspace_name, user_id=None):
"""
Get the ID of the workspace. Will throw an exception if the workspace does not exist.
:param str workspace_name: Unique workspace name.
:param int user_id: ID of a user.
:return: ID of the workspace.
:rtype: int
:exception WorkspaceNotFoundException: The workspace does not exist.
Example::
>>> session.workspace.get_id("Public Data ETL")
1672
"""
workspace_list = self.get_list(user_id)
for workspace in workspace_list:
if workspace['name'] == workspace_name:
return workspace['id']
# return None
raise WorkspaceNotFoundException("The workspace with name '{0}' is not found for user ID: <{1}>".format(
workspace_name, user_id))
def update(self, workspace_id, is_public=None, is_active=None, name=None,
summary=None, stage=None, owner_id=None):
"""
Update a workspace's metadata. Only included fields will be changed.
:param int workspace_id: ID of the workspace.
:param bool is_public: Allow the workspace to be viewable by non-members and non-admins.
:param bool is_active: Set active vs. archived status.
:param str name: New name for the workspace.
:param str summary: New description of the workspace.
:param int stage: Stage ID. Use the `Workspace.Stage` object for convenience.
:param int owner_id: ID of the new workspace owner. This owner must also be a member of the workspace.
:return: Updated workspace metadata.
:rtype: dict
Example::
>>> session.workspace.update(workspace_id = 1672, summary = "New focus of project is ML!",
>>> | s | tage |
tdruez/django-registration | registration/tests/test_views.py | Python | bsd-3-clause | 1,271 | 0.000787 | """
Tests for django-registration's built-in views.
"""
from django.core.urlresolvers import reverse
from django.test import override_settings, TestCase
from ..models import RegistrationProfile
@override_settings(ROOT_URLCONF='registration.tests.urls')
class ActivationViewTests(TestCase):
"""
Tests for aspects of the activation view not currently exercised
by any built-in workflow.
"""
@override_settings(ACCOUNT_ACTIVATION_DAYS=7)
def test_activation(self):
"""
Activation of an account functions properly when using a
simple string URL as the success redirect.
"""
| data = {
'username': 'bob',
'email': 'bob@example.com',
'password1': 'secret',
'password2': 'secret'
}
resp = self.client.post(
reverse('registration_register'),
data=data
)
profile = Regis | trationProfile.objects.get(
user__username=data['username']
)
resp = self.client.get(
reverse(
'registration_activate',
args=(),
kwargs={'activation_key': profile.activation_key}
)
)
self.assertRedirects(resp, '/')
|
jupiny/EnglishDiary | english_diary/users/mixins/tasks.py | Python | mit | 1,168 | 0 | from django.conf import settin | gs
from django.contrib.auth import get_user_model
from django.template.loader import render_to_string
from core.utils.email import send_email
class SendVerificationEmailTaskMixin(object):
@property
def email_sender(self):
return NotImplemented
@property
def email_subject(self):
return N | otImplemented
@property
def email_template(self):
return NotImplemented
def run(self, user_id):
user = get_user_model().objects.get(pk=user_id)
# Send email to only user who has email except for TEST_EMAIL
if user.email and user.email != settings.TEST_EMAIL:
send_email(
sender=self.email_sender,
receiver=user.email,
subject=self.email_subject.format(
username=user.username
),
html=render_to_string(
self.email_template,
context={
"username": user.username,
"verification_key": user.profile.verification_key,
},
),
)
|
Velkata/IntorductionToLogicApps_AzureBootcampBulgaria2016 | src/daTkzsGame/Tkzs/Tkzs/app.py | Python | apache-2.0 | 876 | 0.004566 | """
This script runs the application using a development server.
It contains the definition of routes and views for the application.
"""
from flask import Flask, render_template, request
import requests
app = Flask(__name__)
app.config.from_pyfile('app.cfg')
# Make the WSGI interface available at the top level so wfastcgi can get it.
wsgi_app = app.wsgi_app
@app.route('/')
def home():
return render_template('index.html')
@app.route('/score', methods=['POST'])
def score():
r = request | s.post(app.config['TRIGGER_URL'], data=request.data, headers={'Con | tent-type': 'application/json'}, timeout=200)
return r.text, r.status_code
if __name__ == '__main__':
import os
HOST = os.environ.get('SERVER_HOST', 'localhost')
try:
PORT = int(os.environ.get('SERVER_PORT', '5555'))
except ValueError:
PORT = 5555
app.run(HOST, PORT)
|
opencivicdata/scrapers-ca | disabled/ca_sk_municipalities/people.py | Python | mit | 3,626 | 0.003309 | from utils import CanadianScraper, CanadianPerson as Person
from pupa.scrape import Organization
import os
import re
import subprocess
from urllib.request import urlopen
COUNCIL_PAGE = 'http://www.municipal.gov.sk.ca/Programs-Services/Municipal-Directory-pdf'
# See also HTML format http://www.mds.gov.sk.ca/apps/Pub/MDS/welcome.aspx
class SaskatchewanMunicipalitiesPersonScraper(CanadianScraper):
def scrape(self):
response = urlopen(COUNCIL_PAGE).read()
pdf = open('/tmp/sk.pdf', 'w')
pdf.write(response)
| pdf.close()
data = subprocess.check_output(['pdftotext', '-layout', '/tmp/sk.pdf', '-'])
data = data.splitlines(True)
pages = []
page = []
for line in data:
if line.strip() and 'Page' not in line and 'CITIES' not in line and 'NORTHERN TOWNS, VILLAGES' not in line:
page.append(line)
| elif page:
pages.append(page)
page = []
districts = []
for page in pages:
index = re.search(r'(\s{6,})', page[0])
if index:
index = index.end() - 1
else:
index = -1
dist1 = []
dist2 = []
for line in page:
dist1.append(line[:index].strip())
dist2.append(line[index:].strip())
districts.append(dist1)
districts.append(dist2)
for district in districts:
district_name = district.pop(0).split(',')[0].title()
org = Organization(name=district_name + ' Council', classification='legislature', jurisdiction_id=self.jurisdiction.jurisdiction_id)
org.add_source(COUNCIL_PAGE)
councillors = []
contacts = {}
for i, line in enumerate(district):
if 'Phone' in line:
phone = line.split(':')[1].replace('(', '').replace(') ', '-').strip()
if phone:
contacts['voice'] = phone
if 'Fax' in line:
fax = line.split(':')[1].replace('(', '').replace(') ', '-').strip()
if fax:
contacts['fax'] = fax
if 'E-Mail' in line:
email = line.split(':')[1].strip()
if email:
contacts['email'] = email
if 'Address' in line and line.split(':')[1].strip():
address = line.split(':')[1].strip() + ', ' + ', '.join(district[i + 1:]).replace(' ,', '')
contacts['address'] = address
if 'Mayor' in line or 'Councillor' in line or 'Alderman' in line:
councillor = line.split(':')[1].replace('Mr.', '').replace('Mrs.', '').replace('Ms.', '').replace('His Worship', '').replace('Her Worship', '').strip()
role = line.split(':')[0].strip()
if councillor:
councillors.append([councillor, role])
if not councillors:
continue
yield org
for councillor in councillors:
p = Person(primary_org='legislature', name=councillor[0], district=district_name)
p.add_source(COUNCIL_PAGE)
membership = p.add_membership(org, role=councillor[1], district=district_name)
for key, value in contacts.items():
membership.add_contact_detail(key, value, '' if key == 'email' else 'legislature')
yield p
os.system('rm /tmp/sk.pdf')
|
LaurentClaessens/mazhe | src_yanntricks/yanntricksKScolorD.py | Python | gpl-3.0 | 526 | 0.045627 | from yanntricks import *
def KScolo | rD():
pspict,fig = SinglePicture("KScolorD")
pspict.dilatation(1)
x=var('x')
C=Circle(Point(0,0),1)
N1=C.graph(90,180)
N2=C.graph(270,360)
C.parameters.color="blue"
N1.parameters.color="black"
N2.parameters.color=N1.parameters.color
N1.wave(0.1,0.2)
#N2.wave(0.1,0.2)
N=Point(0,1)
S=Point(0,-1)
pspict.axes.no_graduation()
| pspict.DrawGraphs(C,N1,N2,N,S)
pspict.DrawDefaultAxes()
fig.conclude()
fig.write_the_file()
|
odoousers2014/LibrERP | l10n_it_sale/wizard/confirmation.py | Python | agpl-3.0 | 3,636 | 0.002475 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2013-2014 Didotech SRL (info at didotech.com)
# All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
## | ############################################################################
from openerp.osv import orm, fields
import decimal_precision as dp
import netsvc
from tools import ustr
class sale_order_confirm(orm.TransientModel):
_inherit = "sale.order.confirm"
_columns = {
'cig': fields.char('C | IG', size=64, help="Codice identificativo di gara"),
'cup': fields.char('CUP', size=64, help="Codice unico di Progetto")
}
# def default_get(self, cr, uid, fields, context=None):
# sale_order_obj = self.pool['sale.order']
# if context is None:
# context = {}
#
# res = super(sale_order_confirm, self).default_get(cr, uid, fields, context=context)
# sale_order_data = sale_order_obj.browse(cr, uid, context['active_ids'][0], context)
#
# res['cup'] = sale_order_data.cig
# res['cig'] = sale_order_data.cup
#
# return res
def sale_order_confirmated(self, cr, uid, ids, context=None):
sale_order_obj = self.pool['sale.order']
result = super(sale_order_confirm, self).sale_order_confirmated(cr, uid, ids, context=context)
sale_order_confirm_data = self.browse(cr, uid, ids[0], context=context)
if result.get('res_id'):
sale_order_obj.write(cr, uid, result['res_id'], {
'cig': sale_order_confirm_data.cig,
'cup': sale_order_confirm_data.cup,
}, context=context)
else:
sale_order_obj.write(cr, uid, context['active_ids'][0], {
'cig': sale_order_confirm_data.cig,
'cup': sale_order_confirm_data.cup,
}, context=context)
for order in sale_order_obj.browse(cr, uid, [result.get('res_id') or context['active_ids'][0]], context=context):
# partner = self.pool['res.partner'].browse(cr, uid, order.partner_id.id)
picking_obj = self.pool['stock.picking']
picking_ids = picking_obj.search(cr, uid, [('sale_id', '=', order.id)], context=context)
for picking_id in picking_ids:
picking_obj.write(cr, uid, picking_id, {
'cig': sale_order_confirm_data.cig or '',
'cup': sale_order_confirm_data.cup or ''
}, context=context)
return result
|
calebjordan/klayout-macros | pymacros/Make Layer Cells.py | Python | mit | 2,057 | 0.017501 | # $description: Split into layer cells
# $autorun
# $show-in-menu
import pya
import sys
sys.stderr = sys.s | tdout
class MenuAction(pya.Action):
def __init__(self, title, shortcut, action):
self.title = title
self.shortcut = shortcut
self.action = action
def triggered(self):
self.action()
def make_layer_cells():
#Load Vie | w
app = pya.Application.instance()
mw = app.main_window()
lv = mw.current_view()
ly = lv.active_cellview().layout()
dbu = ly.dbu
if lv==None:
raise Exception("No view selected")
cv = lv.cellview(lv.active_cellview_index())
#Loop through layers
for layer in [1,2,3]:
new_cell = ly.create_cell(cv.cell.display_title() + "L" + str(layer))
# Loop through instances
for inst in cv.cell.each_inst():
#Calculate location of instances
itrans = pya.ICplxTrans.from_trans(pya.CplxTrans())
box = inst.bbox().transformed(itrans)
x = box.center().x
y = box.center().y
#Create new cell to represent given layer
new_subcell = ly.create_cell(inst.cell.display_title() + "L" + str(layer))
#Map Bounding box and shape layers to new layer
lm = pya.LayerMapping()
lm.map(ly.layer(1,3), ly.layer(1,3))
lm.map(ly.layer(layer, 0), ly.layer(layer, 0))
lm.map(ly.layer(layer,1), ly.layer(layer, 0))
#Create Instance Array to place into cell
array = pya.CellInstArray()
#Copy shapes, place, and insert
array.cell_index=new_subcell.cell_index()
new_subcell.copy_shapes(inst.cell, lm)
array.trans = pya.Trans(pya.Point(x,y))
new_cell.insert(array)
x = MenuAction("Make Layer Cells", "", make_layer_cells)
app = pya.Application.instance()
mw = app.main_window()
menu = mw.menu()
menu.insert_separator("@hcp_context_menu.end", "sep_layer_cells")
menu.insert_item("@hcp_context_menu.end", "layer_cells", x) |
nucular/AutobahnPython | examples/twisted/websocket/echo_variants/client.py | Python | mit | 2,386 | 0 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Tavendo GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import sys
from twisted.internet import reactor
from twisted.python import log
from autobahn.twisted.websocket import WebSocketClientProtocol, \
WebSocketClientFactory, \
connectWS
class EchoClientProtocol(WebSocketClientProtocol):
def sendHello(self):
self.sendMessage("Hello, world!".encode('utf8'))
def onOpen(self):
self.sendHello()
def onMessage(self, payload, isBinary):
if not isBinary:
print("Text message received: {}".format(payload | .decode('utf8')))
reactor.callLater(1, self.sendHello)
if __name__ == '__main__':
if len(sys.argv) < 2:
print("Need the WebSocket server address, i.e. ws://127.0.0.1:9000")
sys.exit(1)
if len(sys.argv) > 2 and sys.argv[2] == 'debug':
log.startLogging(sys.stdout) |
debug = True
else:
debug = False
factory = WebSocketClientFactory(sys.argv[1],
debug=debug,
debugCodePaths=debug)
factory.protocol = EchoClientProtocol
connectWS(factory)
reactor.run()
|
xapi-project/sm | tests/test_lvmlib.py | Python | lgpl-2.1 | 6,561 | 0 | import unittest
import mock
import lvmlib
class ExecResultMixIn(object):
def assertExecutionSucceeded(self, exec_result):
returncode, stdout, stderr = exec_result
self.assertEquals(0, returncode)
def assertExecutionFailed(self, exec_result):
returncode, stdout, stderr = exec_result
self.assertEquals(1, returncode)
class TestLVSubSystem(unittest.TestCase, ExecResultMixIn):
def test_lvcreate_is_mocked(self):
executable_injector = mock.Mock()
lvsubsystem = lvmlib.LVSubsystem(None, executable_injector)
self.assertTrue(
mock.call('/usr/sbin/lvcreate', lvsubsystem.fake_lvcreate)
in executable_injector.mock_calls
)
def test_lvremove_is_mocked(self):
executable_injector = mock.Mock()
lvsubsystem = lvmlib.LVSubsystem(None, executable_injector)
self.assertTrue(
mock.call('/usr/sbin/lvremove', lvsubsystem.fake_lvremove)
in executable_injector.mock_calls
)
def test_dmsetup_is_mocked(self):
executable_injector = mock.Mock()
lvsubsystem = lvmlib.LVSubsystem(None, executable_injector)
self.assertTrue(
mock.call('/sbin/dmsetup', lvsubsystem.fake_dmsetup)
in executable_injector.mock_calls
)
def test_add_volume_group(self):
lvsubsystem = lvmlib.LVSubsystem(None, mock.Mock())
lvsubsystem.add_volume_group('vg')
vg = lvsubsystem.get_volume_group('vg')
self.assertEquals('vg', vg.name)
def test_add_multiple_volume_groups(self):
lvsubsystem = lvmlib.LVSubsystem(None, mock.Mock())
lvsubsystem.add_volume_group('vg1')
lvsubsystem.add_volume_group('vg2')
lvsubsystem.add_volume_group('vg3')
vg1 = lvsubsystem.get_volume_group('vg1')
vg2 = lvsubsystem.get_volume_group('vg2')
vg3 = lvsubsystem.get_volume_group('vg3')
self.assertEquals('vg1', vg1.name)
self.assertEquals('vg2', vg2.name)
self.assertEquals('vg3', vg3.name)
def test_fake_lvcreate_creates_volume(self):
lvsubsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock())
vg = lvsubsystem.add_volume_group('vg')
exec_result = lvsubsystem.fake_lvcreate(
"someprog -n name -L 100 vg".split(), '')
lv, = lvsubsystem.get_logical_volumes_with_name('name')
self.assertEquals('name', lv.name)
self.assertEquals(lvsubsystem.get_volume_group('vg'), lv.volume_group)
self.assertTrue(lv.active)
self.assertTrue(lv.zeroed)
self.assertEquals(None, lv.tag)
self.assertEquals(100, lv.size_mb)
def test_fake_lvcreate_with_tags(self):
lvsubsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock())
lvsubsystem.add_volume_group('vg')
exec_result = lvsubsystem.fake_lvcreate(
"someprog -n name --addtag tagg -L 100 vg".split(), '')
lv, = lvsubsystem.get_logical_volumes_with_name('name')
self.assertEquals('tagg', lv.tag)
def test_fake_lvcreate_inactive(self):
    """--inactive leaves the created volume deactivated."""
    lvsubsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock())
    lvsubsystem.add_volume_group('vg')
    exec_result = lvsubsystem.fake_lvcreate(
        "someprog -n name --inactive -L 100 vg".split(), '')
    # Previously exec_result was ignored; assert success for consistency.
    self.assertExecutionSucceeded(exec_result)
    lv, = lvsubsystem.get_logical_volumes_with_name('name')
    self.assertFalse(lv.active)
def test_fake_lvcreate_non_zeroed(self):
    """--zero n creates a volume that is not zeroed."""
    lvsubsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock())
    lvsubsystem.add_volume_group('vg')
    exec_result = lvsubsystem.fake_lvcreate(
        "someprog -n name --zero n -L 100 vg".split(), '')
    # Check the exit status before inspecting the volume.
    self.assertExecutionSucceeded(exec_result)
    lv, = lvsubsystem.get_logical_volumes_with_name('name')
    self.assertFalse(lv.zeroed)
def test_get_the_correct_volume(self):
    """Lookup by name returns the matching volume, not its sibling."""
    lvsubsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock())
    lvsubsystem.add_volume_group('vg')
    # The results were previously bound to unused locals; assert success
    # instead, consistent with the other lvcreate tests.
    self.assertExecutionSucceeded(lvsubsystem.fake_lvcreate(
        "someprog -n name1 --zero n -L 100 vg".split(), ''))
    self.assertExecutionSucceeded(lvsubsystem.fake_lvcreate(
        "someprog -n name2 --zero n -L 200 vg".split(), ''))
    lv, = lvsubsystem.get_logical_volumes_with_name('name1')
    self.assertEqual(100, lv.size_mb)
    lv, = lvsubsystem.get_logical_volumes_with_name('name2')
    self.assertEqual(200, lv.size_mb)
    # Now remove one of them again.
    lvsubsystem.fake_lvremove('someprog vg/name2'.split(), '')
def test_fake_lvcreate_called_with_wrong_params(self):
    """An unknown option makes fake_lvcreate report failure."""
    lvsubsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock())
    lvsubsystem.add_volume_group('vg')
    exec_result = lvsubsystem.fake_lvcreate(
        "someprog --something-stupid -n name n -L 100 vg".split(), '')
    self.assertExecutionFailed(exec_result)
def test_fake_lvcreate_fails_if_no_volume_group_found(self):
    # No volume group was registered, so lvcreate must report failure.
    subsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock())
    result = subsystem.fake_lvcreate(
        "someprog -n name -L 100 nonexisting".split(), '')
    self.assertExecutionFailed(result)
def test_fake_lvremove(self):
    # Removing an existing volume succeeds.
    subsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock())
    subsystem.add_volume_group('vg')
    subsystem.get_volume_group('vg').add_volume('lv', 100)
    result = subsystem.fake_lvremove("someprog vg/lv".split(), '')
    self.assertExecutionSucceeded(result)
def test_fake_lvremove_with_force(self):
    # The -f (force) flag is accepted and removal still succeeds.
    subsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock())
    subsystem.add_volume_group('vg')
    subsystem.get_volume_group('vg').add_volume('lv', 100)
    result = subsystem.fake_lvremove("someprog -f vg/lv".split(), '')
    self.assertExecutionSucceeded(result)
def test_fake_lvremove_with_bad_params(self):
    # An unrecognized option makes fake_lvremove report failure.
    subsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock())
    subsystem.add_volume_group('vg')
    subsystem.get_volume_group('vg').add_volume('lv', 100)
    result = subsystem.fake_lvremove(
        "someprog -f vg/lv --stupid-parameter".split(), '')
    self.assertExecutionFailed(result)
def test_fake_dmsetup_status_returns_zero(self):
    # "dmsetup status" always reports success in the fake.
    subsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock())
    result = subsystem.fake_dmsetup("someprog status".split(), '')
    self.assertExecutionSucceeded(result)
|
motasay/twitter-sentiment | src/evaluation.py | Python | gpl-3.0 | 1,524 | 0.003937
import logging
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import explained_variance_score
from sklearn.metrics import mean_absolute_error
from sklearn.metrics import mean_squared_error
from sklearn.metrics import r2_score
import matplotlib.pyplot as plt
def print_evaluations(Y_true, Y_pred, classification=True):
    """Log evaluation metrics for a set of predictions.

    Y_true / Y_pred: array-likes accepted by sklearn.metrics functions.
    classification: when True, log a classification report and confusion
        matrix; otherwise log regression metrics (explained variance,
        MAE, MSE, R^2).
    """
    if classification:
        report = classification_report(Y_true, Y_pred)
        # Lazy %-args let logging skip formatting when the level is off;
        # this also drops the stray " | " garbage from the old message.
        logging.info('Classification report:\n%s', report)
        cm = confusion_matrix(Y_true, Y_pred)
        logging.info('Confusion Matrix:\n%s', cm)
        # Plotting of the confusion matrix is currently disabled:
        # fig = plt.figure()
        # ax = fig.add_subplot(111)
        # cax = ax.matshow(cm)
        # fig.colorbar(cax)
        #
        # ax.set_xticklabels([''] + ['-1', '0', '1'])
        # ax.set_yticklabels([''] + ['-1', '0', '1'])
        #
        # plt.title('Confusion Matrix')
        # plt.ylabel('True label')
        # plt.xlabel('Predicted label')
        # plt.show(block=False)
    else:
        var = explained_variance_score(Y_true, Y_pred)
        logging.info('Explained variance (best=1.0): %f', var)
        mae = mean_absolute_error(Y_true, Y_pred)
        logging.info('Mean absolute error (best=0.0): %f', mae)
        mse = mean_squared_error(Y_true, Y_pred)
        logging.info('Mean squared error (best=0.0): %f', mse)
        r2 = r2_score(Y_true, Y_pred)
        logging.info('R squared score (best=1.0): %f', r2)
|
Quantify-world/apification | src/apification/renderers.py | Python | mit | 101 | 0.009901
import json
class JSONRenderer(object):
    """Renders Python data structures as JSON strings."""

    def render(self, data):
        """Serialize *data* to a JSON string via json.dumps."""
        return json.dumps(data)
|
aawc/ProjectEuler | 05/evenlyDivisible.py | Python | mit | 1,404 | 0.026353 | #!/bin/python
import math
def SumOfFactors(f_1, f_2):
    """Merge factor-multiplicity dict f_2 into f_1 (in place) and return f_1.

    Keys are prime factors, values their multiplicities; multiplicities of
    factors present in both dicts are added together.
    """
    for key, count in f_2.items():
        # dict.get avoids the explicit membership test of the original.
        f_1[key] = f_1.get(key, 0) + count
    return f_1
def PrimeFactorsOfNumber(n):
    """Return a dict mapping each prime factor of n to its multiplicity."""
    if n < 2:
        # 1 (and 0) have no prime factors. This also guards the
        # perfect-square branch below from recursing forever on n == 1.
        return {}
    sqrt_n = int(math.sqrt(n))
    if n == sqrt_n * sqrt_n:
        # Perfect square: factor the root and double every multiplicity.
        return SumOfFactors(
            PrimeFactorsOfNumber(sqrt_n),
            PrimeFactorsOfNumber(sqrt_n))
    for i in range(2, sqrt_n + 1):
        if n % i == 0:
            # n isn't prime: factor both halves. // keeps the operand an
            # int (true division would create float keys on Python 3).
            return SumOfFactors(
                PrimeFactorsOfNumber(i),
                PrimeFactorsOfNumber(n // i))
    # n is prime.
    return {n: 1}
def EvenlyDivisible(largest):
    """Return the smallest number evenly divisible by every i in 2..largest.

    For each prime, keeps the maximum multiplicity it has in any factorization
    of 3..largest, then multiplies the primes back together.
    """
    prime_factors = {}
    for i in range(largest, 2, -1):
        for prime_factor, value in PrimeFactorsOfNumber(i).items():
            # Keep the largest multiplicity seen for this prime.
            if value > prime_factors.get(prime_factor, 0):
                prime_factors[prime_factor] = value
    # Integer exponentiation: math.pow would return an inexact float.
    product = 1
    for prime_factor, value in prime_factors.items():
        product *= prime_factor ** value
    return product
def main():
    """Project Euler problem 5: smallest number divisible by 1..20."""
    # print() as a function works on both Python 2 and Python 3.
    print(EvenlyDivisible(20))


if __name__ == '__main__':
    main()
|
youtube/cobalt | third_party/libvpx/tools/3D-Reconstruction/MotionEST/Exhaust.py | Python | bsd-3-clause | 8,571 | 0.007584 | ## Copyright (c) 2020 The WebM project authors. All Rights Reserved.
##
## Use of this source code is governed by a BSD-style license
## that can be found in the LICENSE file in the root of the source
## tree. An additional intellectual property rights grant can be found
## in the file PATENTS. All contributing project authors may
## be found in the AUTHORS file in the root of the source tree.
##
# coding: utf-8
import numpy as np
import numpy.linalg as LA
from Util import MSE
from MotionEST import MotionEST
"""Exhaust Search:"""
class Exhaust(MotionEST):
"""
Constructor:
cur_f: current frame
ref_f: reference frame
blk_sz: block size
wnd_size: search window size
metric: metric to compare the blocks distrotion
"""
def __init__(self, cur_f, ref_f, blk_size, wnd_size, metric=MSE):
self.name = 'exhaust'
self.wnd_sz = wnd_size
self.metric = metric
super(Exhaust, self).__init__(cur_f, ref_f, blk_size)
"""
search method:
cur_r: start row
cur_c: start column
"""
def search(self, cur_r, cur_c):
min_loss = self.block_dist(cur_r, cur_c, [0, 0], self.metric)
cur_x = cur_c * self.blk_sz
cur_y = cur_r * self.blk_sz
ref_x = cur_x
ref_y = cur_y
#search all validate positions and select the one with minimum distortion
for y in xrange(cur_y - self.wnd_sz, cur_y + self.wnd_sz):
for x in xrange(cur_x - self.wnd_sz, cur_x + self.wnd_sz):
if 0 <= x < self.width - self.blk_sz and 0 <= y < self.height - self.blk_sz:
loss = self.block_dist(cur_r, cur_c, [y - cur_y, x - cur_x],
self.metric)
if loss < min_loss:
min_loss = loss
ref_x = x
ref_y = y
return ref_x, ref_y
def motion_field_estimation(self):
for i in xrange(self.num_row):
for j in xrange(self.num_col):
ref_x, ref_y = self.search(i, j)
self.mf[i, j] = np.array(
[ref_y - i * self.blk_sz, ref_x - j * self.blk_sz])
"""Exhaust with Neighbor Constraint"""
class ExhaustNeighbor(MotionEST):
"""
Constructor:
cur_f: current frame
ref_f: reference frame
blk_sz: block size
wnd_size: search window size
beta: neigbor loss weight
metric: metric to compare the blocks distrotion
"""
def __init__(self, cur_f, ref_f, blk_size, wnd_size, beta, metric=MSE):
self.name = 'exhaust + neighbor'
self.wnd_sz = wnd_size
self.beta = beta
self.metric = metric
super(ExhaustNeighbor, self).__init__(cur_f, ref_f, blk_size)
self.assign = np.zeros((self.num_row, self.num_col), dtype=np.bool)
"""
estimate neighbor loss:
cur_r: current row
cur_c: current column
mv: current motion vector
"""
def neighborLoss(self, cur_r, cur_c, mv):
loss = 0
#accumulate difference between current block's motion vector with neighbors'
for i, j in {(-1, 0), (1, 0), (0, 1), (0, -1)}:
nb_r = cur_r + i
nb_c = cur_c + j
if 0 <= nb_r < self.num_row and 0 <= nb_c < self.num_col and self.assign[
nb_r, nb_c]:
loss += LA.norm(mv - self.mf[nb_r, nb_c])
return loss
"""
search method:
cur_r: start row
cur_c: start column
"""
def search(self, cur_r, cur_c):
dist_loss = self.block_dist(cur_r, cur_c, [0, 0], self.metric)
nb_loss = self.neighborLoss(cur_r, cur_c, np.array([0, 0]))
min_loss = dist_loss + self.beta * nb_loss
cur_x = cur_c * self.blk_sz
cur_y = cur_r * self.blk_sz
ref_x = cur_x
ref_y = cur_y
#search all validate positions and select the one with minimum distortion
# as well as weighted neighbor loss
for y in xrange(cur_y - self.wnd_sz, cur_y + self.wnd_sz):
for x in xrange(cur_x - self.wnd_sz, cur_x + self.wnd_sz):
if 0 <= x < self.width - self.blk_sz and 0 <= y < self.height - self.blk_sz:
dist_loss = self.block_dist(cur_r, cur_c, [y - cur_y, x - cur_x],
self.metric)
nb_loss = self.neighborLoss(cur_r, cur_c, [y - cur_y, x - cur_x])
loss = dist_loss + self.beta * nb_loss
if loss < min_loss:
min_loss = loss
ref_x = x
ref_y = y
return ref_x, ref_y
def motion_field_estimation(self):
for i in xrange(self.num_row):
for j in xrange(self.num_col):
ref_x, ref_y = self.search(i, j)
self.mf[i, j] = np.array(
[ref_y - i * self.blk_sz, ref_x - j * self.blk_sz])
self.assign[i, j] = True
"""Exhaust with Neighbor Constraint and Feature Score"""
class Exhau | stNeighborFeatureScore(MotionEST):
"""
Constructor:
cur_f: current frame
ref_f: reference frame
blk_sz: block size
wnd_size: search window size
| beta: neigbor loss weight
max_iter: maximum number of iterations
metric: metric to compare the blocks distrotion
"""
def __init__(self,
cur_f,
ref_f,
blk_size,
wnd_size,
beta=1,
max_iter=100,
metric=MSE):
self.name = 'exhaust + neighbor+feature score'
self.wnd_sz = wnd_size
self.beta = beta
self.metric = metric
self.max_iter = max_iter
super(ExhaustNeighborFeatureScore, self).__init__(cur_f, ref_f, blk_size)
self.fs = self.getFeatureScore()
"""
get feature score of each block
"""
def getFeatureScore(self):
fs = np.zeros((self.num_row, self.num_col))
for r in xrange(self.num_row):
for c in xrange(self.num_col):
IxIx = 0
IyIy = 0
IxIy = 0
#get ssd surface
for x in xrange(self.blk_sz - 1):
for y in xrange(self.blk_sz - 1):
ox = c * self.blk_sz + x
oy = r * self.blk_sz + y
Ix = self.cur_yuv[oy, ox + 1, 0] - self.cur_yuv[oy, ox, 0]
Iy = self.cur_yuv[oy + 1, ox, 0] - self.cur_yuv[oy, ox, 0]
IxIx += Ix * Ix
IyIy += Iy * Iy
IxIy += Ix * Iy
#get maximum and minimum eigenvalues
lambda_max = 0.5 * ((IxIx + IyIy) + np.sqrt(4 * IxIy * IxIy +
(IxIx - IyIy)**2))
lambda_min = 0.5 * ((IxIx + IyIy) - np.sqrt(4 * IxIy * IxIy +
(IxIx - IyIy)**2))
fs[r, c] = lambda_max * lambda_min / (1e-6 + lambda_max + lambda_min)
if fs[r, c] < 0:
fs[r, c] = 0
return fs
"""
do exhaust search
"""
def search(self, cur_r, cur_c):
min_loss = self.block_dist(cur_r, cur_c, [0, 0], self.metric)
cur_x = cur_c * self.blk_sz
cur_y = cur_r * self.blk_sz
ref_x = cur_x
ref_y = cur_y
#search all validate positions and select the one with minimum distortion
for y in xrange(cur_y - self.wnd_sz, cur_y + self.wnd_sz):
for x in xrange(cur_x - self.wnd_sz, cur_x + self.wnd_sz):
if 0 <= x < self.width - self.blk_sz and 0 <= y < self.height - self.blk_sz:
loss = self.block_dist(cur_r, cur_c, [y - cur_y, x - cur_x],
self.metric)
if loss < min_loss:
min_loss = loss
ref_x = x
ref_y = y
return ref_x, ref_y
"""
add smooth constraint
"""
def smooth(self, uvs, mvs):
sm_uvs = np.zeros(uvs.shape)
for r in xrange(self.num_row):
for c in xrange(self.num_col):
avg_uv = np.array([0.0, 0.0])
for i, j in {(r - 1, c), (r + 1, c), (r, c - 1), (r, c + 1)}:
if 0 <= i < self.num_row and 0 <= j < self.num_col:
avg_uv += uvs[i, j] / 6.0
for i, j in {(r - 1, c - 1), (r - 1, c + 1), (r + 1, c - 1),
(r + 1, c + 1)}:
if 0 <= i < self.num_row and 0 <= j < self.num_col:
avg_uv += uvs[i, j] / 12.0
sm_uvs[r, c] = (self.fs[r, c] * mvs[r, c] + self.beta * avg_uv) / (
self.beta + self.fs[r, c])
return sm_uvs
def motion_field_estimation(self):
#get matchin |
hanw/p4-hlir | p4_hlir/hlir/exclusive_conditions.py | Python | apache-2.0 | 8,296 | 0.006027 | # Copyright 2013-present Barefoot Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import p4
from collections import defaultdict
def _get_extracted_headers(parse_state):
    # NOTE(review): stub — always returns an empty set and ignores
    # parse_state, so _find_compatible_headers never records any pairs
    # through this path. The commented-out helper below suggests this is
    # work in progress; confirm intended behavior.
    extracted = set()
    return extracted
# def _get_hdr_name(hdr):
# if hdr.virtual:
# return hdr.base_name
# elif hdr.index is not None:
# return hdr.base_name
# else:
# return hdr.name
def _find_parser_paths(hlir):
    """Walk the parse graph from "start" and collect, for every complete
    path, the set of header instances extracted along it.

    Returns a list of sets of p4_header_instance objects (one set per path).
    """
    def _find_paths(state, paths, current_path, path_hdrs, tag_stacks_index):
        try:
            next_states = set(state.branch_to.values())
        except:
            # Terminal state (no branch_to): record this path's headers.
            paths.append(path_hdrs)
            return
        extracted_headers = set()
        for call in state.call_sequence:
            if call[0] == p4.parse_call.extract:
                hdr = call[1]
                if hdr.virtual:
                    # Header stack ("next"): resolve to the concrete
                    # instance at the current stack index for this path.
                    base_name = hdr.base_name
                    current_index = tag_stacks_index[base_name]
                    if current_index > hdr.max_index:
                        # Stack overflowed: terminate this path here.
                        paths.append(path_hdrs)
                        return
                    tag_stacks_index[base_name] += 1
                    name = base_name + "[%d]" % current_index
                    hdr = hlir.p4_header_instances[name]
                extracted_headers.add(hdr)
        if len(extracted_headers & path_hdrs) != 0:
            # A header re-extracted on this path: treat as a cycle and stop.
            paths.append(extracted_headers | path_hdrs)
            return
        for next_state in next_states:
            # Copy the stack indices so sibling branches are independent.
            _find_paths(next_state, paths, current_path + [state],
                        extracted_headers | path_hdrs, tag_stacks_index.copy())
    paths = []
    start_state = hlir.p4_parse_states["start"]
    _find_paths(start_state, paths, [], set(), defaultdict(int))
    return paths
def _find_compatible_headers(hlir):
    """Return the set of (hdr1, hdr2) pairs that can appear together on some
    parser path (recorded symmetrically in both orders).

    NOTE(review): relies on _get_extracted_headers, which currently returns
    an empty set, so this always yields an empty result; the class Solver
    below uses _find_parser_paths instead.
    """
    def _find_rec(state, current_path, path_hdrs, compatibles):
        # Avoid revisiting a state already on this path (cycle guard).
        if state in current_path: return
        try:
            next_states = set(state.branch_to.values())
        except:
            # Terminal state: nothing more to pair up.
            return
        extracted_headers = _get_extracted_headers(state)
        for hdr1, hdr2 in itertools.product(path_hdrs, extracted_headers):
            compatibles.add( (hdr1, hdr2) )
            compatibles.add( (hdr2, hdr1) )
        for next_state in next_states:
            _find_rec(next_state, current_path + [state],
                      path_hdrs | extracted_headers, compatibles)
    compatibles = set()
    start_state = hlir.p4_parse_states["start"]
    _find_rec(start_state, [], set(), compatibles)
    return compatibles
def _get_headers_in_condition(p4_expression, hdrs):
    """Add to *hdrs* every header referenced by a "valid" node anywhere in
    the expression tree rooted at *p4_expression* (mutates hdrs in place)."""
    try:
        if p4_expression.op == "valid":
            hdrs.add(p4_expression.right)
        for child in (p4_expression.left, p4_expression.right):
            _get_headers_in_condition(child, hdrs)
    except AttributeError:
        # Leaf node (no op/left/right attributes): nothing to collect.
        return
class Solver():
TRUE = 0
FALSE = 1
DONT_KNOW = 2
def __init__(self, hlir):
self.hlir = hlir
# self.compatible_headers = _find_compatible_headers(hlir)
self.paths = _find_parser_paths(hlir)
self.compatible_headers = {}
self.implied_headers = {}
all_headers = set()
for _, hdr in hlir.p4_header_instances.items():
if hdr.metadata or hdr.virtual: continue
all_headers.add(hdr)
for _, hdr in hlir.p4_header_instances.items():
if hdr.metadata or hdr.virtual: continue
self.compatible_headers[hdr] = set()
self.implied_headers[hdr] = all_headers.copy()
for path in self.paths:
for hdr in path:
self.compatible_headers[hdr] |= path
self.implied_headers[hdr] &= path
# print "COMPATIBLE_HEADERS"
# for hdr, s in self.compatible_headers.items():
# print hdr, ":", [str(h) for h in s]
# print "IMPLIED_HEADERS"
# for hdr, s in self.implied_headers.items():
# print hdr, ":", [str(h) for h in s]
def _check_header_values_coherent(self, hdrs_valid):
for hdr1, hdr2 in itertools.product(hdrs_valid, repeat = 2):
if hdr2 not in self.compatible_headers[hdr1] and\
hdrs_valid[hdr1] and hdrs_valid[hdr2]:
return False
if hdr1 in self.implied_headers[hdr2] and\
hdrs_valid[hdr2] and not hdrs_valid[hdr1]:
return False
if hdr2 in self.implied_headers[hdr1] and\
hdrs_valid[hdr1] and not hdrs_valid[hdr2]:
return False
return True
def _check_condition(self, c, hdrs_valid):
if not c: return Solver.TRUE
if c.op == "valid":
if hdrs_valid[c.right]:
return Solver.TRUE
else:
return Solver.FALSE
elif c.op == "and":
left = self._check_condition(c.left, hdrs_valid)
right = self._check_condition(c.right, hdrs_valid)
if left == Solver.TRUE and right == Solver.TRUE: return Solver.TRUE
if left == Solver.FALSE or right == Solver.FALSE: return Solver.FALSE
return Solver.DONT_KNOW
elif c.op == "or":
left = self._check_condition(c.left, hdrs_valid)
right = self._check_condition(c.right, hdrs_valid)
if left == Solver.TRUE or right == Solver.TRUE: return Solver.TRUE
if left == Solver.FALSE and right == Solver.FALSE: return Solver.FALSE
return Solver.DONT_KNOW
elif c.op == "not":
right = self._check_condition(c.right, hdrs_valid)
if right == Solver.TRUE: return Solver.FALSE
if right == Solver.FALSE: return Solver.TRUE
return Solver.DONT_KNOW
return Solver.DONT_KNOW
# unknonw_cond is a condition (p4_expression) we want to evaluate
# known_conds is a list of 2-tuples (condition, value), where condition is a
# p4_expression and value the boolean value of condition
def evaluate_condition(self, dangerous_hdrs,
unknown_cond, known_conds):
used_hdrs = set()
_get_headers_in_condition(unknown_cond, used_hdrs)
if known_conds:
for c in zip(*known_conds)[0]:
_get_headers_in_condition(c, used_hdrs)
if (used_hdrs & dangerous_hdrs): return False
used_hdrs_ordered = list(used_hdrs)
used_hdrs_valid = {}
num_used_hdrs = len(used_hdrs)
result = None
for values in itertools.product([True, False], repeat = num_used_hdrs):
for idx, hdr in enumerate(used_hdrs_ordered):
used_hdrs_valid[hdr] = values[idx]
if not self._check_header_values_coherent(used_hdrs_valid): continue
violated = False
for known_c, value in known_conds:
check_c = self._check_condition(known_c, used_hdrs_valid)
if check_c == Solver.FALSE and value:
violated = True
break
elif check_c == Solver.TRUE and not value:
violated = True
break
elif check_c == Solver.DONT_KNOW:
| pass
if violated:
cont | inue
unknown_value = self._check_condition(unknown_cond, used_hdrs_valid)
if unknown_value == Solver.DONT_KNOW: return None
if result is None:
result = unknown_value
elif result != unknown_value:
return None
if result == Solver |
restran/api-gateway-dashboard | dashboard/forms.py | Python | mit | 3,021 | 0.002025 | # !/usr/bin/env python
# -*- coding: utf-8 -*-
# created by restran on 2016/1/2
from __future__ import unicode_literals, absolute_import
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import *
from common.forms import BaseModelForm
logger = logging.getLogger(__name__)
class ClientForm(BaseModelForm):
    """Form for creating/editing a Client (API consumer)."""

    class Meta:
        model = Client
        fields = ('name', 'memo', 'enable', "app_id", 'secret_key',
                  'login_auth_url', 'access_token_ex', 'refresh_token_ex',
                  'sms_login_auth_url', 'change_password_url', 'sms_change_password_url')

    def clean_refresh_token_ex(self):
        """Reject a refresh_token lifetime not longer than access_token's."""
        access_token_ex = self.cleaned_data['access_token_ex']
        refresh_token_ex = self.cleaned_data['refresh_token_ex']
        if access_token_ex >= refresh_token_ex:
            raise forms.ValidationError(_('refresh_token 的过期时间不能小于 access_token'))
        return refresh_token_ex
# Explicit field ordering for the rendered form.
# NOTE(review): ``base_fields.keyOrder`` is the SortedDict API from
# Django < 1.7; on newer Django this assignment has no effect. Also, 'url'
# is listed here but is not in ClientForm.Meta.fields — confirm intent.
ClientForm.base_fields.keyOrder = [
    'name', 'memo', 'url', 'enable', 'app_id',
    'secret_key', 'login_auth_url', 'access_token_ex',
    'refresh_token_ex', 'sms_login_auth_url', 'sms_change_password_url',
    'change_password_url'
]
#
# class ClientEndpointForm(BaseModelForm):
# class Meta:
# model = Client
# fields = ('name', 'memo', 'enable', 'access_key', 'secret_key')
class EndpointForm(BaseModelForm):
    """Form for creating/editing an Endpoint (proxied backend service)."""

    def __init__(self, *args, **kwargs):
        # No extra behavior; kept as an explicit extension point.
        super(EndpointForm, self).__init__(*args, **kwargs)

    class Meta:
        model = Endpoint
        fields = ('name', 'is_builtin', 'url', 'unique_name', 'enable_acl', 'version',
                  'async_http_connect_timeout', 'async_http_request_timeout', 'enable_hmac',
                  'memo', 'require_login')

    def clean_url(self):
        # A URL may be left blank only for built-in endpoints.
        is_builtin = self.cleaned_data['is_builtin']
        url = self.cleaned_data['url']
        if not is_builtin and (url is None or url == ''):
            raise forms.ValidationError(_('Endpoint URL 不能为空'))
        else:
            return url

    def clean_unique_name(self):
        # Enforce uniqueness of unique_name; when editing an existing
        # Endpoint (self.instance set), the instance itself is excluded
        # from the duplicate check.
        unique_name = self.cleaned_data['unique_name']
        if self.instance is not None:
            sites = Endpoint.objects.filter(unique_name=unique_name).values('id')
            for t in sites:
                if t['id'] != self.instance.id:
                    raise forms.ValidationError(_('已存在相同名称的 Endpoint'))
        else:
            sites = Endpoint.objects.filter(unique_name=unique_name).values('id')
            if len(sites) > 0:
                raise forms.ValidationError(_('已存在相同名称的 Endpoint'))
        return unique_name
# Explicit field ordering for the rendered form (Django < 1.7 keyOrder API).
# NOTE(review): 'prefix_uri' appears here but not in Meta.fields, while
# 'version' is in Meta.fields only — confirm which list is correct.
EndpointForm.base_fields.keyOrder = [
    'name', 'unique_name', 'is_builtin', 'url', 'prefix_uri', 'enable_acl',
    'async_http_connect_timeout', 'async_http_request_timeout', 'enable_hmac',
    'memo', 'require_login']
class ACLRuleForm(BaseModelForm):
    """Form for a single ACLRule (fields: re_uri — presumably a URI pattern,
    confirm against the ACLRule model — and the is_permit flag)."""

    class Meta:
        model = ACLRule
        fields = ('re_uri', 'is_permit')
|
tta/gnuradio-tta | gr-qtgui/python/__init__.py | Python | gpl-3.0 | 944 | 0.006356 | #
# Copyright 2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING | . If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# The presence of this file turns th | is directory into a Python package
from qtgui_swig import *
import qtgui_swig as qtgui # to preserve the old interface
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.