| code (string, length 3-1.05M) | repo_name (string, length 5-104) | path (string, length 4-251) | language (1 class) | license (15 classes) | size (int64, 3-1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/python
# Copyright 2016 Joel Dunham
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Basic command-line interface to a configured, compiled and exported OLD parser.
Usage:
$ ./parse.py wordi (wordj ... wordn)
This script is intended to be included in the .zip archive returned by an OLD application
when GET /morphologicalparsers/id/export is requested on the fully generated and
compiled morphological parser with id ``id``. It expects all requisite files for the parser
and its sub-objects (e.g., the compiled morphophonology foma script, the pickled LM Trie, the
lexicon and dictionary pickle files, if needed, etc.) as well as a configuration pickle file
(i.e., config.pickle) to be present in the current working directory.
The code for the parser functionality is all located in ``parser.py``, which is the same as
that used by an OLD web application.
Note that the included simplelm module is a somewhat modified version of the one available at
<<URL>>.
"""
import os
import sys
import cPickle
# Add the directory containing this script to the module search path.
# This is necessary so that the local ``parser`` module can be imported.
script_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.append(script_dir)
import parser
if 'PhonologyFST' not in dir(parser):
    # Import the *local* parser module
    import imp
    parser_module_path = os.path.join(script_dir, 'parser.py')
    parser = imp.load_source(os.path.dirname(__file__), parser_module_path)
config_file = 'config.pickle'
config_path = os.path.join(script_dir, config_file)
config = cPickle.load(open(config_path, 'rb'))
cache_file = 'cache.pickle'
cache_path = os.path.join(script_dir, cache_file)
phonology = parser.PhonologyFST(
    parent_directory = script_dir,
    word_boundary_symbol = config['phonology']['word_boundary_symbol']
)
morphology = parser.MorphologyFST(
    parent_directory = script_dir,
    word_boundary_symbol = config['morphology']['word_boundary_symbol'],
    rare_delimiter = config['morphology']['rare_delimiter'],
    rich_upper = config['morphology']['rich_upper'],
    rich_lower = config['morphology']['rich_lower'],
    rules_generated = config['morphology']['rules_generated']
)
language_model = parser.LanguageModel(
    parent_directory = script_dir,
    rare_delimiter = config['language_model']['rare_delimiter'],
    start_symbol = config['language_model']['start_symbol'],
    end_symbol = config['language_model']['end_symbol'],
    categorial = config['language_model']['categorial']
)
parser = parser.MorphologicalParser(
    parent_directory = script_dir,
    word_boundary_symbol = config['parser']['word_boundary_symbol'],
    morpheme_delimiters = config['parser']['morpheme_delimiters'],
    phonology = phonology,
    morphology = morphology,
    language_model = language_model,
    cache = parser.Cache(path=cache_path)
)
if __name__ == '__main__':
    inputs = sys.argv[1:]
    for input_ in inputs:
        parse = parser.pretty_parse(input_)[input_]
        if parse:
            print u'%s %s' % (input_, u' '.join(parse))
        else:
            print u'%s No parse' % input_
| jrwdunham/old | onlinelinguisticdatabase/lib/parse.py | Python | apache-2.0 | 3,675 |
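For reference, the exported parser above can also be driven from other Python 2 code rather than via the command line. The following is a minimal sketch only: it assumes, as the docstring requires, that config.pickle and the compiled parser files sit next to parse.py, and the input word is purely hypothetical.

# -*- coding: utf-8 -*-
import parse  # importing the script above builds the configured parser

word = u'chiens'  # hypothetical input word
candidates = parse.parser.pretty_parse(word).get(word)
if candidates:
    print u'%s %s' % (word, u' '.join(candidates))
else:
    print u'%s No parse' % word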
import os
from time import strftime
from enigma import iPlayableService, eTimer, eServiceCenter, iServiceInformation, ePicLoad
from ServiceReference import ServiceReference
from Screens.Screen import Screen
from Screens.HelpMenu import HelpableScreen
from Screens.MessageBox import MessageBox
from Screens.InputBox import InputBox
from Screens.ChoiceBox import ChoiceBox
from Screens.InfoBarGenerics import InfoBarSeek, InfoBarAudioSelection, InfoBarCueSheetSupport, InfoBarNotifications, InfoBarSubtitleSupport
from Components.ActionMap import NumberActionMap, HelpableActionMap
from Components.Label import Label
from Components.Pixmap import Pixmap,MultiPixmap
from Components.FileList import FileList
from Components.MediaPlayer import PlayList
from Components.ServicePosition import ServicePositionGauge
from Components.ServiceEventTracker import ServiceEventTracker, InfoBarBase
from Components.Playlist import PlaylistIOInternal, PlaylistIOM3U, PlaylistIOPLS
from Components.AVSwitch import AVSwitch
from Components.Harddisk import harddiskmanager
from Components.config import config
from Tools.Directories import fileExists, pathExists, resolveFilename, SCOPE_CONFIG, SCOPE_PLAYLIST, SCOPE_CURRENT_SKIN
from settings import MediaPlayerSettings
from Screens.InfoBar import MoviePlayer
import random
class ExMoviePlayer(MoviePlayer):
def __init__(self, session, service):
self.session = session
MoviePlayer.__init__(self, session, service)
self.skinName = "MoviePlayer"
MoviePlayer.WithoutStopClose = True
def doEofInternal(self, playing):
self.leavePlayer()
def leavePlayer(self):
list = ((_("Yes"), "y"), (_("No"), "n"),)
self.session.openWithCallback(self.cbDoExit, ChoiceBox, title=_("Stop playing this movie?"), list = list)
def cbDoExit(self, answer):
answer = answer and answer[1]
if answer == "y":
self.close()
class MyPlayList(PlayList):
def __init__(self):
PlayList.__init__(self)
def PlayListShuffle(self):
random.shuffle(self.list)
self.l.setList(self.list)
self.currPlaying = -1
self.oldCurrPlaying = -1
class MediaPixmap(Pixmap):
def __init__(self):
Pixmap.__init__(self)
self.coverArtFileName = ""
self.picload = ePicLoad()
self.picload.PictureData.get().append(self.paintCoverArtPixmapCB)
self.coverFileNames = ["folder.png", "folder.jpg"]
def applySkin(self, desktop, screen):
from Tools.LoadPixmap import LoadPixmap
noCoverFile = None
if self.skinAttributes is not None:
for (attrib, value) in self.skinAttributes:
if attrib == "pixmap":
noCoverFile = value
break
if noCoverFile is None:
noCoverFile = resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/no_coverArt.png")
self.noCoverPixmap = LoadPixmap(noCoverFile)
return Pixmap.applySkin(self, desktop, screen)
def onShow(self):
Pixmap.onShow(self)
sc = AVSwitch().getFramebufferScale()
#0=Width 1=Height 2=Aspect 3=use_cache 4=resize_type 5=Background(#AARRGGBB)
self.picload.setPara((self.instance.size().width(), self.instance.size().height(), sc[0], sc[1], False, 1, "#00000000"))
def paintCoverArtPixmapCB(self, picInfo=None):
ptr = self.picload.getData()
if ptr != None:
self.instance.setPixmap(ptr.__deref__())
def updateCoverArt(self, path):
while not path.endswith("/"):
path = path[:-1]
new_coverArtFileName = None
for filename in self.coverFileNames:
if fileExists(path + filename):
new_coverArtFileName = path + filename
if self.coverArtFileName != new_coverArtFileName:
self.coverArtFileName = new_coverArtFileName
if new_coverArtFileName:
self.picload.startDecode(self.coverArtFileName)
else:
self.showDefaultCover()
def showDefaultCover(self):
self.instance.setPixmap(self.noCoverPixmap)
def embeddedCoverArt(self):
print "[embeddedCoverArt] found"
self.coverArtFileName = "/tmp/.id3coverart"
self.picload.startDecode(self.coverArtFileName)
class MediaPlayer(Screen, InfoBarBase, InfoBarSeek, InfoBarAudioSelection, InfoBarCueSheetSupport, InfoBarNotifications, InfoBarSubtitleSupport, HelpableScreen):
ALLOW_SUSPEND = True
ENABLE_RESUME_SUPPORT = True
def __init__(self, session, args = None):
Screen.__init__(self, session)
InfoBarAudioSelection.__init__(self)
InfoBarCueSheetSupport.__init__(self, actionmap = "MediaPlayerCueSheetActions")
InfoBarNotifications.__init__(self)
InfoBarBase.__init__(self)
InfoBarSubtitleSupport.__init__(self)
HelpableScreen.__init__(self)
self.summary = None
self.oldService = self.session.nav.getCurrentlyPlayingServiceReference()
self.session.nav.stopService()
self.playlistparsers = {}
self.addPlaylistParser(PlaylistIOM3U, "m3u")
self.addPlaylistParser(PlaylistIOPLS, "pls")
self.addPlaylistParser(PlaylistIOInternal, "e2pls")
# 'None' is magic to start at the list of mountpoints
defaultDir = config.mediaplayer.defaultDir.getValue()
self.filelist = FileList(defaultDir, matchingPattern = "(?i)^.*\.(mp2|mp3|ogg|ts|mts|m2ts|wav|wave|m3u|pls|e2pls|mpg|vob|avi|divx|m4v|mkv|mp4|m4a|dat|flac|flv|mov|dts|3gp|3g2|asf|wmv|wma)", useServiceRef = True, additionalExtensions = "4098:m3u 4098:e2pls 4098:pls")
self["filelist"] = self.filelist
self.playlist = MyPlayList()
self.is_closing = False
self.delname = ""
self.playlistname = ""
self["playlist"] = self.playlist
self["PositionGauge"] = ServicePositionGauge(self.session.nav)
self["currenttext"] = Label("")
self["artisttext"] = Label(_("Artist")+':')
self["artist"] = Label("")
self["titletext"] = Label(_("Title")+':')
self["title"] = Label("")
self["albumtext"] = Label(_("Album")+':')
self["album"] = Label("")
self["yeartext"] = Label(_("Year")+':')
self["year"] = Label("")
self["genretext"] = Label(_("Genre")+':')
self["genre"] = Label("")
self["coverArt"] = MediaPixmap()
self["repeat"] = MultiPixmap()
self.seek_target = None
try:
from Plugins.SystemPlugins.Hotplug.plugin import hotplugNotifier
hotplugNotifier.append(self.hotplugCB)
except Exception, ex:
print "[MediaPlayer] No hotplug support", ex
class MoviePlayerActionMap(NumberActionMap):
def __init__(self, player, contexts = [ ], actions = { }, prio=0):
NumberActionMap.__init__(self, contexts, actions, prio)
self.player = player
def action(self, contexts, action):
self.player.show()
return NumberActionMap.action(self, contexts, action)
self["OkCancelActions"] = HelpableActionMap(self, "OkCancelActions",
{
"ok": (self.ok, _("Add file to playlist")),
"cancel": (self.exit, _("Exit mediaplayer")),
}, -2)
self["MediaPlayerActions"] = HelpableActionMap(self, "MediaPlayerActions",
{
"play": (self.xplayEntry, _("Play entry")),
"pause": (self.pauseEntry, _("Pause")),
"stop": (self.stopEntry, _("Stop entry")),
"previous": (self.previousMarkOrEntry, _("Play from previous mark or playlist entry")),
"next": (self.nextMarkOrEntry, _("Play from next mark or playlist entry")),
"menu": (self.showMenu, _("Menu")),
"skipListbegin": (self.skip_listbegin, _("Jump to beginning of list")),
"skipListend": (self.skip_listend, _("Jump to end of list")),
"prevBouquet": (self.switchToPlayList, _("Switch to playlist")),
"nextBouquet": (self.switchToFileList, _("Switch to filelist")),
"delete": (self.deletePlaylistEntry, _("Delete playlist entry")),
"shift_stop": (self.clear_playlist, _("Clear playlist")),
"shift_record": (self.playlist.PlayListShuffle, _("Shuffle playlist")),
"subtitles": (self.subtitleSelection, _("Subtitle selection")),
}, -2)
self["InfobarEPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
{
"showEventInfo": (self.showEventInformation, _("show event details")),
})
self["actions"] = MoviePlayerActionMap(self, ["DirectionActions"],
{
"right": self.rightDown,
"rightRepeated": self.doNothing,
"rightUp": self.rightUp,
"left": self.leftDown,
"leftRepeated": self.doNothing,
"leftUp": self.leftUp,
"up": self.up,
"upRepeated": self.up,
"upUp": self.doNothing,
"down": self.down,
"downRepeated": self.down,
"downUp": self.doNothing,
}, -2)
InfoBarSeek.__init__(self, actionmap = "MediaPlayerSeekActions")
self.onClose.append(self.delMPTimer)
self.onClose.append(self.__onClose)
self.righttimer = False
self.rightKeyTimer = eTimer()
self.rightKeyTimer.callback.append(self.rightTimerFire)
self.lefttimer = False
self.leftKeyTimer = eTimer()
self.leftKeyTimer.callback.append(self.leftTimerFire)
self.currList = "filelist"
self.isAudioCD = False
self.AudioCD_albuminfo = {}
self.cdAudioTrackFiles = []
self.onShown.append(self.applySettings)
self.playlistIOInternal = PlaylistIOInternal()
list = self.playlistIOInternal.open(resolveFilename(SCOPE_CONFIG, "playlist.e2pls"))
if list:
for x in list:
self.playlist.addFile(x.ref)
self.playlist.updateList()
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUpdatedInfo: self.__evUpdatedInfo,
iPlayableService.evUser+10: self.__evAudioDecodeError,
iPlayableService.evUser+11: self.__evVideoDecodeError,
iPlayableService.evUser+12: self.__evPluginError,
iPlayableService.evUser+13: self["coverArt"].embeddedCoverArt
})
def doNothing(self):
pass
def createSummary(self):
return MediaPlayerLCDScreen
def exit(self):
self.playlistIOInternal.clear()
for x in self.playlist.list:
self.playlistIOInternal.addService(ServiceReference(x[0]))
if self.savePlaylistOnExit:
try:
self.playlistIOInternal.save(resolveFilename(SCOPE_CONFIG, "playlist.e2pls"))
except IOError:
print "couldn't save playlist.e2pls"
if config.mediaplayer.saveDirOnExit.getValue():
config.mediaplayer.defaultDir.setValue(self.filelist.getCurrentDirectory())
config.mediaplayer.defaultDir.save()
try:
from Plugins.SystemPlugins.Hotplug.plugin import hotplugNotifier
hotplugNotifier.remove(self.hotplugCB)
except:
pass
del self["coverArt"].picload
self.close()
def checkSkipShowHideLock(self):
self.updatedSeekState()
def doEofInternal(self, playing):
if playing:
self.nextEntry()
else:
self.show()
def __onClose(self):
self.session.nav.playService(self.oldService)
def __evUpdatedInfo(self):
currPlay = self.session.nav.getCurrentService()
sTagTrackNumber = currPlay.info().getInfo(iServiceInformation.sTagTrackNumber)
sTagTrackCount = currPlay.info().getInfo(iServiceInformation.sTagTrackCount)
sTagTitle = currPlay.info().getInfoString(iServiceInformation.sTagTitle)
print "[__evUpdatedInfo] title %d of %d (%s)" % (sTagTrackNumber, sTagTrackCount, sTagTitle)
self.readTitleInformation()
def __evAudioDecodeError(self):
currPlay = self.session.nav.getCurrentService()
sTagAudioCodec = currPlay.info().getInfoString(iServiceInformation.sTagAudioCodec)
print "[__evAudioDecodeError] audio-codec %s can't be decoded by hardware" % (sTagAudioCodec)
self.session.open(MessageBox, _("This STB_BOX cannot decode %s streams!") % sTagAudioCodec, type = MessageBox.TYPE_INFO,timeout = 20 )
def __evVideoDecodeError(self):
currPlay = self.session.nav.getCurrentService()
sTagVideoCodec = currPlay.info().getInfoString(iServiceInformation.sTagVideoCodec)
print "[__evVideoDecodeError] video-codec %s can't be decoded by hardware" % (sTagVideoCodec)
self.session.open(MessageBox, _("This STB_BOX cannot decode %s streams!") % sTagVideoCodec, type = MessageBox.TYPE_INFO,timeout = 20 )
def __evPluginError(self):
currPlay = self.session.nav.getCurrentService()
message = currPlay.info().getInfoString(iServiceInformation.sUser+12)
print "[__evPluginError]" , message
self.session.open(MessageBox, message, type = MessageBox.TYPE_INFO,timeout = 20 )
def delMPTimer(self):
del self.rightKeyTimer
del self.leftKeyTimer
def readTitleInformation(self):
currPlay = self.session.nav.getCurrentService()
if currPlay is not None:
sTitle = currPlay.info().getInfoString(iServiceInformation.sTagTitle)
sAlbum = currPlay.info().getInfoString(iServiceInformation.sTagAlbum)
sGenre = currPlay.info().getInfoString(iServiceInformation.sTagGenre)
sArtist = currPlay.info().getInfoString(iServiceInformation.sTagArtist)
sYear = currPlay.info().getInfoString(iServiceInformation.sTagDate)
if sTitle == "":
if not self.isAudioCD:
sTitle = currPlay.info().getName().split('/')[-1]
else:
sTitle = self.playlist.getServiceRefList()[self.playlist.getCurrentIndex()].getName()
if self.AudioCD_albuminfo:
if sAlbum == "" and "title" in self.AudioCD_albuminfo:
sAlbum = self.AudioCD_albuminfo["title"]
if sGenre == "" and "genre" in self.AudioCD_albuminfo:
sGenre = self.AudioCD_albuminfo["genre"]
if sArtist == "" and "artist" in self.AudioCD_albuminfo:
sArtist = self.AudioCD_albuminfo["artist"]
if "year" in self.AudioCD_albuminfo:
sYear = self.AudioCD_albuminfo["year"]
self.updateMusicInformation( sArtist, sTitle, sAlbum, sYear, sGenre, clear = True )
else:
self.updateMusicInformation()
def updateMusicInformation(self, artist = "", title = "", album = "", year = "", genre = "", clear = False):
self.updateSingleMusicInformation("artist", artist, clear)
self.updateSingleMusicInformation("title", title, clear)
self.updateSingleMusicInformation("album", album, clear)
self.updateSingleMusicInformation("year", year, clear)
self.updateSingleMusicInformation("genre", genre, clear)
def updateSingleMusicInformation(self, name, info, clear):
if info != "" or clear:
if self[name].getText() != info:
self[name].setText(info)
def leftDown(self):
self.lefttimer = True
self.leftKeyTimer.start(1000)
def rightDown(self):
self.righttimer = True
self.rightKeyTimer.start(1000)
def leftUp(self):
if self.lefttimer:
self.leftKeyTimer.stop()
self.lefttimer = False
self[self.currList].pageUp()
self.updateCurrentInfo()
def rightUp(self):
if self.righttimer:
self.rightKeyTimer.stop()
self.righttimer = False
self[self.currList].pageDown()
self.updateCurrentInfo()
def leftTimerFire(self):
self.leftKeyTimer.stop()
self.lefttimer = False
self.switchToFileList()
def rightTimerFire(self):
self.rightKeyTimer.stop()
self.righttimer = False
self.switchToPlayList()
def switchToFileList(self):
self.currList = "filelist"
self.filelist.selectionEnabled(1)
self.playlist.selectionEnabled(0)
self.updateCurrentInfo()
def switchToPlayList(self):
if len(self.playlist) != 0:
self.currList = "playlist"
self.filelist.selectionEnabled(0)
self.playlist.selectionEnabled(1)
self.updateCurrentInfo()
def up(self):
self[self.currList].up()
self.updateCurrentInfo()
def down(self):
self[self.currList].down()
self.updateCurrentInfo()
def showAfterSeek(self):
pass
def showAfterCuesheetOperation(self):
self.show()
def hideAfterResume(self):
self.hide()
def getIdentifier(self, ref):
if self.isAudioCD:
return ref.getName()
else:
text = ref.getPath()
return text.split('/')[-1]
# FIXME: maybe this code can be optimized
def updateCurrentInfo(self):
text = ""
if self.currList == "filelist":
idx = self.filelist.getSelectionIndex()
r = self.filelist.list[idx]
text = r[1][7]
if r[0][1] == True:
if len(text) < 2:
text += " "
if text[:2] != "..":
text = "/" + text
self.summaries.setText(text,1)
idx += 1
if idx < len(self.filelist.list):
r = self.filelist.list[idx]
text = r[1][7]
if r[0][1] == True:
text = "/" + text
self.summaries.setText(text,3)
else:
self.summaries.setText(" ",3)
idx += 1
if idx < len(self.filelist.list):
r = self.filelist.list[idx]
text = r[1][7]
if r[0][1] == True:
text = "/" + text
self.summaries.setText(text,4)
else:
self.summaries.setText(" ",4)
text = ""
if not self.filelist.canDescent():
r = self.filelist.getServiceRef()
if r is None:
return
text = r.getPath()
self["currenttext"].setText(os.path.basename(text))
if self.currList == "playlist":
t = self.playlist.getSelection()
if t is None:
return
#display the currently selected entry on the LCD
text = self.getIdentifier(t)
self.summaries.setText(text,1)
self["currenttext"].setText(text)
idx = self.playlist.getSelectionIndex()
idx += 1
if idx < len(self.playlist):
currref = self.playlist.getServiceRefList()[idx]
text = self.getIdentifier(currref)
self.summaries.setText(text,3)
else:
self.summaries.setText(" ",3)
idx += 1
if idx < len(self.playlist):
currref = self.playlist.getServiceRefList()[idx]
text = self.getIdentifier(currref)
self.summaries.setText(text,4)
else:
self.summaries.setText(" ",4)
def ok(self):
if self.currList == "filelist":
if self.filelist.canDescent():
self.filelist.descent()
self.updateCurrentInfo()
else:
self.copyFile()
if self.currList == "playlist":
if self.playlist.getCurrentIndex() == self.playlist.getSelectionIndex():
self.hide()
else:
self.changeEntry(self.playlist.getSelectionIndex())
def showMenu(self):
menu = []
if len(self.cdAudioTrackFiles):
menu.insert(0,(_("Play audio-CD..."), "audiocd"))
if self.currList == "filelist":
if self.filelist.canDescent():
menu.append((_("Add directory to playlist"), "copydir"))
else:
menu.append((_("Add files to playlist"), "copyfiles"))
menu.append((_("Switch to playlist"), "playlist"))
if config.usage.setup_level.index >= 1: # intermediate+
menu.append((_("Delete file"), "deletefile"))
else:
menu.append((_("Switch to filelist"), "filelist"))
menu.append((_("Clear playlist"), "clear"))
menu.append((_("Delete entry"), "deleteentry"))
if config.usage.setup_level.index >= 1: # intermediate+
menu.append((_("Shuffle playlist"), "shuffle"))
menu.append((_("Hide player"), "hide"));
menu.append((_("Load playlist"), "loadplaylist"));
if config.usage.setup_level.index >= 1: # intermediate+
menu.append((_("Save playlist"), "saveplaylist"));
menu.append((_("Delete saved playlist"), "deleteplaylist"));
menu.append((_("Edit settings"), "settings"))
self.session.openWithCallback(self.menuCallback, ChoiceBox, title="", list=menu)
def menuCallback(self, choice):
if choice is None:
return
if choice[1] == "copydir":
self.copyDirectory(self.filelist.getSelection()[0])
elif choice[1] == "copyfiles":
self.copyDirectory(os.path.dirname(self.filelist.getSelection()[0].getPath()) + "/", recursive = False)
elif choice[1] == "playlist":
self.switchToPlayList()
elif choice[1] == "filelist":
self.switchToFileList()
elif choice[1] == "deleteentry":
if self.playlist.getSelectionIndex() == self.playlist.getCurrentIndex():
self.stopEntry()
self.deleteEntry()
elif choice[1] == "clear":
self.clear_playlist()
elif choice[1] == "hide":
self.hide()
elif choice[1] == "saveplaylist":
self.save_playlist()
elif choice[1] == "loadplaylist":
self.load_playlist()
elif choice[1] == "deleteplaylist":
self.delete_saved_playlist()
elif choice[1] == "shuffle":
self.playlist.PlayListShuffle()
elif choice[1] == "deletefile":
self.deleteFile()
elif choice[1] == "settings":
self.session.openWithCallback(self.applySettings, MediaPlayerSettings, self)
elif choice[1] == "audiocd":
self.playAudioCD()
def playAudioCD(self):
from enigma import eServiceReference
if len(self.cdAudioTrackFiles):
self.playlist.clear()
self.savePlaylistOnExit = False
self.isAudioCD = True
for file in self.cdAudioTrackFiles:
ref = eServiceReference(4097, 0, file)
self.playlist.addFile(ref)
try:
from Plugins.Extensions.CDInfo.plugin import Query
cdinfo = Query(self)
cdinfo.scan()
except ImportError:
pass # we can live without CDInfo
self.changeEntry(0)
self.switchToPlayList()
def applySettings(self):
self.savePlaylistOnExit = config.mediaplayer.savePlaylistOnExit.getValue()
if config.mediaplayer.repeat.getValue() == True:
self["repeat"].setPixmapNum(1)
else:
self["repeat"].setPixmapNum(0)
def showEventInformation(self):
from Screens.EventView import EventViewSimple
from ServiceReference import ServiceReference
evt = self[self.currList].getCurrentEvent()
if evt:
self.session.open(EventViewSimple, evt, ServiceReference(self.getCurrent()))
# also works on filelist (?)
def getCurrent(self):
return self["playlist"].getCurrent()
def deletePlaylistEntry(self):
if self.currList == "playlist":
if self.playlist.getSelectionIndex() == self.playlist.getCurrentIndex():
self.stopEntry()
self.deleteEntry()
def skip_listbegin(self):
if self.currList == "filelist":
self.filelist.moveToIndex(0)
else:
self.playlist.moveToIndex(0)
self.updateCurrentInfo()
def skip_listend(self):
if self.currList == "filelist":
idx = len(self.filelist.list)
self.filelist.moveToIndex(idx - 1)
else:
self.playlist.moveToIndex(len(self.playlist)-1)
self.updateCurrentInfo()
def save_playlist(self):
self.session.openWithCallback(self.save_playlist2,InputBox, title=_("Please enter filename (empty = use current date)"),windowTitle = _("Save playlist"), text=self.playlistname)
def save_playlist2(self, name):
if name is not None:
name = name.strip()
if name == "":
name = strftime("%y%m%d_%H%M%S")
self.playlistname = name
name += ".e2pls"
self.playlistIOInternal.clear()
for x in self.playlist.list:
self.playlistIOInternal.addService(ServiceReference(x[0]))
self.playlistIOInternal.save(resolveFilename(SCOPE_PLAYLIST) + name)
def load_playlist(self):
listpath = []
playlistdir = resolveFilename(SCOPE_PLAYLIST)
try:
for i in os.listdir(playlistdir):
listpath.append((i,playlistdir + i))
except IOError,e:
print "Error while scanning subdirs ",e
if config.mediaplayer.sortPlaylists.value:
listpath.sort()
self.session.openWithCallback(self.PlaylistSelected, ChoiceBox, title=_("Please select a playlist..."), list = listpath)
def PlaylistSelected(self,path):
if path is not None:
self.playlistname = path[0].rsplit('.',1)[-2]
self.clear_playlist()
extension = path[0].rsplit('.',1)[-1]
if self.playlistparsers.has_key(extension):
playlist = self.playlistparsers[extension]()
list = playlist.open(path[1])
for x in list:
self.playlist.addFile(x.ref)
self.playlist.updateList()
def delete_saved_playlist(self):
listpath = []
playlistdir = resolveFilename(SCOPE_PLAYLIST)
try:
for i in os.listdir(playlistdir):
listpath.append((i,playlistdir + i))
except IOError,e:
print "Error while scanning subdirs ",e
if config.mediaplayer.sortPlaylists.value:
listpath.sort()
self.session.openWithCallback(self.DeletePlaylistSelected, ChoiceBox, title=_("Please select a playlist to delete..."), list = listpath)
def DeletePlaylistSelected(self,path):
if path is not None:
self.delname = path[1]
self.session.openWithCallback(self.deleteConfirmed, MessageBox, _("Do you really want to delete %s?") % (path[1]))
def deleteConfirmed(self, confirmed):
if confirmed:
try:
os.remove(self.delname)
except OSError,e:
print "delete failed:", e
self.session.open(MessageBox, _("Delete failed!"), MessageBox.TYPE_ERROR)
def clear_playlist(self):
self.isAudioCD = False
self.stopEntry()
self.playlist.clear()
self.switchToFileList()
def copyDirectory(self, directory, recursive = True):
print "copyDirectory", directory
if directory == '/':
print "refusing to operate on /"
return
filelist = FileList(directory, useServiceRef = True, showMountpoints = False, isTop = True)
for x in filelist.getFileList():
if x[0][1] == True: #isDir
if recursive:
if x[0][0] != directory:
self.copyDirectory(x[0][0])
elif filelist.getServiceRef() and filelist.getServiceRef().type == 4097:
self.playlist.addFile(x[0][0])
self.playlist.updateList()
def deleteFile(self):
if self.currList == "filelist":
self.service = self.filelist.getServiceRef()
else:
self.service = self.playlist.getSelection()
if self.service is None:
return
if self.service.type != 4098 and self.session.nav.getCurrentlyPlayingServiceReference() is not None:
if self.service == self.session.nav.getCurrentlyPlayingServiceReference():
self.stopEntry()
serviceHandler = eServiceCenter.getInstance()
offline = serviceHandler.offlineOperations(self.service)
info = serviceHandler.info(self.service)
name = info and info.getName(self.service)
result = False
if offline is not None:
# simulate first
if not offline.deleteFromDisk(1):
result = True
if result == True:
self.session.openWithCallback(self.deleteConfirmed_offline, MessageBox, _("Do you really want to delete %s?") % (name))
else:
self.session.openWithCallback(self.close, MessageBox, _("You cannot delete this!"), MessageBox.TYPE_ERROR)
def deleteConfirmed_offline(self, confirmed):
if confirmed:
serviceHandler = eServiceCenter.getInstance()
offline = serviceHandler.offlineOperations(self.service)
result = False
if offline is not None:
# really delete!
if not offline.deleteFromDisk(0):
result = True
if result == False:
self.session.open(MessageBox, _("Delete failed!"), MessageBox.TYPE_ERROR)
else:
self.removeListEntry()
def removeListEntry(self):
currdir = self.filelist.getCurrentDirectory()
self.filelist.changeDir(currdir)
deleteend = False
while not deleteend:
index = 0
deleteend = True
if len(self.playlist) > 0:
for x in self.playlist.list:
if self.service == x[0]:
self.playlist.deleteFile(index)
deleteend = False
break
index += 1
self.playlist.updateList()
if self.currList == "playlist":
if len(self.playlist) == 0:
self.switchToFileList()
def copyFile(self):
if self.filelist.getServiceRef().type == 4098: # playlist
ServiceRef = self.filelist.getServiceRef()
extension = ServiceRef.getPath()[ServiceRef.getPath().rfind('.') + 1:]
if self.playlistparsers.has_key(extension):
playlist = self.playlistparsers[extension]()
list = playlist.open(ServiceRef.getPath())
for x in list:
self.playlist.addFile(x.ref)
self.playlist.updateList()
else:
self.playlist.addFile(self.filelist.getServiceRef())
self.playlist.updateList()
if len(self.playlist) == 1:
self.changeEntry(0)
def addPlaylistParser(self, parser, extension):
self.playlistparsers[extension] = parser
def nextEntry(self):
next = self.playlist.getCurrentIndex() + 1
if next < len(self.playlist):
self.changeEntry(next)
elif ( len(self.playlist) > 0 ) and ( config.mediaplayer.repeat.getValue() == True ):
self.stopEntry()
self.changeEntry(0)
def nextMarkOrEntry(self):
if not self.jumpPreviousNextMark(lambda x: x):
next = self.playlist.getCurrentIndex() + 1
if next < len(self.playlist):
self.changeEntry(next)
else:
self.doSeek(-1)
def previousMarkOrEntry(self):
if not self.jumpPreviousNextMark(lambda x: -x-5*90000, start=True):
next = self.playlist.getCurrentIndex() - 1
if next >= 0:
self.changeEntry(next)
def deleteEntry(self):
self.playlist.deleteFile(self.playlist.getSelectionIndex())
self.playlist.updateList()
if len(self.playlist) == 0:
self.switchToFileList()
def changeEntry(self, index):
self.playlist.setCurrentPlaying(index)
self.playEntry()
def playServiceRefEntry(self, serviceref):
serviceRefList = self.playlist.getServiceRefList()
for count in range(len(serviceRefList)):
if serviceRefList[count] == serviceref:
self.changeEntry(count)
break
def xplayEntry(self):
if self.currList == "playlist":
self.playEntry()
else:
self.stopEntry()
self.playlist.clear()
self.isAudioCD = False
sel = self.filelist.getSelection()
if sel:
if sel[1]: # can descent
# add directory to playlist
self.copyDirectory(sel[0])
else:
# add files to playlist
self.copyDirectory(os.path.dirname(sel[0].getPath()) + "/", recursive = False)
if len(self.playlist) > 0:
self.changeEntry(0)
def playEntry(self, audio_extensions = frozenset((".mp2", ".mp3", ".wav", ".ogg", ".flac", ".m4a"))):
if len(self.playlist.getServiceRefList()):
needsInfoUpdate = False
currref = self.playlist.getServiceRefList()[self.playlist.getCurrentIndex()]
if self.session.nav.getCurrentlyPlayingServiceReference() is None or currref != self.session.nav.getCurrentlyPlayingServiceReference():
idx = self.playlist.getCurrentIndex()
currref = self.playlist.getServiceRefList()[idx]
text = self.getIdentifier(currref)
ext = os.path.splitext(text)[1].lower()
if ext not in audio_extensions and not self.isAudioCD:
movie = self.playlist.getServiceRefList()[self.playlist.getCurrentIndex()]
self.session.openWithCallback(self.stopEntry, ExMoviePlayer, movie)
else:
self.session.nav.playService(self.playlist.getServiceRefList()[self.playlist.getCurrentIndex()])
info = eServiceCenter.getInstance().info(currref)
description = info and info.getInfoString(currref, iServiceInformation.sDescription) or ""
self["title"].setText(description)
# display the currently playing music on the LCD
idx = self.playlist.getCurrentIndex()
currref = self.playlist.getServiceRefList()[idx]
text = self.getIdentifier(currref)
ext = os.path.splitext(text)[1].lower()
text = ">"+text
# FIXME: the information whether the service contains video (and hence whether we should hide our window) should come from the service instead
if ext not in audio_extensions and not self.isAudioCD:
self.hide()
else:
needsInfoUpdate = True
self.summaries.setText(text,1)
# get the next two entries
idx += 1
if idx < len(self.playlist):
currref = self.playlist.getServiceRefList()[idx]
text = self.getIdentifier(currref)
self.summaries.setText(text,3)
else:
self.summaries.setText(" ",3)
idx += 1
if idx < len(self.playlist):
currref = self.playlist.getServiceRefList()[idx]
text = self.getIdentifier(currref)
self.summaries.setText(text,4)
else:
self.summaries.setText(" ",4)
else:
idx = self.playlist.getCurrentIndex()
currref = self.playlist.getServiceRefList()[idx]
text = currref.getPath()
ext = os.path.splitext(text)[1].lower()
if ext not in audio_extensions and not self.isAudioCD:
self.hide()
else:
needsInfoUpdate = True
self.unPauseService()
if needsInfoUpdate == True:
path = self.playlist.getServiceRefList()[self.playlist.getCurrentIndex()].getPath()
self["coverArt"].updateCoverArt(path)
else:
self["coverArt"].showDefaultCover()
self.readTitleInformation()
def updatedSeekState(self):
if self.seekstate == self.SEEK_STATE_PAUSE:
self.playlist.pauseFile()
elif self.seekstate == self.SEEK_STATE_PLAY:
self.playlist.playFile()
elif self.isStateForward(self.seekstate):
self.playlist.forwardFile()
elif self.isStateBackward(self.seekstate):
self.playlist.rewindFile()
def pauseEntry(self):
self.pauseService()
if self.seekstate == self.SEEK_STATE_PAUSE:
self.show()
else:
self.hide()
def stopEntry(self):
self.playlist.stopFile()
self.session.nav.playService(None)
self.updateMusicInformation(clear=True)
self.show()
def unPauseService(self):
self.setSeekState(self.SEEK_STATE_PLAY)
def subtitleSelection(self):
from Screens.AudioSelection import SubtitleSelection
self.session.open(SubtitleSelection, self)
def hotplugCB(self, dev, media_state):
if dev == harddiskmanager.getCD():
if media_state == "1":
from Components.Scanner import scanDevice
devpath = harddiskmanager.getAutofsMountpoint(harddiskmanager.getCD())
self.cdAudioTrackFiles = []
res = scanDevice(devpath)
list = [ (r.description, r, res[r], self.session) for r in res ]
if list:
(desc, scanner, files, session) = list[0]
for file in files:
if file.mimetype == "audio/x-cda":
self.cdAudioTrackFiles.append(file.path)
else:
self.cdAudioTrackFiles = []
if self.isAudioCD:
self.clear_playlist()
class MediaPlayerLCDScreen(Screen):
skin = (
"""<screen name="MediaPlayerLCDScreen" position="0,0" size="132,64" id="1">
<widget name="text1" position="4,0" size="132,35" font="Regular;16"/>
<widget name="text3" position="4,36" size="132,14" font="Regular;10"/>
<widget name="text4" position="4,49" size="132,14" font="Regular;10"/>
</screen>""",
"""<screen name="MediaPlayerLCDScreen" position="0,0" size="96,64" id="2">
<widget name="text1" position="0,0" size="96,35" font="Regular;14"/>
<widget name="text3" position="0,36" size="96,14" font="Regular;10"/>
<widget name="text4" position="0,49" size="96,14" font="Regular;10"/>
</screen>""")
def __init__(self, session, parent):
Screen.__init__(self, session)
self["text1"] = Label("Media player")
self["text3"] = Label("")
self["text4"] = Label("")
def setText(self, text, line):
if len(text) > 10:
if text[-4:] == ".mp3":
text = text[:-4]
textleer = " "
text = text + textleer*10
if line == 1:
self["text1"].setText(text)
elif line == 3:
self["text3"].setText(text)
elif line == 4:
self["text4"].setText(text)
def main(session, **kwargs):
session.open(MediaPlayer)
def menu(menuid, **kwargs):
if menuid == "mainmenu" and config.mediaplayer.onMainMenu.getValue():
return [(_("Media Player"), main, "media_player", 1)]
return []
def filescan_open(list, session, **kwargs):
from enigma import eServiceReference
mp = session.open(MediaPlayer)
mp.playlist.clear()
mp.savePlaylistOnExit = False
for file in list:
if file.mimetype == "video/MP2T":
stype = 1
else:
stype = 4097
ref = eServiceReference(stype, 0, file.path)
mp.playlist.addFile(ref)
mp.changeEntry(0)
mp.switchToPlayList()
def audioCD_open(list, session, **kwargs):
from enigma import eServiceReference
mp = session.open(MediaPlayer)
mp.cdAudioTrackFiles = [f.path for f in list]
mp.playAudioCD()
def movielist_open(list, session, **kwargs):
if not list:
# sanity
return
from enigma import eServiceReference
from Screens.InfoBar import InfoBar
f = list[0]
if f.mimetype == "video/MP2T":
stype = 1
else:
stype = 4097
if InfoBar.instance:
path = os.path.split(f.path)[0]
if not path.endswith('/'):
path += '/'
config.movielist.last_videodir.value = path
InfoBar.instance.showMovies(eServiceReference(stype, 0, f.path))
def filescan(**kwargs):
from Components.Scanner import Scanner, ScanPath
return [
Scanner(mimetypes = ["video/mpeg", "video/MP2T", "video/x-msvideo", "video/mkv"],
paths_to_scan =
[
ScanPath(path = "", with_subdirs = False),
],
name = "Movie",
description = _("Watch movies..."),
openfnc = movielist_open,
),
Scanner(mimetypes = ["video/x-vcd"],
paths_to_scan =
[
ScanPath(path = "mpegav", with_subdirs = False),
ScanPath(path = "MPEGAV", with_subdirs = False),
],
name = "Video CD",
description = _("View video CD..."),
openfnc = filescan_open,
),
Scanner(mimetypes = ["audio/mpeg", "audio/x-wav", "application/ogg", "audio/x-flac"],
paths_to_scan =
[
ScanPath(path = "", with_subdirs = False),
],
name = "Music",
description = _("Play music..."),
openfnc = filescan_open,
),
Scanner(mimetypes = ["audio/x-cda"],
paths_to_scan =
[
ScanPath(path = "", with_subdirs = False),
],
name = "Audio-CD",
description = _("Play audio-CD..."),
openfnc = audioCD_open,
),
]
from Plugins.Plugin import PluginDescriptor
def Plugins(**kwargs):
return [
#PluginDescriptor(name = _("Media player"), description = _("Play back media files"), where = PluginDescriptor.WHERE_PLUGINMENU, needsRestart = False, fnc = main),
PluginDescriptor(name = _("Media Player"), where = PluginDescriptor.WHERE_FILESCAN, needsRestart = False, fnc = filescan),
PluginDescriptor(name = _("Media Player"), description = _("Play back media files"), where = PluginDescriptor.WHERE_MENU, needsRestart = False, fnc = menu)
]
| popazerty/dvbapp2-gui | lib/python/Plugins/Extensions/MediaPlayer/plugin.py | Python | gpl-2.0 | 36,514 |
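The playlist handling in the plugin above boils down to a small extension-to-parser dispatch table (addPlaylistParser / playlistparsers). A stripped-down sketch of the same pattern, free of any enigma2 dependencies and with a purely illustrative M3U reader standing in for PlaylistIOM3U, might look like this:

class SimpleM3UParser(object):
    """Illustrative stand-in for PlaylistIOM3U; not the enigma2 class."""
    def open(self, filename):
        with open(filename) as f:
            # keep every non-empty, non-comment line as a playlist entry
            return [line.strip() for line in f
                    if line.strip() and not line.startswith('#')]

playlistparsers = {}

def addPlaylistParser(parser_cls, extension):
    playlistparsers[extension] = parser_cls

addPlaylistParser(SimpleM3UParser, "m3u")

def load_playlist(filename):
    # dispatch on the file extension, as the plugin does in copyFile()
    extension = filename.rsplit('.', 1)[-1]
    if extension in playlistparsers:
        return playlistparsers[extension]().open(filename)
    return []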
#!/usr/bin/env python3.5
# -*- coding: utf-8 -*-
# Author: ChenLiang
import socket
# Define the IP address and port
ip_port = ('127.0.0.1', 9999)  # must be a tuple
# Create the communication device (the socket)
s = socket.socket()
# Attach it to the address
s.bind(ip_port)
# Power on: start listening
s.listen(5)  # at most 5 pending connections; Python 3 defaults to 128
# Stand by: wait for a connection
conn, addr = s.accept()  # the accepted connection
while True:
    try:
        # Receive a message
        recv_data = conn.recv(1024)  # receive up to 1024 bytes
        if str(recv_data, encoding='utf-8') == 'exit':
            break
        # Send the reply
        send_data = recv_data.upper()
        conn.send(send_data)
    except Exception:
        break
# End the session
conn.close()
| smartczm/python-learn | Old-day01-10/s13-day9/Socket/while_socket/server.py | Python | gpl-2.0 | 681 |
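A minimal companion client for the echo server above might look like the sketch below (client.py is a hypothetical file name; it assumes the server is already listening on 127.0.0.1:9999 and, as in the server, ends the session by sending 'exit'):

#!/usr/bin/env python3.5
# -*- coding: utf-8 -*-
# client.py: minimal client sketch for the echo server above
import socket

ip_port = ('127.0.0.1', 9999)
c = socket.socket()
c.connect(ip_port)

for msg in ('hello', 'world', 'exit'):
    c.send(bytes(msg, encoding='utf-8'))
    if msg == 'exit':  # the server breaks out of its loop without replying
        break
    reply = c.recv(1024)
    print(str(reply, encoding='utf-8'))  # the server returns the upper-cased text

c.close()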
from django.conf import settings
from django.utils.translation import gettext as _
from django.shortcuts import render, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseForbidden
from keops.contrib.base.models.ui import Menu
from keops.api import site
@login_required
def index(request, current_menu=None, context=None):
    groups = None
    if request.user.is_superuser:
        menu = Menu.objects.filter(parent_id=None)
    else:
        groups = [obj.pk for obj in request.user.groups.all()]
        menu = Menu.objects.filter(parent_id=None, groups__in=groups)
    if current_menu is None:
        current_menu = menu.first()
    else:
        if request.user.is_superuser:
            m = Menu.objects
        else:
            m = Menu.objects.filter(groups__in=groups)
        current_menu = m.get(pk=current_menu)
    if not current_menu:
        return HttpResponseForbidden('You do not have menu permissions!')
    ctx = {
        '_': _,
        'request': request,
        'user': request.user,
        'groups': groups,
        'menu': menu,
        'settings': settings,
        'current_menu': current_menu,
    }
    if context:
        ctx.update(context)
    return render(request, '/keops/web/index.html', ctx)


@login_required
def action(request, service, action_id):
    if service is None:
        from keops.contrib.base.models import Action
        action = get_object_or_404(Action, pk=action_id)
        act_cls = Action.ACTIONS[action.action_type]
        action_id = get_object_or_404(act_cls, pk=action_id)
        return action_id.dispatch_action(request)
    svc = site.services[service]
    return svc(request).dispatch_action(action_id)
| katrid/keops | keops/views/web.py | Python | bsd-3-clause | 1,743 |
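A hypothetical URLconf sketch for wiring up the two views above; the route patterns, names, and parameter converters are assumptions rather than part of the keops source (the view also accepts service=None, which is not reachable through these routes):

from django.urls import path
from keops.views import web

urlpatterns = [
    path('', web.index, name='web-index'),
    path('menu/<int:current_menu>/', web.index, name='web-menu'),
    path('action/<str:service>/<int:action_id>/', web.action, name='web-action'),
]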
#Code generator for gauss kernel
import Utils
local_size = 64
from KDECodeGenerator import GaussKernel
from KDECodeGenerator import CategoricalKernel
import copy
from KDECodeGenerator import generateTableGradientContributionKernel
from KDECodeGenerator import generateTableEstGrad
from KDECodeGenerator import generateTableObjectiveGrad
from KDECodeGenerator import generatePreamble
from KDECodeGenerator import generateGPUJKDETestWrapper
from KDECodeGenerator import generateGPUJKDELocalTraining
from JoinGraph import constructJoinGraph
import operator
def prod(iterable):
return reduce(operator.mul, iterable, 1)
def generateBinarySearchCode(f):
print >>f, """
unsigned int binarySearch(__global unsigned int* x, double val,unsigned int len){
unsigned int l = 0;
unsigned int u = len-1;
if(x[l] >= val) return 0;
if(x[u] < val) return len;
while(u - l != 1){
int c = l + (u-l)/2;
if(x[c] >= val)
u=c;
else
l=c;
}
return u;
}
"""
def generateRectKDELimit(f):
print >>f, """
double compute_rect_limit(double bw1, double bw2){
bw1 = fmax(1.0,bw1 * 3.07634);
bw2 = fmax(1.0,bw2 * 3.07634);
double obw1 = 2* ((unsigned int) (bw1+1.0)/2)+1;
double obw2 = 2* ((unsigned int) (bw2+1.0)/2)+1;
return (obw1+obw2)/2;
}
"""
def generateContKDELimit(f):
print >>f, """
double compute_cont_limit(double bw1, double bw2, double t1, double t2){
double bw_term = bw1*bw1 + bw2*bw2;
return sqrt(-2.0 * bw_term * log(sqrt(2*M_PI*bw_term)/ (t1*t2)));
}
"""
class CatKDEKernel:
def generateEstimateCode(self,f, query,base,node,stats):
ts,dvals = stats
t1,c1 = node.left_col
t2, c2 = node.right_col
icols = Utils.generateInvariantColumns(query)
print >> f, "%sT j = (1-bw_t%s_c%s)*(1-bw_t%s_c%s) + bw_t%s_c%s * bw_t%s_c%s / (%f-1.0);" % (" " * base,t1,c1,t2,c2,t1,c1,t2,c2,max(dvals[t1][c1],dvals[t2][c2]))
print >> f, "%ssum += j " % (" " * base),
if icols[t1]:
print >> f, "* c_t%s" % (t1),
if icols[t2]:
print >> f, "* c_t%s" % (t2),
print >> f, ";"
if icols[t2]:
print >> f, "%sosum += c_t%s;" % (" " * base,t2)
def generateCountCode(self,f, query,base,node,stats):
print >> f, "%ssum += 1.0;" % (" " * base)
def generatePreamble(self,f,query):
print >> f, " T sum = 0.0;"
print >> f, " T osum = 0.0;"
class ContKDEKernel:
def generateEstimateCode(self,f, query,base,node,stats):
if len(query.joins) == 1 and len(query.joins[0]) == 2:
self.generate2EstimateCode(f, query,base,node,stats)
return
icols = Utils.generateInvariantColumns(query)
for j, join in enumerate(query.joins):
print >> f, "%sunsigned int min_val_j%s = 0;" % (" " * (base), j)
for pt in join:
a, b = pt
print >> f, "%smin_val_j%s = min(min_val_j%s,val_t%s_c%s);" % (" " * (base), j, j, a, b)
for pt in join:
a, b = pt
print >> f, "%sunsigned int sval_t%s_c%s = val_t%s_c%s-min_val_j%s;" % (" " * (base),a,b,a,b,j)
print >> f, "%sT mu_sum_j%s = (0.0 " % (" " * (base), j),
for pt in join:
a, b = pt
print >> f, "+ sval_t%s_c%s / bw_t%s_c%s" % (a, b, a, b),
print >> f, ") / sigma_sum_inv_j%s;" % j
print >> f, "%sT scaled_mu_sum_j%s = 0.0" % (" " * base, j),
for pt in join:
a, b = pt
print >> f, "+ ((T)sval_t%s_c%s)*sval_t%s_c%s / bw_t%s_c%s" % (a, b, a, b, a, b),
print >> f, ";"
print >> f, "%sT j%s = exp(-0.5 *(scaled_mu_sum_j%s - mu_sum_j%s * mu_sum_j%s * sigma_sum_inv_j%s));" % (
" " * base, j, j, j, j, j)
print >> f, "%ssum += 1.0 " % (" " * base),
for j, join in enumerate(query.joins):
print >> f, "* j%s * factor_j%s * pow(0.5*M_1_PI,%s)" % (j,j,(len(join)-1)/2.0),
for i,c in enumerate(icols):
if len(c) != 0:
print >> f, "* c_t%s" % (i),
print >> f, ";"
def generate2EstimateCode(self,f, query,base,node,stats):
icols = Utils.generateJoinColumns(query)
if len(query.joins) != 1:
raise Exception("This feature was disabled due to huge ass numerical instabilities.")
join = query.joins[0]
t1,c1 = join[0]
t2,c2 = join[1]
print >> f, "%ssum += M_SQRT1_2 * M_2_SQRTPI * 0.5 / resbw * exp( -0.5 * (((T)val_t%s_c%s)-((T)val_t%s_c%s))*(((T)val_t%s_c%s)-((T)val_t%s_c%s))/(resbw*resbw)) " % (" " * base,t1,c1,t2,c2,t1,c1,t2,c2),
for i, pt in enumerate(join):
a, b = pt
print >> f, "* c_t%s" % (a),
print >> f, ";"
def generateCountCode(self,f, query,base,node,stats):
print >> f, "%ssum += 1.0;" % (" " * base),
def generatePreamble(self,f,query):
if len(query.joins) == 1 and len(query.joins[0]) == 2:
self.generate2Preamble(f, query)
return
print >> f, " T sum = 0.0;"
# Create all
for x, join in enumerate(query.joins):
for pt in join:
a, b = pt
print >> f, " bw_t%s_c%s *= bw_t%s_c%s;" % (a, b, a, b)
print >> f
for x, join in enumerate(query.joins):
print >> f, " T sigma_prod_j%s = 1.0" % x,
for pt in join:
a, b = pt
print >> f, "* bw_t%s_c%s" % (a, b),
print >> f, ";"
for x, join in enumerate(query.joins):
print >> f, " T sigma_sum_inv_j%s = 0.0" % x,
for pt in join:
a, b = pt
print >> f, "+ 1.0/(bw_t%s_c%s)" % (a, b),
print >> f, ";"
print >> f
print >> f, " T factor_j%s = sqrt(1.0/(sigma_prod_j%s*sigma_sum_inv_j%s));" % (x, x, x)
def generate2Preamble(self,f,query):
print >> f, " T sum = 0.0;"
print >> f, " T resbw = sqrt(0.0 + "
# Create all
for x, join in enumerate(query.joins):
for pt in join:
a, b = pt
print >> f, " + bw_t%s_c%s * bw_t%s_c%s" % (a, b, a, b),
print >> f, ");"
print >> f
class RectKDEKernel:
def upper_bound(self, cols):
a, b = cols.pop()
if cols:
return "min(val_t%s_c%s + ibw_t%s_c%s/2.0, %s)" % (a, b, a, b, self.upper_bound(cols))
else:
return "val_t%s_c%s + ibw_t%s_c%s/2.0" % (a, b, a, b)
def lower_bound(self, cols):
a, b = cols.pop()
if cols:
return "fmax(val_t%s_c%s - ibw_t%s_c%s/2.0, %s)" % (a, b, a, b, self.lower_bound(cols))
else:
return "val_t%s_c%s - ibw_t%s_c%s/2.0" % (a, b, a, b)
def generateEstimateCode(self,f, query,base,node,stats):
icols = Utils.generateInvariantColumns(query)
for j, join in enumerate(query.joins):
print >> f, "%sT iu_j%s = %s;" % (" " * (base), j, self.upper_bound(join[:]))
print >> f, "%sT il_j%s = %s;" % (" " * (base), j, self.lower_bound(join[:]))
print >> f, "%sT ou_j%s = iu_j%s + 1;" % (" " * (base), j, j)
print >> f, "%sT ol_j%s = il_j%s - 1;" % (" " * (base), j, j)
print >> f, "%sT j%s = 0.0;" % (" " * (base), j)
print >> f, "%sif(iu_j%s - il_j%s >= 0.0 && ou_j%s - ol_j%s > 0.0){" % (" " * (base), j, j, j, j)
base += 1
print >> f, "%sj%s += (iu_j%s - il_j%s);" % (" " * (base), j, j, j)
print >> f, "%sj%s += %s;" % (" " * (base), j, '*'.join(
map(lambda x: "(il_j%s -fmax(val_t%s_c%s-bw_t%s_c%s/2.0,ol_j%s))" % (j, x[0], x[1], x[0], x[1], j), join)))
print >> f, "%sj%s += %s;" % (" " * (base), j, '*'.join(
map(lambda x: "(min(val_t%s_c%s+bw_t%s_c%s/2.0,ou_j%s)-iu_j%s)" % (x[0], x[1], x[0], x[1], j, j), join)))
base -= 1
print >> f, "%s} else if(iu_j%s - il_j%s < 0.0 && ou_j%s - ol_j%s > 0.0) {" % (" " * (base), j, j, j, j)
base += 1
print >> f, "%sj%s = %s;" % (" " * (base), j, '*'.join(map(
lambda x: "(min(val_t%s_c%s+bw_t%s_c%s/2.0,ou_j%s)-fmax(val_t%s_c%s-bw_t%s_c%s/2.0,ol_j%s))" % (
x[0], x[1], x[0], x[1], j, x[0], x[1], x[0], x[1], j), join)))
base -= 1
print >> f, "%s}" % (" " * (base))
print >> f, "%sj%s /= %s;" % (
(" " * (base), j, '*'.join(map(lambda x: "bw_t%s_c%s" % (x[0], x[1]), join))))
print >> f, "%ssum += 1.0 " % (" " * base),
for j, join in enumerate(query.joins):
print >> f, "* j%s" % j,
for i, pt in enumerate(join):
a, b = pt
if a == 0 and len(icols[a]) > 0:
print >> f, "* c_t%s" % (a),
else:
if len(icols[a]) > 0:
print >> f, "* c_t%s" % (a),
print >> f, ";"
def generateCountCode(self,f, query,base,node,stats):
print >> f, "%ssum += 1.0;" % (" " * base),
def generatePreamble(self,f,query):
print >>f, " T sum = 0.0;"
#Create all
for x,join in enumerate(query.joins):
for pt in join:
a,b = pt
print >>f, " bw_t%s_c%s = fmax(1.0,bw_t%s_c%s * 3.07634);" % (a,b,a,b)
print >>f, " unsigned int ibw_t%s_c%s = 2* ((unsigned int) (bw_t%s_c%s+1.0)/2) - 1;" % (a,b,a,b)
print >>f, " unsigned int obw_t%s_c%s = ibw_t%s_c%s + 2;" % (a,b,a,b)
print >>f
def generateJoinEstimateKernel(f,query,estimator,stats):
print >>f, "__kernel void estimate("
icols = Utils.generateInvariantColumns(query)
jcols = Utils.generateJoinColumns(query)
_ , dvals = stats
graph = constructJoinGraph(query)
t1, c1 = graph.left_col
t2, c2 = graph.right_col
tids = graph.collectTableIDs()
pairs = graph.collectJoinPairs()
if estimator.join_kernel == "Cont":
kde = ContKDEKernel()
elif estimator.join_kernel == "Rect":
kde = RectKDEKernel()
elif estimator.join_kernel == "Cat":
kde = CatKDEKernel()
for x,t in enumerate(tids):
for jc in jcols[t]:
print >>f, " __global unsigned int* t%s_c%s," % (t,jc),
print >>f, " double bw_t%s_c%s," % (t,jc)
if icols[t]:
print >>f, " __global double* inv_t%s," % (t)
if x > 0:
print >>f, " unsigned int n_t%s," % (t)
#Here we go.
for t1,c1,t2,c2 in pairs:
print >> f, " double limit_t%s_c%s_t%s_c%s," % (t1,c1,t2,c2)
if estimator.join_kernel == "Cat":
print >> f, " double omega,"
print >>f, " __global double* contributions, unsigned int ss){"
print >> f
#We start off with table 1.
kde.generatePreamble(f,query)
print >>f, " for(unsigned int offset = 0; offset < ss; offset += get_global_size(0)){"
print >>f, " if (offset + get_global_id(0) < ss){"
graph.generateJoinEstimateKernelBottomUp(f, query, estimator)
kde.generateEstimateCode(f,query,graph.jid+1,graph,stats)
graph.generateJoinEstimateKernelTopDown(f, query)
if estimator.join_kernel == "Cat":
print >> f, " T jnone = (1.0-bw_t%s_c%s) * bw_t%s_c%s / (%f-1.0) + (1.0-bw_t%s_c%s) * bw_t%s_c%s / (%f-1.0) + bw_t%s_c%s*bw_t%s_c%s * (%f-2.0) / ((%f-1.0)*(%f-1.0));" % (t1,c1,t2,c2,dvals[t2][c2],
t2,c2,t1,c1,dvals[t1][c1],
t1,c1,t2,c2,min(dvals[t1][c1],dvals[t2][c2]),dvals[t1][c1],dvals[t2][c2])
t1, c1 = graph.left_col
print >>f, " sum += c_t%s * jnone * (omega-osum);" % (t1)
print >>f, " }"
print >>f, " }"
print >>f, " if (get_global_id(0) < ss) contributions[get_global_id(0)] = sum;"
print >>f, "}"
#Classes representing a left-deep join tree
def generateCIncludes(f):
print >>f, """
#include <iostream>
#include <string>
#include <streambuf>
#include <nlopt.h>
#include <sstream>
#include <cmath>
#include <fstream>
#include <cstdio>
#include <boost/compute/core.hpp>
#include <boost/compute/algorithm/transform.hpp>
#include <boost/compute/algorithm/sort_by_key.hpp>
#include <boost/compute/algorithm/gather.hpp>
#include <boost/compute/algorithm/inclusive_scan.hpp>
#include <boost/compute/algorithm/exclusive_scan.hpp>
#include <boost/compute/algorithm/scatter.hpp>
#include <boost/compute/algorithm/scatter_if.hpp>
#include <boost/compute/algorithm/transform.hpp>
#include <boost/compute/algorithm/reduce.hpp>
#include <boost/compute/container/vector.hpp>
#include <boost/compute/functional/math.hpp>
#include <boost/compute/iterator/counting_iterator.hpp>
namespace compute = boost::compute;
std::chrono::time_point<std::chrono::high_resolution_clock> opt_start;
"""
def generateGPUJKDEGlobalTraining(cf,query,estimator):
icols = Utils.generateInvariantColumns(query)
#Generate contribution arrays
print >>cf, " std::string training_cardinality_string = iteration_stream.str() + \"/training_join_true.dump\";"
print >>cf, " p.j_training_cardinality = readUArrayFromFile(training_cardinality_string.c_str());"
for i,indices in enumerate(icols):
if len(indices) != 0:
for j in indices:
if estimator.kernels[i][j] == "GaussRange":
print >>cf, " std::string training_j_l_t%s_c%s_string = iteration_stream.str() + \"/training_join_l_%s_%s.dump\";" % (i,j,query.tables[i].tid,query.tables[i].columns[j].cid)
print >>cf, " p.j_training_l_t%s_c%s= readUArrayFromFile(training_j_l_t%s_c%s_string.c_str());" % (i,j,i,j)
print >>cf, " std::string training_j_u_t%s_c%s_string = iteration_stream.str() + \"/training_join_u_%s_%s.dump\";" % (i,j,query.tables[i].tid,query.tables[i].columns[j].cid)
print >>cf, " p.j_training_u_t%s_c%s = readUArrayFromFile(training_j_u_t%s_c%s_string.c_str());" % (i,j,i,j)
else:
print >>cf, " std::string training_j_p_t%s_c%s_string = iteration_stream.str() + \"/training_join_p_%s_%s.dump\";" % (i,j,query.tables[i].tid,query.tables[i].columns[j].cid)
print >>cf, " p.j_training_p_t%s_c%s = readUArrayFromFile(training_j_p_t%s_c%s_string.c_str());" % (i,j,i,j)
print >>cf
print >>cf, " double* bw = (double*) calloc(%s,sizeof(double));" % (len(Utils.flatten(estimator.kernels)))
print >>cf, " double ub[%s] = {0.0};" % (len(Utils.flatten(estimator.kernels)))
print >>cf, " double lb[%s] = {0.0};" % (len(Utils.flatten(estimator.kernels)))
i = 0
for x,kernels in enumerate(estimator.kernels):
for y,kernel in enumerate(kernels):
if kernel == "GaussRange" or kernel == "GaussPoint":
print >>cf, " ub[%s] = fmax(p.bw_t%s_c%s,2.0);" % (i,x,y)
print >>cf, " lb[%s] = 0.1;" % (i)
print >>cf, " bw[%s] = 2.0;" % (i)
elif kernel == "CategoricalPoint":
print >>cf, " lb[%s] = DBL_EPSILON;" % (i)
print >>cf, " bw[%s] = 1.0/p.ss%s;" % (i,x)
print >>cf, " ub[%s] = 1.0-DBL_EPSILON;" % (i)
else:
print (y,kernel)
raise Exception("Wuut wuut?")
i += 1
print >> cf, " double minf = 0.0;"
#print >> cf, " std::string bwstr(argv[0]);"
#print >> cf, " bwstr.append(\".bw_dump\");"
#print >> cf, " if(fexists(bwstr.c_str())) bw = readDArrayFromFile(bwstr.c_str());"
i = 0
for x,kernels in enumerate(estimator.kernels):
for y,kernel in enumerate(kernels):
print >>cf, " ub[%s] = fmax(bw[%s],ub[%s]);" % (i,i,i)
i += 1
#The categorical kernel needs global optimization urgently
if estimator.join_kernel == "Cat":
print >>cf, """
nlopt_opt gopt = nlopt_create(NLOPT_GN_MLSL,%s);
nlopt_set_lower_bounds(gopt,lb);
nlopt_set_upper_bounds(gopt,ub);
nlopt_set_min_objective(gopt,obj,&p);
""" % (len(Utils.flatten(estimator.kernels)))
print >>cf
print >>cf, """
nlopt_set_maxeval(gopt, %s);
nlopt_set_ftol_rel(gopt, %s);
nlopt_opt lopt = nlopt_create(NLOPT_LN_COBYLA,%s);
nlopt_set_lower_bounds(lopt,lb);
nlopt_set_upper_bounds(lopt,ub);
nlopt_set_local_optimizer(gopt, lopt);
int grc = nlopt_optimize(gopt, bw, &minf);
assert(grc >=0);
""" % (40,"1e-10",len(Utils.flatten(estimator.kernels)))
print >>cf, " opt_start = std::chrono::high_resolution_clock::now();"
print >> cf, """
nlopt_opt opt = nlopt_create(NLOPT_LN_COBYLA,%s);
nlopt_set_lower_bounds(opt,lb);
nlopt_set_upper_bounds(opt,ub);
nlopt_set_maxeval(opt, %s);
nlopt_set_ftol_rel(opt, %s);
nlopt_set_min_objective(opt,obj,&p);
p.opt = &opt;
int frc = nlopt_optimize(opt, bw, &minf);
assert(frc >=0);
""" % (len(Utils.flatten(estimator.kernels)), 1000, "1e-5")
#print >> cf, " ddump(bw, %s, bwstr.c_str());" % len(Utils.flatten(estimator.kernels))
i=0
for x,kernels in enumerate(estimator.kernels):
for y,kernel in enumerate(kernels):
print >>cf, " p.bw_t%s_c%s = bw[%s];" % (x,y,i)
i += 1
print >>cf
def generateGPUJKDECode(i,query,estimator,stats,cu_factor):
ts, dv = stats
graph = constructJoinGraph(query)
tids = graph.collectTableIDs()
#Generate Kernels
with open("./%s_kernels.cl" % i,'w') as cf:
generatePreamble(cf)
gk = GaussKernel()
gk.pointEstimateFunction(cf)
gk.pointGradientFunction(cf)
gk.rangeEstimateFunction(cf)
gk.rangeGradientFunction(cf)
ck = CategoricalKernel()
ck.pointEstimateFunction(cf)
ck.pointGradientFunction(cf)
print >>cf, "//"
graph.generateTableEstimateKernel(cf,query,estimator,stats)
generateBinarySearchCode(cf)
generateJoinEstimateKernel(cf,query,estimator,stats)
print >>cf, "//"
#Do we need table level estimation kernels?
if estimator.bw_optimization == "local":
for j, kernels in enumerate(estimator.kernels):
generateTableGradientContributionKernel(cf,"grad_t%s" % j,kernels,dv[j])
with open("./%s_AGPUJKDE.cpp" % i,'w') as cf:
generateCIncludes(cf)
generateRectKDELimit(cf)
generateContKDELimit(cf)
generateGPUJKDEParameterArray(cf,query,estimator)
Utils.generateGPUVectorConverterFunction(cf)
Utils.generateUintFileReaderFunction(cf)
Utils.generateDoubleFileReaderFunction(cf)
Utils.generateFileCheckFunction(cf)
Utils.generateScottBWFunction(cf)
Utils.generateDoubleDumper(cf)
generateGPUJKDEEstimateFunction(cf,graph,query,estimator,prod(ts.values())**-1.0,stats,cu_factor)
generateGPUJKDETestWrapper(cf,query,estimator)
if estimator.bw_optimization == "local":
for tid,table in enumerate(query.tables):
generateTableEstGrad(cf,tid,query,estimator)
generateTableObjectiveGrad(cf,tid,query,estimator)
elif estimator.bw_optimization == "join":
generateGPUJKDEObjective(cf,query,estimator)
cols = Utils.generateInvariantColumns(query)
jcols = Utils.generateJoinColumns(query)
print >>cf, """
int main( int argc, const char* argv[] ){
parameters p;
compute::device device = compute::system::default_device();
p.ctx = compute::context(device);
p.queue=compute::command_queue(p.ctx, device);
"""
print >>cf, """
std::ifstream t("./%s_kernels.cl");
t.exceptions ( std::ifstream::failbit | std::ifstream::badbit );
std::string str((std::istreambuf_iterator<char>(t)),
std::istreambuf_iterator<char>());
""" % i
#Read table sizes, load the sample columns into memory, and transfer them to the GPU device
print >>cf, " std::stringstream iteration_stream;"
print >>cf, " p.iteration = (unsigned int) atoi(argv[%s]);" % (len(query.tables)+1)
print >>cf, " iteration_stream << \"./iteration\" << std::setw(2) << std::setfill('0') << argv[%s];" % (len(query.tables)+1)
for j,t in enumerate(query.tables):
print >>cf, " p.ss%s = atoi(argv[%s]);" % (j,j+1)
print >>cf, " p.ts%s= %s;" % (j,ts[j])
for k,c in enumerate(t.columns):
print >>cf, " std::stringstream s_t%s_c%s_stream ;" % (j,k)
print >>cf, " s_t%s_c%s_stream << iteration_stream.str() << \"/sample_\" << atoi(argv[%s]) << \"_%s_%s.dump\";" % (j,k,j+1,t.tid,c.cid)
print >>cf, " std::string s_t%s_c%s_string = s_t%s_c%s_stream.str();" % (j,k,j,k)
print >>cf, " unsigned int* s_t%s_c%s = readUArrayFromFile(s_t%s_c%s_string.c_str());" % (j,k,j,k)
print >>cf, " p.s_t%s_c%s = toGPUVector(s_t%s_c%s, p.ss%s, p.ctx, p.queue);" % (j,k,j,k,j)
if estimator.kernels[j][k] == "GaussPoint" or estimator.kernels[j][k] == "GaussRange":
print >>cf, " p.bw_t%s_c%s = scott_bw(s_t%s_c%s, p.ss%s, %s);" % (j,k,j,k,j,len(query.tables))
print >>cf, " if(p.bw_t%s_c%s < 0.2) p.bw_t%s_c%s = 0.2;" % (j,k,j,k)
else:
print >>cf, " p.bw_t%s_c%s = 1.0/(1.0+1.0/%f);" % (j,k,dv[j][k]-1)
print >>cf
for t,cs in enumerate(jcols):
if cols[t]:
for c in cs:
print >> cf, " p.sr_t%s_c%s = compute::vector<unsigned int>(p.ss%s, p.ctx);" % (t,c,t)
print >> cf, " p.final_contributions = compute::vector<double>(p.ss%s, p.ctx);" % (tids[0])
print >>cf, """
compute::program pr = compute::program::create_with_source(str,p.ctx);
try{
std::ostringstream oss;
pr.build(oss.str());
} catch(const std::exception& ex){
std::cout << pr.build_log() << std::endl;
}
"""
for j,t in enumerate(query.tables):
if len(cols[j]) > 0:
print >>cf, " p.invk%s = pr.create_kernel(\"invk_t%s\");" % (j,j)
print >>cf, " p.inv_t%s = compute::vector<double>(p.ss%s, p.ctx);" % (j,j)
print >> cf, " p.invr_t%s = compute::vector<double>(p.ss%s, p.ctx);" % (j, j)
print >>cf, " p.estimate = pr.create_kernel(\"estimate\");"
print >>cf
for t, tab in enumerate(query.tables):
print >> cf, " p.map_t%s = compute::vector<unsigned int >(p.ss%s+1, p.ctx);" % (t,t)
print >> cf, " p.count_t%s = compute::vector<int >(p.ss%s+1, p.ctx);" % (t,t)
print >> cf, " p.count_t%s[0] = -1;" % t
#Prepare training
if estimator.bw_optimization == "local":
generateGPUJKDELocalTraining(cf,query,estimator,cu_factor)
elif estimator.bw_optimization == "join":
if estimator.join_kernel == "Rect":
raise Exception("This is not how optimization on join works.")
elif estimator.join_kernel == "Cont":
generateGPUJKDEGlobalTraining(cf,query,estimator)
elif estimator.join_kernel == "Cat":
generateGPUJKDEGlobalTraining(cf,query,estimator)
else:
raise Exception("I don't know this join kernel.")
else:
raise Exception("I don't know this type of join optimization.")
print >>cf, " std::string test_cardinality_string = iteration_stream.str() + \"/test_join_true.dump\";"
print >>cf, " p.j_test_cardinality = readUArrayFromFile(test_cardinality_string.c_str());"
for i,indices in enumerate(cols):
if len(indices) != 0:
for j in indices:
if estimator.kernels[i][j] == "GaussRange":
print >>cf, " std::string j_l_t%s_c%s_string = iteration_stream.str() + \"/test_join_l_%s_%s.dump\";" % (i,j,query.tables[i].tid,query.tables[i].columns[j].cid)
print >>cf, " p.j_l_t%s_c%s= readUArrayFromFile(j_l_t%s_c%s_string.c_str());" % (i,j,i,j)
print >>cf, " std::string j_u_t%s_c%s_string = iteration_stream.str() + \"/test_join_u_%s_%s.dump\";" % (i,j,query.tables[i].tid,query.tables[i].columns[j].cid)
print >>cf, " p.j_u_t%s_c%s = readUArrayFromFile(j_u_t%s_c%s_string.c_str());" % (i,j,i,j)
else:
print >>cf, " std::string j_p_t%s_c%s_string = iteration_stream.str() + \"/test_join_p_%s_%s.dump\";" % (i,j,query.tables[i].tid,query.tables[i].columns[j].cid)
print >>cf, " p.j_p_t%s_c%s = readUArrayFromFile(j_p_t%s_c%s_string.c_str());" % (i,j,i,j)
print >>cf
print >>cf, " join_test(&p);"
print >>cf, "}"
def generateGPUJKDEObjective(f,query,estimator):
icols = Utils.generateInvariantColumns(query)
print >>f, "double obj(unsigned n, const double* bw, double* grad, void* f_data){"
print >>f, " parameters* p = (parameters*) f_data;"
i = 0
for x,table in enumerate(query.tables):
for y,col in enumerate(table.columns):
print >>f, " p->bw_t%s_c%s = bw[%s];" % (x,y,i)
i += 1
print >>f, " int first = 1;"
print >>f, " double est = 0.0;"
if estimator.objective == "squared":
print >>f, " double objective = 0.0;"
elif estimator.objective == "Q":
print >>f, " double objective = 1.0;"
else:
raise Exception("I don't know this objective function.")
print >>f, " for(unsigned int i = 0; i < %s; i++){" % estimator.training
if hasattr(estimator, 'limit_opt'):
print >>f, " if(std::chrono::duration_cast<std::chrono::minutes>(std::chrono::high_resolution_clock::now()-opt_start).count() > %s)" % estimator.limit_opt
print >>f, " nlopt_force_stop(*(p->opt));"
print >>f, " if(first ",
for x, cols in enumerate(icols):
for y in cols:
if estimator.kernels[x][y] == "GaussRange":
print >>f, "|| p->j_training_u_t%s_c%s[i] != p->j_training_u_t%s_c%s[i-1]" % (x,y,x,y)
print >>f, "|| p->j_training_l_t%s_c%s[i] != p->j_training_l_t%s_c%s[i-1]" % (x,y,x,y),
else:
print >>f, "|| p->j_training_p_t%s_c%s[i] != p->j_training_p_t%s_c%s[i-1]" % (x,y,x,y),
print >> f, "){"
print >>f, " first = 0;"
print >>f, " est = join_estimate_instance(p ",
for x, cols in enumerate(icols):
for y in cols:
if estimator.kernels[x][y] == "GaussRange":
print >>f, ", p->j_training_u_t%s_c%s[i], p->j_training_l_t%s_c%s[i] " % (x,y,x,y),
else:
print >>f, ", p->j_training_p_t%s_c%s[i] " % (x,y),
print >>f, ");"
print >> f, " }"
print >>f, " unsigned int trues = p->j_training_cardinality[i];"
if estimator.objective == "squared":
print >>f, " objective += (est-trues)*(est-trues)/%s;" % estimator.training
elif estimator.objective == "Q":
print >>f, " if(est < 1.0) est = 1.0;"
print >>f, " objective *= std::pow(std::max(est/trues,trues/est),1.0/%s);" % estimator.training
else:
raise Exception("I don't know this objective function.")
print >>f, " }"
print >>f, " return objective;"
print >>f, "}"
print >>f
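# For reference, the two objectives emitted by generateGPUJKDEObjective are
# (a sketch in the generator's own terms; est is the join estimate, trues the
# training cardinality, N = estimator.training):
#
#   squared: objective = (1/N) * sum_i (est_i - trues_i)^2
#   Q:       objective = prod_i max(est_i/trues_i, trues_i/est_i)^(1/N)
#
# i.e. the mean squared error and the geometric mean of the q-error over the
# training queries.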
#Generate parameter struct that is passed to the estimation/gradient functions
def generateGPUJKDEParameterArray(f,query,estimator):
cols = Utils.generateInvariantColumns(query)
jcols = Utils.generateJoinColumns(query)
print >>f, """
typedef struct{
compute::command_queue queue;
compute::context ctx;
"""
print >>f, " unsigned int iteration;"
print >>f, " nlopt_opt* opt;"
for j,t in enumerate(query.tables):
print >>f, " size_t ss%s;" % (j)
print >>f, " unsigned int ts%s;" % (j)
if len(cols[j]) > 0:
print >>f, " compute::kernel invk%s;" % (j)
print >>f, " compute::vector<double> inv_t%s;" % (j)
print >> f, " compute::vector<double> invr_t%s;" % (j)
for k,c in enumerate(t.columns):
print >>f, " compute::vector<unsigned int> s_t%s_c%s;" % (j,k)
print >>f, " double bw_t%s_c%s;" % (j,k)
print >>f
print >>f, " compute::kernel estimate;"
for t,tab in enumerate(query.tables):
print >>f, " compute::vector<unsigned int> map_t%s;" % t
print >>f, " compute::vector<int> count_t%s;" % t
for t,_ in enumerate(jcols):
for c in jcols[t]:
print >> f, " compute::vector<unsigned int> sr_t%s_c%s;" % (t, c)
print >>f, " compute::vector<double> final_contributions;"
#Training
if estimator.bw_optimization == "local":
for tid,tab in enumerate(query.tables):
print >>f, " size_t global_t%s;" % (tid)
print >>f, " size_t local_t%s;" % (tid)
print >>f, " compute::kernel estk%s;" % (tid)
print >>f, " compute::kernel gradk%s;" % (tid)
print >>f, " compute::vector<double> est_t%s;" % (tid)
print >>f, " unsigned int* true_t%s;" % tid
for cid,col in enumerate(tab.columns):
print >>f, " compute::vector<double> grad_t%s_c%s;" % (tid,cid)
if estimator.kernels[tid][cid] == "GaussRange":
print >>f, " unsigned int* l_t%s_c%s;" % (tid,cid)
print >>f, " unsigned int* u_t%s_c%s;" % (tid,cid)
else:
print >>f, " unsigned int* p_t%s_c%s;" % (tid,cid)
print >>f
print >>f, " unsigned int* j_test_cardinality;"
for i,indices in enumerate(cols):
#Start with computing the invariant contributions
if len(indices) != 0:
for j in indices:
if estimator.kernels[i][j] == "GaussRange":
print >>f, " unsigned int* j_l_t%s_c%s;" % (i,j)
print >>f, " unsigned int* j_u_t%s_c%s;" % (i,j)
else:
print >>f, " unsigned int* j_p_t%s_c%s;" % (i,j)
if estimator.bw_optimization == "join":
print >>f, " unsigned int* j_training_cardinality;"
for i,indices in enumerate(cols):
#Start with computing the invariant contributions
if len(indices) != 0:
for j in indices:
if estimator.kernels[i][j] == "GaussRange":
print >>f, " unsigned int* j_training_l_t%s_c%s;" % (i,j)
print >>f, " unsigned int* j_training_u_t%s_c%s;" % (i,j)
else:
print >>f, " unsigned int* j_training_p_t%s_c%s;" % (i,j)
print >>f, """
} parameters;
"""
def generateGPUJKDEEstimateFunction(f, nodes, query, estimator, limit, stats, cu_factor):
icols = Utils.generateInvariantColumns(query)
jcols = Utils.generateJoinColumns(query)
ts, dv = stats
print >> f, "double join_estimate_instance(parameters* p"
for i, indices in enumerate(icols):
# Start with computing the invariant contributions
if len(indices) != 0:
for j in indices:
if estimator.kernels[i][j] == "GaussRange":
print >> f, " , unsigned int u_t%s_c%s, unsigned int l_t%s_c%s" % (i, j, i, j)
else:
print >> f, " , unsigned int p_t%s_c%s" % (i, j)
print >> f
print >> f, "){"
nodes.generateTableCode(f, query, estimator, limit, cu_factor)
if estimator.join_kernel == "Cat":
if len(query.joins) > 1 or len(query.joins[0]) != 2:
raise Exception("The categorical kernel does not support more than two joins.")
# Compute omega_2
t2, _ = nodes.right_col
print >> f, " double omega_2 = 1.0;"
if icols[t2]:
print >> f, " boost::compute::reduce(p->invr_t%s.begin(),p->invr_t%s.begin()+rss_t%s, &omega_2, p->queue);" % (
t2, t2, t2)
print >> f, " p->queue.finish();"
# Next, generate the limits
pairs = nodes.collectJoinPairs()
tids = nodes.collectTableIDs()
for t1, c1, t2, c2 in pairs:
if estimator.join_kernel == "Cont":
print >> f, " double limit_t%s_c%s_t%s_c%s = compute_cont_limit(p->bw_t%s_c%s, p->bw_t%s_c%s, p->ts%s, p->ts%s);" % (
t1, c1, t2, c2, t1, c1, t2, c2, t1, t2)
elif estimator.join_kernel == "Rect":
print >> f, " double limit_t%s_c%s_t%s_c%s = compute_rect_limit(p->bw_t%s_c%s, p->bw_t%s_c%s);" % (
t1, c1, t2, c2, t1, c1, t2, c2)
elif estimator.join_kernel == "Cat":
print >> f, " double limit_t%s_c%s_t%s_c%s = 0.0;" % (t1, c1, t2, c2)
else:
raise Exception("Unsupported join kernel.")
print >> f, " size_t local = 64;"
print >> f, " size_t global = std::min((size_t) p->ctx.get_device().compute_units()*%s , ((rss_t%s-1)/local+1)*local);" % (cu_factor,tids[0])
print >> f, " p->estimate.set_args(",
for x, t in enumerate(tids):
for jc in jcols[t]:
if icols[t]:
print >> f, " p->sr_t%s_c%s," % (t, jc),
else:
print >> f, " p->s_t%s_c%s," % (t, jc),
print >> f, " p->bw_t%s_c%s," % (t, jc)
if icols[t]:
print >> f, " p->invr_t%s," % (t)
if x > 0:
print >> f, " (unsigned int) rss_t%s," % (t)
# Pass the per-pair join limits.
for t1, c1, t2, c2 in pairs:
print >> f, " limit_t%s_c%s_t%s_c%s," % (t1, c1, t2, c2)
if estimator.join_kernel == "Cat":
print >> f, " omega_2,"
print >> f, " p->final_contributions, (unsigned int) rss_t%s);" % tids[0]
print >> f, " p->queue.enqueue_nd_range_kernel(p->estimate,1,NULL,&(global), &(local));"
print >> f, " double est = 0.0;"
print >> f, " boost::compute::reduce(p->final_contributions.begin(), p->final_contributions.begin()+std::min(global,rss_t%s), &est, p->queue);" % (
tids[0])
print >> f, " p->queue.finish();"
for i, _ in enumerate(query.tables):
print >> f, " est *= ((double) p->ts%s)/p->ss%s;" % (i, i)
print >> f, " return est;"
print >> f, "}"
|
martinkiefer/join-kde
|
code/AggressiveKDECodeGenerator.py
|
Python
|
gpl-3.0
| 35,836
|
#!/usr/bin/env python
from __future__ import print_function
################################################################################
#
#
# drmaa_wrapper.py
#
# Copyright (C) 2013 Leo Goodstadt
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Portions of code from adapted from:
#
# http://stackoverflow.com/questions/375427/non-blocking-read-on-a-subprocess-pipe-in-python
# Courtesy of J.F. Sebastian
# Use is licensed under the "Creative Commons Attribution Share Alike license"
# See http://stackexchange.com/legal
#
#################################################################################
"""
********************************************
:mod:`ruffus.cmdline` -- Overview
********************************************
.. moduleauthor:: Leo Goodstadt <ruffus@llew.org.uk>
#
# Using drmaa
#
from ruffus import *
import drmaa_wrapper
"""
import sys, os
import stat
#
# tempfile for drmaa scripts
#
import tempfile
import datetime
import subprocess
import time
import threading
try:
from Queue import Queue, Empty
except ImportError:
from queue import Queue, Empty # python 3.x
ON_POSIX = 'posix' in sys.builtin_module_names
if sys.hexversion >= 0x03000000:
# everything is unicode in python3
path_str_type = str
else:
path_str_type = basestring
#_________________________________________________________________________________________
# error_drmaa_job
#_________________________________________________________________________________________
class error_drmaa_job(Exception):
"""
All exceptions thrown in this module
"""
def __init__(self, *errmsg):
Exception.__init__(self, *errmsg)
#_________________________________________________________________________________________
# read_stdout_stderr_from_files
#_________________________________________________________________________________________
def read_stdout_stderr_from_files( stdout_path, stderr_path, logger = None, cmd_str = "", tries=5):
"""
Reads the contents of the two specified paths and returns their lines.
Uses the paranoid retry approach contributed by Andreas Heger:
retry a few times in case the file system hasn't committed the files yet.
Logs a warning if the files are missing: no big deal.
Cleans up both files afterwards.
Returns a tuple of stdout and stderr lines.
"""
#
# delay up to 10 seconds until files are ready
#
for xxx in range(tries):
if os.path.exists( stdout_path ) and os.path.exists( stderr_path ):
break
time.sleep(2)
try:
stdout = open( stdout_path, "r" ).readlines()
except IOError:
exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
msg = str(exceptionValue)
if logger:
logger.warning( "could not open stdout: %s for \n%s" % (msg, cmd_str))
stdout = []
try:
stderr = open( stderr_path, "r" ).readlines()
except IOError:
exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
msg = str(exceptionValue)
if logger:
logger.warning( "could not open stderr: %s for \n%s" % (msg, cmd_str))
stderr = []
#
# cleanup ignoring errors
#
try:
os.unlink( stdout_path )
os.unlink( stderr_path )
except OSError:
pass
return stdout, stderr
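#   A minimal usage sketch (paths are hypothetical, not part of this module):
#
#       stdout_lines, stderr_lines = read_stdout_stderr_from_files(
#                                       "/tmp/job.stdout", "/tmp/job.stderr",
#                                       logger=None, cmd_str="echo hello")
#
#   Both files are unlinked after reading, and a missing file just yields an
#   empty list rather than raising.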
#_________________________________________________________________________________________
# setup_drmaa_job
#_________________________________________________________________________________________
def setup_drmaa_job( drmaa_session, job_name, job_environment, working_directory, job_other_options):
job_template = drmaa_session.createJobTemplate()
if not working_directory:
job_template.workingDirectory = os.getcwd()
else:
job_template.workingDirectory = working_directory
if job_environment:
# dictionary e.g. { 'BASH_ENV' : '~/.bashrc' }
job_template.jobEnvironment = job_environment
job_template.args = []
if job_name:
job_template.jobName = job_name
else:
# nameless jobs sometimes break drmaa implementations...
job_template.jobName = "ruffus_job_" + "_".join(map(str, datetime.datetime.now().timetuple()[0:6]))
#
# optional job parameters
#
job_template.nativeSpecification = job_other_options
# separate stdout and stderr
job_template.joinFiles=False
return job_template
#_________________________________________________________________________________________
# write_job_script_to_temp_file
#_________________________________________________________________________________________
def write_job_script_to_temp_file( cmd_str, job_script_directory, job_name, job_other_options, job_environment, working_directory):
'''
returns (job_script_path, stdout_path, stderr_path)
'''
import sys
time_stmp_str = "_".join(map(str, datetime.datetime.now().timetuple()[0:6]))
# create script directory if necessary
# Ignore errors rather than test for existence to avoid race conditions
try:
os.makedirs(job_script_directory)
except:
pass
tmpfile = tempfile.NamedTemporaryFile(mode='w', prefix='drmaa_script_' + time_stmp_str + "__", dir = job_script_directory, delete = False)
#
# hopefully #!/bin/sh is universally portable among unix-like operating systems
#
tmpfile.write( "#!/bin/sh\n" )
#
# log parameters as suggested by Bernie Pope
#
for title, parameter in ( ("job_name", job_name, ),
("job_other_options", job_other_options,),
("job_environment", job_environment, ),
("working_directory", working_directory), ):
if parameter:
tmpfile.write( "#%s=%s\n" % (title, parameter))
tmpfile.write( cmd_str + "\n" )
tmpfile.close()
job_script_path = os.path.abspath( tmpfile.name )
stdout_path = job_script_path + ".stdout"
stderr_path = job_script_path + ".stderr"
os.chmod( job_script_path, stat.S_IRWXG | stat.S_IRWXU )
return (job_script_path, stdout_path, stderr_path)
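#   Usage sketch (arguments are hypothetical):
#
#       job_script_path, stdout_path, stderr_path = write_job_script_to_temp_file(
#               "echo hello", "/tmp/drmaa_scripts", "example_job", "", None, None)
#
#   The stdout/stderr paths are simply the script path with ".stdout" and
#   ".stderr" appended, and the script is made user/group executable.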
#_________________________________________________________________________________________
# run_job_using_drmaa
#_________________________________________________________________________________________
def run_job_using_drmaa (cmd_str, job_name = None, job_other_options = "", job_script_directory = None, job_environment = None, working_directory = None, retain_job_scripts = False, logger = None, drmaa_session = None, verbose = 0):
"""
Runs specified command remotely using drmaa,
either with the specified session, or the module shared drmaa session
"""
import drmaa
#
# used specified session else module session
#
if drmaa_session is None:
raise error_drmaa_job( "Please specify a drmaa_session in run_job()")
#
# make job template
#
job_template = setup_drmaa_job( drmaa_session, job_name, job_environment, working_directory, job_other_options)
#
# make job script
#
if not job_script_directory:
job_script_directory = os.getcwd()
job_script_path, stdout_path, stderr_path = write_job_script_to_temp_file( cmd_str, job_script_directory, job_name, job_other_options, job_environment, working_directory)
job_template.remoteCommand = job_script_path
# drmaa paths specified as [hostname]:file_path.
# See http://www.ogf.org/Public_Comment_Docs/Documents/2007-12/ggf-drmaa-idl-binding-v1%2000%20RC7.pdf
job_template.outputPath = ":" + stdout_path
job_template.errorPath = ":" + stderr_path
#
# Run job and wait
#
jobid = drmaa_session.runJob(job_template)
if logger:
logger.debug( "job has been submitted with jobid %s" % str(jobid ))
try:
job_info = drmaa_session.wait(jobid, drmaa.Session.TIMEOUT_WAIT_FOREVER)
except Exception:
exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
msg = str(exceptionValue)
# ignore message 24 in PBS
# code 24: drmaa: Job finished but resource usage information and/or termination status could not be provided.":
if not msg.startswith("code 24"): raise
if logger:
logger.info("Warning %s\n"
"The original command was:\n%s\njobid=jobid\n"
(msg.message, cmd_str,jobid) )
job_info = None
#
# Read output
#
stdout, stderr = read_stdout_stderr_from_files( stdout_path, stderr_path, logger, cmd_str)
job_info_str = ("The original command was: >> %s <<\n"
"The jobid was: %s\n"
"The job script name was: %s\n" %
(cmd_str,
jobid,
job_script_path))
def stderr_stdout_to_str (stderr, stdout):
"""
Concatenate stdout and stderr to string
"""
result = ""
if stderr:
result += "The stderr was: \n%s\n\n" % ("".join( stderr))
if stdout:
result += "The stdout was: \n%s\n\n" % ("".join( stdout))
return result
#
# Throw if failed
#
if job_info:
job_info_str += "Resources used: %s " % (job_info.resourceUsage)
if job_info.wasAborted:
raise error_drmaa_job( "The drmaa command was never ran but used %s:\n%s"
% (job_info.exitStatus, job_info_str + stderr_stdout_to_str (stderr, stdout)))
elif job_info.hasSignal:
raise error_drmaa_job( "The drmaa command was terminated by signal %i:\n%s"
% (job_info.exitStatus, job_info_str + stderr_stdout_to_str (stderr, stdout)))
elif job_info.hasExited:
if job_info.exitStatus:
raise error_drmaa_job( "The drmaa command was terminated by signal %i:\n%s"
% (job_info.exitStatus, job_info_str + stderr_stdout_to_str (stderr, stdout)))
#
# Decorate normal exit with some resource usage information
#
elif verbose:
def nice_mem_str(num):
"""
Format memory sizes
http://stackoverflow.com/questions/1094841/reusable-library-to-get-human-readable-version-of-file-size
"""
num = float(num)
for x in ['bytes','KB','MB','GB']:
if num < 1024.0:
return "%3.1f%s" % (num, x)
num /= 1024.0
return "%3.1f%s" % (num, 'TB')
try:
resource_usage_str = []
if 'maxvmem' in job_info.resourceUsage:
if 'mem' in job_info.resourceUsage:
resource_usage_str.append("Mem=%s(%s)" % (nice_mem_str(job_info.resourceUsage['maxvmem']), job_info.resourceUsage['mem']))
else:
resource_usage_str.append("Mem=%s" % nice_mem_str(job_info.resourceUsage['maxvmem']))
if 'ru_wallclock' in job_info.resourceUsage:
resource_usage_str.append("CPU wallclock= %.2gs" % float(job_info.resourceUsage['ru_wallclock']))
if len(resource_usage_str):
logger.info("Drmaa command used %s in running %s" % (", ".join(resource_usage_str), cmd_str))
else:
logger.info("Drmaa command successfully ran %s" % cmd_str)
except:
logger.info("Drmaa command used %s in running %s" % (job_info.resourceUsage, cmd_str))
#
# clean up job template
#
drmaa_session.deleteJobTemplate(job_template)
#
# Cleanup job script unless retain_job_scripts is set
#
if retain_job_scripts:
# job scripts have the jobid as an extension
os.rename(job_script_path, job_script_path + ".%s" % jobid )
else:
try:
os.unlink( job_script_path )
except OSError:
if logger:
logger.warning( "Temporary job script wrapper '%s' missing (and ignored) at clean-up" % job_script_path )
return stdout, stderr
def enqueue_output(out, queue, echo):
for line in iter(out.readline, ''):
queue.put(line)
if echo is not None:
echo.write(line)
echo.flush()
out.close()
#_________________________________________________________________________________________
# run_job_locally
#_________________________________________________________________________________________
def run_job_locally (cmd_str, logger = None, job_environment = None, working_directory = None, local_echo = False):
"""
Runs specified command locally instead of drmaa
"""
popen_params = {"args" : cmd_str,
"cwd" : working_directory if working_directory is not None else os.getcwd(),
"shell" : True,
"stdin" : subprocess.PIPE,
"stdout" : subprocess.PIPE,
"stderr" : subprocess.PIPE,
"bufsize" :1,
"universal_newlines" : True,
"close_fds" : ON_POSIX}
if job_environment is not None:
popen_params["env"] = job_environment
process = subprocess.Popen( **popen_params )
stderrQ = Queue()
stdoutQ = Queue()
stdout_t = threading.Thread(target=enqueue_output, args=(process.stdout, stdoutQ, sys.stdout if local_echo else None))
stderr_t = threading.Thread(target=enqueue_output, args=(process.stderr, stderrQ, sys.stderr if local_echo else None))
# if daemon = False, sub process cannot be interrupted by Ctrl-C
stdout_t.daemon = True
stderr_t.daemon = True
stdout_t.start()
stderr_t.start()
process.wait()
stdout_t.join()
stderr_t.join()
process.stdin.close()
process.stdout.close()
process.stderr.close()
stdout, stderr = [], []
try:
while True:
stdout.append(stdoutQ.get(False))
except:
pass
try:
while True:
stderr.append(stderrQ.get(False))
except:
pass
if process.returncode != 0:
raise error_drmaa_job( "The locally run command was terminated by signal %i:\n"
"The original command was:\n%s\n"
"The stderr was: \n%s\n\n"
"The stdout was: \n%s\n\n" %
(-process.returncode, cmd_str, "".join(stderr), "".join(stdout)) )
return stdout, stderr
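#   Usage sketch (command is hypothetical):
#
#       stdout_lines, stderr_lines = run_job_locally("echo hello", local_echo=True)
#
#   stdout and stderr are drained on daemon threads and returned as lists of
#   lines; a non-zero return code raises error_drmaa_job.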
#_________________________________________________________________________________________
# touch_output_files
#_________________________________________________________________________________________
def touch_output_files (cmd_str, output_files, logger = None):
"""
Touches output files instead of actually running the command string
"""
if not output_files or not len(output_files):
if logger:
logger.debug("No output files to 'touch' for command:\n%s")
return
# make sure is list
ltypes=(list, tuple)
if not isinstance(output_files, ltypes):
output_files = [output_files]
else:
output_files = list(output_files)
#
# flatten list of file names
# from http://rightfootin.blogspot.co.uk/2006/09/more-on-python-flatten.html
#
i = 0
while i < len(output_files):
while isinstance(output_files[i], ltypes):
if not output_files[i]:
output_files.pop(i)
i -= 1
break
else:
output_files[i:i + 1] = output_files[i]
i += 1
for f in output_files:
# ignore non strings
if not isinstance (f, path_str_type):
continue
# create file
if not os.path.exists(f):
# the with statement guarantees the new file is closed immediately
with open(f, 'w') as p: pass
# touch existing file
else:
os.utime(f, None)
#_________________________________________________________________________________________
# run_job
#_________________________________________________________________________________________
def run_job(cmd_str, job_name = None, job_other_options = None, job_script_directory = None,
job_environment = None, working_directory = None, logger = None,
drmaa_session = None, retain_job_scripts = False,
run_locally = False, output_files = None, touch_only = False, verbose = 0, local_echo = False):
"""
Runs specified command either using drmaa, or locally or only in simulation (touch the output files only)
"""
if touch_only:
touch_output_files (cmd_str, output_files, logger)
return "","",
if run_locally:
return run_job_locally (cmd_str, logger, job_environment, working_directory, local_echo)
return run_job_using_drmaa (cmd_str, job_name, job_other_options, job_script_directory, job_environment, working_directory, retain_job_scripts, logger, drmaa_session, verbose)
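#   A minimal end-to-end sketch (the command is hypothetical; assumes the
#   python drmaa bindings and a working DRMAA-capable cluster):
#
#       import drmaa
#       session = drmaa.Session()
#       session.initialize()
#       stdout, stderr = run_job("echo hello",
#                                job_name="example_job",
#                                drmaa_session=session)
#       session.exit()
#
#   With run_locally=True no session is needed, and with touch_only=True the
#   given output_files are created or touched instead of running the command.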
|
magosil86/ruffus
|
ruffus/drmaa_wrapper.py
|
Python
|
mit
| 18,668
|
from django.conf.urls import url, include
from . import views
from .vi import vi_views
app_name='polls'
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^welcome$', views.welcome, name='welcome'),
url(r'^login$', views.my_login, name='login'),
url(r'^logout$', views.my_logout, name='logout'),
#url(r'^(?P<pk>[0-9]+)/$', views.DetailView.as_view(), name='detail'),
#url(r'^(?P<pk>[0-9]+)/results/$', views.ResultsView.as_view(), name='results'),
#url(r'^(?P<question_id>[0-9]+)/vote/$', views.vote, name='vote'),
#url(r'^index1/$', views.index1, name='index1'),
url(r'^vi/$', vi_views.index, name='index'),
]
|
saulario/pruebas
|
python/mysite/polls/urls.py
|
Python
|
gpl-3.0
| 689
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import runpy
runpy.run_module('announce', run_name='__main__', alter_sys=True)
|
DonJayamanne/pythonVSCode
|
news/__main__.py
|
Python
|
mit
| 175
|
"""
Django settings for CoPub project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '3fj4z_r+986))9bq78ge&+^6_5dy70$ea4sihenrnu7mi63*=-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'PubMed.apps.PubmedConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'CoPub.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'CoPub.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_ROOT = 'PubMed/static/'
STATIC_URL = '/static/'
|
sh4rkman/CoPub
|
CoPub/settings.py
|
Python
|
mit
| 3,162
|
#!/usr/bin/python
#
# Copyright (c) 2011 The Bitcoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file license.txt or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
pp = pprint.PrettyPrinter(indent=4)
class BitcoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
self.authhdr = "Basic %s" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print "JSON-RPC: no response"
return None
body = resp.read()
resp_obj = json.loads(body)
if resp_obj is None:
print "JSON-RPC: cannot JSON-decode body"
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print "JSON-RPC: no result in object"
return None
return resp_obj['result']
def getblockcount(self):
return self.rpc('getblockcount')
def getwork(self, data=None):
return self.rpc('getwork', data)
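# Usage sketch (host and credentials are hypothetical):
#
#   rpc = BitcoinRPC('127.0.0.1', 8332, 'rpcuser', 'rpcpass')
#   print rpc.getblockcount()
#   work = rpc.getwork()     # returns a dict with 'data' and 'target' hex strings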
def uint32(x):
return x & 0xffffffffL
def bytereverse(x):
return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
(((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))
def bufreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
word = struct.unpack('@I', in_buf[i:i+4])[0]
out_words.append(struct.pack('@I', bytereverse(word)))
return ''.join(out_words)
def wordreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
out_words.append(in_buf[i:i+4])
out_words.reverse()
return ''.join(out_words)
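# Quick illustration of the byte-order helpers above (example values):
#
#   bytereverse(0x12345678)        -> 0x78563412            (swap bytes in a word)
#   bufreverse('\x01\x02\x03\x04') -> '\x04\x03\x02\x01'    (on a little-endian host)
#   wordreverse('AAAABBBB')        -> 'BBBBAAAA'             (swap 32-bit words)
#
# getwork hands out data and target in a mixed byte/word order, which is why
# both reversals are applied before hashing and comparing against the target.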
class Miner:
def __init__(self, id):
self.id = id
self.max_nonce = MAX_NONCE
def work(self, datastr, targetstr):
# decode work data hex string to binary
static_data = datastr.decode('hex')
static_data = bufreverse(static_data)
# the first 76b of 80b do not change
blk_hdr = static_data[:76]
# decode 256-bit target value
targetbin = targetstr.decode('hex')
targetbin = targetbin[::-1] # byte-swap and dword-swap
targetbin_str = targetbin.encode('hex')
target = long(targetbin_str, 16)
# pre-hash first 76b of block header
static_hash = hashlib.sha256()
static_hash.update(blk_hdr)
for nonce in xrange(self.max_nonce):
# encode 32-bit nonce value
nonce_bin = struct.pack("<I", nonce)
# hash final 4b, the nonce value
hash1_o = static_hash.copy()
hash1_o.update(nonce_bin)
hash1 = hash1_o.digest()
# sha256 hash of sha256 hash
hash_o = hashlib.sha256()
hash_o.update(hash1)
hash = hash_o.digest()
# quick test for winning solution: high 32 bits zero?
if hash[-4:] != '\0\0\0\0':
continue
# convert binary hash to 256-bit Python long
hash = bufreverse(hash)
hash = wordreverse(hash)
hash_str = hash.encode('hex')
l = long(hash_str, 16)
# proof-of-work test: hash < target
if l < target:
print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
return (nonce + 1, nonce_bin)
else:
print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
# return (nonce + 1, nonce_bin)
return (nonce + 1, None)
def submit_work(self, rpc, original_data, nonce_bin):
nonce_bin = bufreverse(nonce_bin)
nonce = nonce_bin.encode('hex')
solution = original_data[:152] + nonce + original_data[160:256]
param_arr = [ solution ]
result = rpc.getwork(param_arr)
print time.asctime(), "--> Upstream RPC result:", result
def iterate(self, rpc):
work = rpc.getwork()
if work is None:
time.sleep(ERR_SLEEP)
return
if 'data' not in work or 'target' not in work:
time.sleep(ERR_SLEEP)
return
time_start = time.time()
(hashes_done, nonce_bin) = self.work(work['data'],
work['target'])
time_end = time.time()
time_diff = time_end - time_start
self.max_nonce = long(
(hashes_done * settings['scantime']) / time_diff)
if self.max_nonce > 0xfffffffaL:
self.max_nonce = 0xfffffffaL
if settings['hashmeter']:
print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
self.id, hashes_done,
(hashes_done / 1000.0) / time_diff)
if nonce_bin is not None:
self.submit_work(rpc, work['data'], nonce_bin)
def loop(self):
rpc = BitcoinRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpass'])
if rpc is None:
return
while True:
self.iterate(rpc)
def miner_thread(id):
miner = Miner(id)
miner.loop()
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: pyminer.py CONFIG-FILE"
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 8332
if 'threads' not in settings:
settings['threads'] = 1
if 'hashmeter' not in settings:
settings['hashmeter'] = 0
if 'scantime' not in settings:
settings['scantime'] = 30L
if 'rpcuser' not in settings or 'rpcpass' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['port'] = int(settings['port'])
settings['threads'] = int(settings['threads'])
settings['hashmeter'] = int(settings['hashmeter'])
settings['scantime'] = long(settings['scantime'])
thr_list = []
for thr_id in range(settings['threads']):
p = Process(target=miner_thread, args=(thr_id,))
p.start()
thr_list.append(p)
time.sleep(1) # stagger threads
print settings['threads'], "mining threads started"
print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
try:
for thr_proc in thr_list:
thr_proc.join()
except KeyboardInterrupt:
pass
print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])
|
liamcoau/WATCoin
|
watcoin/contrib/pyminer/pyminer.py
|
Python
|
apache-2.0
| 6,438
|
# -*- coding: utf-8 -*-
from openerp.http import request, STATIC_CACHE
from openerp.addons.web import http
import json
import io
from PIL import Image, ImageFont, ImageDraw
from openerp import tools
import cStringIO
import werkzeug.wrappers
import time
import logging
logger = logging.getLogger(__name__)
class Web_Editor(http.Controller):
#------------------------------------------------------
# Backend snippet
#------------------------------------------------------
@http.route('/web_editor/snippets', type='json', auth="user")
def snippets(self, **kwargs):
return request.env.ref('web_editor.snippets').render(None)
#------------------------------------------------------
# Backend html field
#------------------------------------------------------
@http.route('/web_editor/field/html', type='http', auth="user")
def FieldTextHtml(self, model=None, res_id=None, field=None, callback=None, **kwargs):
cr, uid, context = request.cr, request.uid, request.context
kwargs.update(
model=model,
res_id=res_id,
field=field,
datarecord=json.loads(kwargs['datarecord']),
debug='debug' in kwargs)
for k in kwargs:
if isinstance(kwargs[k], basestring) and kwargs[k].isdigit():
kwargs[k] = int(kwargs[k])
trans = dict(
lang=kwargs.get('lang', context.get('lang')),
translatable=kwargs.get('translatable'),
edit_translations=kwargs.get('edit_translations'),
editable=kwargs.get('enable_editor'))
context.update(trans)
kwargs.update(trans)
record = None
if model and kwargs.get('res_id'):
record = request.registry[model].browse(cr, uid, kwargs.get('res_id'), context)
kwargs.update(content=record and getattr(record, field) or "")
return request.render(kwargs.get("template") or "web_editor.FieldTextHtml", kwargs, uid=request.uid)
#------------------------------------------------------
# Backend html field in inline mode
#------------------------------------------------------
@http.route('/web_editor/field/html/inline', type='http', auth="user")
def FieldTextHtmlInline(self, model=None, res_id=None, field=None, callback=None, **kwargs):
kwargs['inline_mode'] = True
kwargs['dont_load_assets'] = not kwargs.get('enable_editor') and not kwargs.get('edit_translations')
return self.FieldTextHtml(model, res_id, field, callback, **kwargs)
#------------------------------------------------------
# convert font into picture
#------------------------------------------------------
@http.route([
'/web_editor/font_to_img/<icon>',
'/web_editor/font_to_img/<icon>/<color>',
'/web_editor/font_to_img/<icon>/<color>/<int:size>',
'/web_editor/font_to_img/<icon>/<color>/<int:size>/<int:alpha>',
], type='http', auth="none")
def export_icon_to_png(self, icon, color='#000', size=100, alpha=255, font='/web/static/lib/fontawesome/fonts/fontawesome-webfont.ttf'):
""" This method converts an unicode character to an image (using Font
Awesome font by default) and is used only for mass mailing because
custom fonts are not supported in mail.
:param icon : decimal encoding of unicode character
:param color : RGB code of the color
:param size : Pixels in integer
:param alpha : transparency of the image from 0 to 255
:param font : font path
:returns PNG image converted from given font
"""
# Make sure we have at least size=1
size = max(1, size)
# Initialize font
addons_path = http.addons_manifest['web']['addons_path']
font_obj = ImageFont.truetype(addons_path + font, size)
# if received character is not a number, keep old behaviour (icon is character)
icon = unichr(int(icon)) if icon.isdigit() else icon
# Determine the dimensions of the icon
image = Image.new("RGBA", (size, size), color=(0, 0, 0, 0))
draw = ImageDraw.Draw(image)
boxw, boxh = draw.textsize(icon, font=font_obj)
draw.text((0, 0), icon, font=font_obj)
left, top, right, bottom = image.getbbox()
# Create an alpha mask
imagemask = Image.new("L", (boxw, boxh), 0)
drawmask = ImageDraw.Draw(imagemask)
drawmask.text((-left, -top), icon, font=font_obj, fill=alpha)
# Create a solid color image and apply the mask
if color.startswith('rgba'):
color = color.replace('rgba', 'rgb')
color = ','.join(color.split(',')[:-1])+')'
iconimage = Image.new("RGBA", (boxw, boxh), color)
iconimage.putalpha(imagemask)
# Create output image
outimage = Image.new("RGBA", (boxw, size), (0, 0, 0, 0))
outimage.paste(iconimage, (left, top))
# output image
output = io.BytesIO()
outimage.save(output, format="PNG")
response = werkzeug.wrappers.Response()
response.mimetype = 'image/png'
response.data = output.getvalue()
response.headers['Cache-Control'] = 'public, max-age=604800'
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'GET, POST'
response.headers['Connection'] = 'close'
response.headers['Date'] = time.strftime("%a, %d-%b-%Y %T GMT", time.gmtime())
response.headers['Expires'] = time.strftime("%a, %d-%b-%Y %T GMT", time.gmtime(time.time()+604800*60))
return response
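# Example request served by the route above (icon code point and color are
# hypothetical): GET /web_editor/font_to_img/61441/%23FF0000/32 renders the
# Font Awesome glyph U+F001 as a red 32px-high PNG with a one-week
# Cache-Control header.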
#------------------------------------------------------
# add attachment (images or link)
#------------------------------------------------------
@http.route('/web_editor/attachment/add', type='http', auth='user', methods=['POST'])
def attach(self, func, upload=None, url=None, disable_optimization=None, **kwargs):
# the upload argument doesn't allow us to access the files if more than
# one file is uploaded, as upload only references the first file;
# therefore we have to recover the files from the request object
Attachments = request.registry['ir.attachment'] # registry for the attachment table
uploads = []
message = None
if not upload: # no image provided, storing the link and the image name
name = url.split("/").pop() # recover filename
attachment_id = Attachments.create(request.cr, request.uid, {
'name': name,
'type': 'url',
'url': url,
'public': True,
'res_model': 'ir.ui.view',
}, request.context)
uploads += Attachments.read(request.cr, request.uid, [attachment_id], ['name', 'mimetype', 'checksum', 'url'], request.context)
else: # images provided
try:
attachment_ids = []
for c_file in request.httprequest.files.getlist('upload'):
data = c_file.read()
try:
image = Image.open(cStringIO.StringIO(data))
w, h = image.size
if w*h > 42e6: # Nokia Lumia 1020 photo resolution
raise ValueError(
u"Image size excessive, uploaded images must be smaller "
u"than 42 million pixel")
if not disable_optimization and image.format in ('PNG', 'JPEG'):
data = tools.image_save_for_web(image)
except IOError, e:
pass
attachment_id = Attachments.create(request.cr, request.uid, {
'name': c_file.filename,
'datas': data.encode('base64'),
'datas_fname': c_file.filename,
'public': True,
'res_model': 'ir.ui.view',
}, request.context)
attachment_ids.append(attachment_id)
uploads += Attachments.read(request.cr, request.uid, attachment_ids, ['name', 'mimetype', 'checksum', 'url'], request.context)
except Exception, e:
logger.exception("Failed to upload image to attachment")
message = unicode(e)
return """<script type='text/javascript'>
window.parent['%s'](%s, %s);
</script>""" % (func, json.dumps(uploads), json.dumps(message))
#------------------------------------------------------
# remove attachment (images or link)
#------------------------------------------------------
@http.route('/web_editor/attachment/remove', type='json', auth='user')
def remove(self, ids, **kwargs):
""" Removes a web-based image attachment if it is used by no view (template)
Returns a dict mapping attachments which would not be removed (if any)
mapped to the views preventing their removal
"""
cr, uid, context = request.cr, request.uid, request.context
Attachment = request.registry['ir.attachment']
Views = request.registry['ir.ui.view']
attachments_to_remove = []
# views blocking removal of the attachment
removal_blocked_by = {}
for attachment in Attachment.browse(cr, uid, ids, context=context):
# in-document URLs are html-escaped, a straight search will not
# find them
url = tools.html_escape(attachment.local_url)
ids = Views.search(cr, uid, ["|", ('arch_db', 'like', '"%s"' % url), ('arch_db', 'like', "'%s'" % url)], context=context)
if ids:
removal_blocked_by[attachment.id] = Views.read(
cr, uid, ids, ['name'], context=context)
else:
attachments_to_remove.append(attachment.id)
if attachments_to_remove:
Attachment.unlink(cr, uid, attachments_to_remove, context=context)
return removal_blocked_by
|
ChawalitK/odoo
|
addons/web_editor/controllers/main.py
|
Python
|
gpl-3.0
| 10,264
|
from link import Wrapper
from link.utils import list_to_dataframe
class NoSqlDB(Wrapper):
"""
wraps a database connection and extends the functionality
to do tasks like put queries into dataframes
"""
def __init__(self, wrap_name = None, **kwargs):
if kwargs:
self.__dict__.update(kwargs)
#get the connection and pass it to Wrapper as the wrapped object
connection = self.create_connection()
super(NoSqlConnectionWrapper, self).__init__(wrap_name, connection)
def set_table(self, table):
self.table = table
def get_current_table(self, table=None):
"""
"""
if table:
return table
if self.table:
return self.table
raise Exception("No table defined or no default table")
def get(self, key, table=None):
"""
get the row or rows from a table (could do cool things with rows by
allowing for regex or searches)
"""
pass
def put(self, key, column, value, table=None):
"""
put a key or keys back to the nosqldb
"""
pass
def _host_to_hostport(self, host):
"""
turn your host into a (host, port) combo
"""
#need to figure out the standard db port
(ret_host, ret_port) = ("", 8080)
host_info = host.split(":")
if len(host_info)>1:
ret_port = host_info[1]
ret_host = host_info[0]
return (ret_host, int(ret_port))
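# Example: _host_to_hostport("thrift.example.com:9090") returns
# ("thrift.example.com", 9090); with no explicit port the default 8080 above
# is kept. (The host name is hypothetical.)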
def create_connection(self):
"""
Override this function to create a connection depending on the type
of database
:returns: connection to the database you want to use
"""
pass
NoSqlConnectionWrapper = NoSqlDB
class HbaseDB(NoSqlConnectionWrapper):
"""
A connection wrapper for an HBase database
"""
#from hbase import Hbase
def __init__(self, wrap_name=None, host=None, version='0.92'):
"""
A connection for an HBase instance. Requires that happybase is
installed into python
:param host: the host:port of the hbase thrift server
"""
self.version = version
(self.host, self.port) = self._host_to_hostport(host)
# TODO: Where would one configure the default port for link
super(HbaseNoSqlConnectionWrapper, self).__init__(wrap_name=wrap_name)
def create_connection(self):
"""
Override the create_connection from the NoSqlConnectionWrapper
class which gets called in its initializer
"""
import happybase
return happybase.Connection(self.host,
port=self.port,compat=self.version)
def __call__(self):
"""
Runs the command line hbase shell application
"""
self.run_command('hbase shell')
HbaseNoSqlConnectionWrapper = HbaseDB
class MongoDB(NoSqlConnectionWrapper):
"""
A connection wrapper for a MongoDB database
"""
def __init__(self, wrap_name=None, host=None, port=None, **kwargs):
"""
MongoDB wrapper to connect to mongo
:param host: the host of the mongodb server
"""
(self.host, self.port) = (host, port)
self.params = kwargs
# TODO: Where would one configure the default port for link
super(MongoDB, self).__init__(wrap_name=wrap_name)
def create_connection(self):
"""
Override the create_connection from the NoSqlConnectionWrapper
class which gets called in its initializer
"""
from pymongo import Connection
return Connection(self.host, port=self.port, **self.params)
def __call__(self):
"""
Runs the command line mongo shell application
"""
self.run_command('mongo')
class CassandraDB(NoSqlConnectionWrapper):
"""
A connection wrapper for a Cassandra cluster
"""
def __init__(self, wrap_name=None, nodes=None, default_fetch_size=None, **kwargs):
"""
CassandraDB wrapper to connect to a Cassandra cluster
:param nodes: a list of nodes to use for initial connection
"""
self.nodes = nodes
self.params = kwargs
self.default_fetch_size=default_fetch_size
# TODO: Where would one configure the default port for link
super(CassandraDB, self).__init__(wrap_name=wrap_name)
def create_connection(self):
"""
Override the create_connection from the NoSqlConnectionWrapper
class which gets called in its initializer
"""
from cassandra.cluster import Cluster
from cassandra.query import dict_factory
session = Cluster(self.nodes).connect()
# Don't return paged results
session.default_fetch_size = self.default_fetch_size
# Return in dictionary format for easy parsing to DataFrame
session.row_factory = dict_factory
return session
def __call__(self):
"""
Runs the command line cqlsh application
"""
self.run_command('cqlsh')
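# Minimal usage sketches (hosts are hypothetical, not part of this module):
#
#   mongo = MongoDB(wrap_name='mongo', host='localhost', port=27017)
#   cass  = CassandraDB(wrap_name='cass', nodes=['127.0.0.1'])
#
# Each subclass only differs in create_connection(); the connection object it
# returns is what NoSqlDB.__init__ hands to the Wrapper base class.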
|
uhjish/link
|
link/wrappers/nosqlwrappers.py
|
Python
|
apache-2.0
| 5,162
|
# -*- coding: iso-8859-1 -*-
import pygame,pdb,math
from pygame.locals import *
from .constantes import *
from .horloge import *
from .interrupteur import *
from .mobile import *
from .observable import *
class Joueur(Mobile, Observable):
"""Classe représentant le joueur"""
def __init__(self, jeu, gestionnaire, x, y, c, fichier=FICHIER_JOUEUR_PAR_DEFAUT, persoCharset=CHARSET_JOUEUR_PAR_DEFAUT, couleurTransparente=COULEUR_TRANSPARENTE_FICHIER_JOUEUR_PAR_DEFAUT, dureeDeplacement=DUREE_DEPLACEMENT_JOUEUR_PAR_DEFAUT, frequenceAnimation=FREQUENCE_ANIMATION_JOUEUR_PAR_DEFAUT, frequenceDeplacement=FREQUENCE_DEPLACEMENT_JOUEUR_PAR_DEFAUT, nom=NOM_EVENEMENT_JOUEUR_PAR_DEFAUT, directionDepart=DIRECTION_DEPART_MOBILE_PAR_DEFAUT, longueurSprite=LONGUEUR_SPRITE_PAR_DEFAUT, largeurSprite=LARGEUR_SPRITE_PAR_DEFAUT):
"""Initialise le joueur
Paramètres obligatoires :
<jeu> est l'application entière.
<gestionnaire> est une instance du gestionnaire d'évènements.
<x><y> est la position initiale du mobile en indices de tiles.
<c> est l'indice de la couche sur laquelle est posé le joueur.
Paramètres facultatifs :
<fichier> est le nom de l'image située dans le dossier des ressources qui représente le mobile. Valeur par défaut dans les constantes.
<persoCharset> désigne la partie du <fichier> correspondant au joueur. Valeur par défaut dans les constantes.
<couleurTransparente> désigne la couleur transparente du <fichier>. Valeur par défaut dans les constantes.
<dureeDeplacement> désigne, en millisecondes, le temps que doit prendre un déplacement d'un tile à un autre. Valeur par défaut dans les constantes.
<frequenceAnimation> désigne le nombre de millisecondes, au sein d'un tile ou pas, entre deux animations. Valeur par défaut dans les constantes.
<frequenceDeplacement> désigne le nombre de déplacements du joueur au sein d'un tile (sans qu'il y ait forcément animation). Valeur par déf. dans les constantes.
<nom> désigne le <str> identifiant l'évènement Joueur. Valeur par défaut dans les constantes.
<directionDepart> désigne la direction que prend le joueur au départ. Valeur par défaut dans les constantes.
<largeurSprite> est la largeur du sprite. Valeur par défaut dans les constantes.
<longueurSprite> est la longueur du sprite. Valeur par défaut dans les constantes."""
Observable.__init__(self, "_positionCarte.left", "_positionCarte.top", "_c")
Mobile.__init__(self,jeu,gestionnaire,nom,x,y,c,fichier,couleurTransparente,persoCharset,dureeDeplacement=dureeDeplacement,frequenceAnimation=frequenceAnimation,frequenceDeplacement=frequenceDeplacement, directionDepart=directionDepart)
self._directions = dict(Haut=False,Bas=False,Gauche=False,Droite=False)
self._derniereDirection = "Aucune"
self._regardDansDirection = dict(Haut=False, Bas=False, Droite=False, Gauche=False)
self._jeu.joueur, self._appuiValidation, self._regardAttente, self._directionAttente, self._mouvement = self, False, True, str(self._direction), False
self._persoCharset = persoCharset
self._positionSource = Rect(0, 0, longueurSprite, largeurSprite)
def traiter(self):
Mobile.traiter(self)
if self._etapeTraitement == 0:
self._initialiserDeplacement(1, joueur=True, appuiJoueur=False, direction=self._directionRegard)
self._xDepart, self._yDepart = self._positionCarte.left, self._positionCarte.top
self._nomCarte = self._boiteOutils.nomCarte
hauteurTile = self._jeu.carteActuelle.hauteurTile
self._etapeTraitement += 1
if self._etapeTraitement == 1:
self._analyserAction()
self._deplacement()
def transfertCarte(self, x, y, c, direction):
"""Prépare l'apparition sur une nouvelle carte, en <x><y><c> avec un regard en <direction>."""
self._cOld, self._c = c, c
self._positionCarte.left, self._positionCarte.top = x * self._jeu.carteActuelle.hauteurTile, y * self._jeu.carteActuelle.hauteurTile
self._etapeMarche, self._etapeTraitement, self._pixelsParcourus, self._mouvement = 1, 0, 0, False
self._derniereDirection = "Aucune"
self._regardDansDirection = dict(Haut=False, Bas=False, Droite=False, Gauche=False)
self._direction, self._directionRegard = direction, direction
def _analyserAction(self):
"""Analyse la touche fléchée sur laquelle a appuyé le joueur et ajuste la direction du prochain déplacement en conséquence"""
event = self._jeu.event
if event.type == KEYDOWN and self._boiteOutils.joueurLibre.voir() is True:
if event.key == K_z:
self._gestionnaire.registerPosition(self._nom,int(self._xDepart/32),int(self._yDepart/32),self._c, joueur=True, appuiJoueur=True, direction=self._directionRegard)
self._appuiValidation = True
if event.key == K_LEFT or event.key == K_RIGHT or event.key == K_DOWN or event.key == K_UP:
if event.key == K_LEFT:
self._directions["Gauche"] = True
self._derniereDirection = "Gauche"
elif event.key == K_RIGHT:
self._directions["Droite"] = True
self._derniereDirection = "Droite"
elif event.key == K_UP:
self._directions["Haut"] = True
self._derniereDirection = "Haut"
elif event.key == K_DOWN:
self._directions["Bas"] = True
self._derniereDirection = "Bas"
elif event.type == KEYUP:
if event.key == K_z:
self._appuiValidation = False
if event.key == K_LEFT or event.key == K_RIGHT or event.key == K_DOWN or event.key == K_UP:
if event.key == K_LEFT:
self._directions["Gauche"] = False
elif event.key == K_RIGHT:
self._directions["Droite"] = False
elif event.key == K_UP:
self._directions["Haut"] = False
elif event.key == K_DOWN:
self._directions["Bas"] = False
if self._etapeMarche == 1:
nombreAppuis = 0
for (directionActuelle,booleen) in self._directions.items():
if booleen is True:
nombreAppuis += 1
directionChoisie = str(directionActuelle)
if nombreAppuis == 0: #When no key is held, there is no direction
self._majRegards()
self._direction = "Aucune"
elif nombreAppuis == 1: #When exactly one key is held, the direction is that of the single key
self._majRegards()
self._direction = str(directionChoisie)
elif nombreAppuis > 1: #When several keys are held, take the most recent one
self._direction = str(self._derniereDirection)
self._majRegards()
def _majRegards(self):
"""Indique que le joueur doit, avant de se déplacer, regarder dans la direction de déplacement sauf dans celle qu'il emprunte déjà"""
for directionActuelle,regard in self._regardDansDirection.items():
if directionActuelle != self._direction and self._direction != "Aucune":
self._regardDansDirection[directionActuelle] = False
def teleporterSurPosition(self, xTile, yTile, direction, couche=-1):
if couche == -1:
couche = self._c
self._ajusterPositionSource(False, direction)
self._positionCarteOld.left, self._positionCarteOld.top = self._positionCarte.left, self._positionCarte.top
self._xTilePrecedent, self._yTilePrecedent = self._xTile, self._yTile
self._positionCarte.left, self._positionCarte.top = xTile*self._jeu.carteActuelle.hauteurTile, yTile*self._jeu.carteActuelle.hauteurTile
self._boiteOutils.teleporterSurPosition(self._positionCarte, couche, self._positionSource, self._nomTileset, self._couleurTransparente, self._nom)
self._gestionnaire.registerPosition(self._nom, self._xTile, self._yTile, couche, joueur=True, appuiJoueur=False, direction=self._directionRegard)
self._xDepart, self._yDepart = self._positionCarte.left, self._positionCarte.top
Horloge.initialiser(id(self), 1, 1)
self._jeu.carteActuelle.initialiserScrolling(self._positionCarte.left, self._positionCarte.top)
self._etapeMarche, self._pixelsParcourus, self._regardAttente, self._directionAttente = 1,0, False, str(self._direction)
def _deplacement(self):
"""Gère le déplacement"""
hauteurTile = self._jeu.carteActuelle.hauteurTile
carte = self._jeu.carteActuelle
direction = self._direction
if direction != "Aucune":
if self._regardDansDirection[direction] == True: #If the player is already facing this direction
if Horloge.sonner(id(self), 1) is True: #If the wait before the next walking step has elapsed
if self._pixelsParcourus < hauteurTile: #If the move is not finished yet
deplacementPossible = False
(self._positionCarteFuture.left, self._positionCarteFuture.top) = self._coordonneesEtapeMarcheSuivante(direction=direction)
if self._etapeMarche == 1:
self._positionCarteSuivante = self._getPositionCarteSuivante(direction)
self._xTilePrecedent, self._yTilePrecedent = self._xTile, self._yTile
self._xTileSuivant, self._yTileSuivant = self._positionCarteSuivante.left/32, self._positionCarteSuivante.top/32
deplacementPossible = self._jeu.carteActuelle.deplacementPossible(self._positionCarteSuivante, self._c, self._nom)
if deplacementPossible is True or self._etapeMarche > 1:
if self._etapeMarche == 1:
carte.verifierScrollingPossible(self._xDepart, self._yDepart, direction)
self._positionCarteOld.left, self._positionCarteOld.top = self._positionCarte.left, self._positionCarte.top
self._positionCarte.left, self._positionCarte.top = self._positionCarteFuture.left, self._positionCarteFuture.top
if direction == "Haut" or direction == "Bas":
avancee = self._positionCarte.top - self._positionCarteOld.top
elif direction == "Gauche" or direction == "Droite":
avancee = self._positionCarte.left - self._positionCarteOld.left
carte.gererScrolling(avancee,direction)
if self._determinerAnimation():
self._ajusterPositionSource(self._enMarche.voir(), direction) #Find the source position of the walking character
self._jeu.carteActuelle.poserPNJ(self._positionCarte, self._c, self._positionSource, self._nomTileset, self._couleurTransparente, self._nom, positionCarteSuivante=self._positionCarteSuivante)
Horloge.initialiser(id(self), 1, self._dureeMicroDeplacement, comptageTempsPasse=True) #Start another walking step
self._pixelsParcourus += self._getProchainNombrePixels()
self._etapeMarche += 1
else: #Collision: the tile cannot be left, so reset
self._pixelsParcourus, self._etapeMarche, self._directionAttente = 0, 1, str(self._direction)
Horloge.initialiser(id(self), 1, 1)
self._direction = "Aucune"
else: #The move is finished, so reset
self._gestionnaire.registerPosition(self._nom, self._xTile, self._yTile, self._c, joueur=True, appuiJoueur=False, direction=self._directionRegard)
self._xDepart, self._yDepart = self._positionCarte.left, self._positionCarte.top
Horloge.initialiser(id(self), 1, 1)
self._etapeMarche, self._pixelsParcourus, self._regardAttente, self._directionAttente = 1,0, False, str(self._direction)
self._direction = "Aucune"
if self._positionCarte.left != self._positionCarteOld.left or self._positionCarte.top != self._positionCarteOld.top:
self._mouvement = True
else:
self._mouvement = False
else:
nouvelleAnimation = self._determinerAnimation() #Check whether a new animation frame is needed
if nouvelleAnimation: #Apply it if so
self._ajusterPositionSource(self._enMarche.voir(), direction)
self._jeu.carteActuelle.poserPNJ(self._positionCarte, self._c, self._positionSource, self._nomTileset, self._couleurTransparente, self._nom)
else: #Not yet facing that direction: turn to face it first
self._positionCarteOld.left, self._positionCarteOld.top = self._positionCarte.left, self._positionCarte.top
self._ajusterPositionSource(False, direction) #Find the source position of the walking character
self._jeu.carteActuelle.poserPNJ(self._positionCarte, self._c, self._positionSource, self._nomTileset, self._couleurTransparente, self._nom)
self._regardDansDirection[direction] = True
else:
if self._regardAttente is False:
self._ajusterPositionSource(False, self._directionAttente) #Find the source position of the walking character
self._jeu.carteActuelle.poserPNJ(self._positionCarte, self._c, self._positionSource, self._nomTileset, self._couleurTransparente, self._nom)
self._regardAttente = True
def _getDirection(self):
return self._direction
def _getAppuiValidation(self):
return self._appuiValidation
def _getMouvement(self):
return self._mouvement
direction = property(_getDirection)
appuiValidation = property(_getAppuiValidation)
mouvement = property(_getMouvement)
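# A minimal, pygame-free sketch of the direction-resolution policy applied in
# _analyserAction above: with no key held there is no direction, with exactly
# one key held that direction wins, and with several keys held the most
# recently pressed one wins. The helper below is illustrative only and is not
# part of the original class; its name is hypothetical.
def resoudre_direction(directions, derniere_direction):
    """directions: dict such as {"Gauche": bool, ...}; derniere_direction: last key pressed."""
    appuis = [nom for (nom, presse) in directions.items() if presse]
    if not appuis:
        return "Aucune"
    if len(appuis) == 1:
        return appuis[0]
    return derniere_direction
# Example: Gauche and Haut are both held, Haut was pressed last -> "Haut" wins.
# resoudre_direction({"Gauche": True, "Droite": False, "Haut": True, "Bas": False}, "Haut")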
|
Rastagong/narro
|
Releases/0.1/narro/joueur.py
|
Python
|
mit
| 14,568
|
from office365.entity import Entity
from office365.outlook.calendar.email_address import EmailAddress
class CalendarPermission(Entity):
"""
The permissions of a user with whom the calendar has been shared or delegated in an Outlook client.
Get, update, and delete of calendar permissions are supported only on behalf of the calendar owner.
Getting the calendar permissions of a calendar on behalf of a sharee or delegate returns
an empty calendar permissions collection.
Once a sharee or delegate has been set up for a calendar, you can update only the role property to change
the permissions of a sharee or delegate. You cannot update the allowedRoles, emailAddress, isInsideOrganization,
or isRemovable property. To change these properties, you should delete the corresponding calendarPermission
object and create another sharee or delegate in an Outlook client.
"""
@property
def email_address(self):
"""
Represents a sharee or delegate who has access to the calendar.
For the "My Organization" sharee, the address property is null. Read-only.
"""
return self.properties.get("emailAddress", EmailAddress())
|
vgrem/Office365-REST-Python-Client
|
office365/outlook/calendar/calendar_permission.py
|
Python
|
mit
| 1,204
|
## Week 3: Solving Minimization Problems
# Extending the client project example from last week.
# The project is worked by 2 types of workers (Manager, Offshore).
# The manager worked x hours on the project
# and offshore staff worked y hours, for at least 1200 hours.
# Managers cost 150/hour but bring in revenue of 350/hour.
# Offshore staff cost 35/hour but bring in revenue of 50/hour.
# We need to minimize the project cost and bring in revenue of at least 150,000.
# Below we solve it with the simplex method, implemented in Python via scipy.optimize.linprog.
# Minimize: c = 150x + 35y
# Subject to: x + y ≥ 1,200
# 350x + 50y ≥ 150,000
# with: x, y ≥ 0.
from scipy.optimize import linprog as lp
from matplotlib.pyplot import *
from numpy import *
c = [-150, -35]
A = [[1, 1], [350, 50]]
b = [1200, 150000]
x0_bounds = (0, None)
x1_bounds = (0, None)
res = lp(c, A_ub = A, b_ub = b,
bounds = (x0_bounds, x1_bounds),
options = {"disp": True})
print(res)
# fun: -76500.0
# message: 'Optimization terminated successfully.'
# nit: 2
# slack: array([ 0., 0.])
# status: 0
# success: True
# x: array([ 300., 900.])
x = arange(0,3000,1)
y0 = arange(0,3000,1)
y1 = 1200.0 - x
y2 = 3000.0 - 7.0 * x
xlim(0,1250)
ylim(0,3500)
plot(x, y1, color = 'g')
plot(x, y2, color = 'b')
plot([300], [900], 'rs')
xfill = [0, 0, 300, 1200, 1250, 1250, 0]
yfill = [3500, 3000, 900, 0, 0, 3500, 3500]
fill_betweenx(yfill, xfill, 1250, color = 'grey', alpha = 0.2)
show()
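# The formulation above keeps the "<=" convention expected by linprog and
# negates the objective instead; its optimum lands on the same vertex
# (x = 300, y = 900), but the reported fun is the negated cost (-76,500).
# A sketch of the direct formulation is shown below, assuming the same data:
# minimize the cost and turn each ">=" constraint into a "<=" constraint by
# multiplying both sides by -1.
c_direct = [150, 35]                  # minimize 150x + 35y
A_direct = [[-1, -1], [-350, -50]]    # -x - y <= -1200 ; -350x - 50y <= -150000
b_direct = [-1200, -150000]
res_direct = lp(c_direct, A_ub = A_direct, b_ub = b_direct,
                bounds = (x0_bounds, x1_bounds))
print(res_direct)                     # expected optimum: x = [300, 900], fun = 76500 (minimum cost)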
|
sgranitz/northwestern
|
predict400/week3.py
|
Python
|
mit
| 1,482
|
"""
WSGI config for vcal_webgui project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os, sys
#make sure the next two lines are commented out to run locally.
#sys.path.append('/home/sachs/Documents/VirtualCalifornia/trunk/vcal_webgui')
#sys.path.append('/home/sachs/Documents/VirtualCalifornia/trunk/vcal_py/classes')
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "vcal_webgui.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "vcal_webgui.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
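# A minimal sketch of the WSGI middleware mentioned above: it wraps the Django
# application object and appends one response header. The class name and the
# header are illustrative only.
class HeaderMiddleware(object):
    def __init__(self, app):
        self.app = app
    def __call__(self, environ, start_response):
        def custom_start_response(status, headers, exc_info=None):
            headers = list(headers) + [("X-Served-By", "vcal_webgui")]
            return start_response(status, headers, exc_info)
        return self.app(environ, custom_start_response)
# To enable it, wrap the application object defined above:
# application = HeaderMiddleware(application)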
|
mksachs/WebVC
|
vcal_webgui/wsgi.py
|
Python
|
mit
| 1,663
|
"""
The md5 and sha modules are deprecated since Python 2.5, replaced by the
hashlib module containing both hash algorithms. Here, we provide a common
interface to the md5 and sha constructors, depending on system version.
"""
import sys
import warnings
warnings.warn("django.utils.hashcompat is deprecated; use hashlib instead",
PendingDeprecationWarning)
if sys.version_info >= (2, 5):
import hashlib
md5_constructor = hashlib.md5
md5_hmac = md5_constructor
sha_constructor = hashlib.sha1
sha_hmac = sha_constructor
else:
import md5
md5_constructor = md5.new
md5_hmac = md5
import sha
sha_constructor = sha.new
sha_hmac = sha
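# Usage sketch (illustrative; byte strings as in Python 2): callers import the
# constructors from this module rather than hashlib/md5/sha directly, so the
# same call works on either side of the Python 2.5 boundary.
#
#     from django.utils.hashcompat import md5_constructor, sha_constructor
#     digest = md5_constructor('some data').hexdigest()
#     signature = sha_constructor('some data').hexdigest()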
|
skevy/django
|
django/utils/hashcompat.py
|
Python
|
bsd-3-clause
| 688
|
# -*-*- encoding: utf-8 -*-*-
#
# gateway4labs is free software: you can redistribute it and/or modify
# it under the terms of the BSD 2-Clause License
# gateway4labs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
import json
import hashlib
import urllib2
from yaml import load as yload
from wtforms.fields import PasswordField
from flask.ext.wtf import Form, validators, TextField
from flask import request, redirect, url_for, session, Markup
from flask.ext import wtf
from flask.ext.admin import Admin, AdminIndexView, BaseView, expose
from flask.ext.admin.contrib.sqlamodel import ModelView
from flask.ext.login import current_user
from labmanager import ALGORITHM
from labmanager.babel import gettext, lazy_gettext
from labmanager.models import LtUser, Course, Laboratory, PermissionToLt, PermissionToCourse, RequestPermissionLT
from labmanager.views import RedirectView
from labmanager.db import db
from labmanager.rlms import get_manager_class
import labmanager.forms as forms
from labmanager.utils import data_filename
config = yload(open(data_filename('labmanager/config/config.yml')))
#################################################################
#
# Base class
#
class PleAuthManagerMixin(object):
def is_accessible(self):
if not current_user.is_authenticated():
return False
return session['usertype'] == 'lms' and current_user.access_level == 'admin'
class L4lPleModelView(PleAuthManagerMixin, ModelView):
def _handle_view(self, name, **kwargs):
if not self.is_accessible():
return redirect(url_for('login_ple', next=request.url))
return super(L4lPleModelView, self)._handle_view(name, **kwargs)
class L4lPleAdminIndexView(PleAuthManagerMixin, AdminIndexView):
def _handle_view(self, name, **kwargs):
if not self.is_accessible():
return redirect(url_for('login_ple', next=request.url))
return super(L4lPleAdminIndexView, self)._handle_view(name, **kwargs)
class L4lPleView(PleAuthManagerMixin, BaseView):
def __init__(self, session, **kwargs):
self.session = session
super(L4lPleView, self).__init__(**kwargs)
def _handle_view(self, name, **kwargs):
if not self.is_accessible():
return redirect(url_for('login_ple', next=request.url))
return super(L4lPleView, self)._handle_view(name, **kwargs)
##############################################
#
# Index
#
class PleAdminPanel(L4lPleAdminIndexView):
@expose()
def index(self):
return self.render("ple_admin/index.html")
###############################################
#
# User management
#
class PleUsersPanel(L4lPleModelView):
can_delete = True
can_edit = False
can_create = True
column_list = ['login', 'full_name', 'access_level']
form_columns = ('login','full_name', 'access_level', 'password')
column_labels = dict(login = lazy_gettext('login'),
full_name = lazy_gettext('full_name'),
access_level = lazy_gettext('access_level'),
password = lazy_gettext('password'))
sel_choices = [(level, level.title()) for level in config['user_access_level']]
form_overrides = dict(password=PasswordField, access_level=wtf.SelectField)
form_args = dict( access_level=dict( choices=sel_choices ),
login=dict(validators=forms.USER_LOGIN_DEFAULT_VALIDATORS[:]),
password=dict(validators=forms.USER_PASSWORD_DEFAULT_VALIDATORS[:]))
def __init__(self, session, **kwargs):
super(PleUsersPanel, self).__init__(LtUser, session, **kwargs)
def get_query(self, *args, **kwargs):
query_obj = super(PleUsersPanel, self).get_query(*args, **kwargs)
query_obj = query_obj.filter_by(lt = current_user.lt)
return query_obj
def get_count_query(self, *args, **kwargs):
query_obj = super(PleUsersPanel, self).get_count_query(*args, **kwargs)
query_obj = query_obj.filter_by(lt = current_user.lt)
return query_obj
def on_model_change(self, form, model):
# TODO: don't update password always
# Edit is false, so there is no possibility of changing data
model.lt = current_user.lt
model.password = unicode(hashlib.new(ALGORITHM,model.password.encode('utf8')).hexdigest())
def create_permission_to_lms_filter(session):
def filter():
return session.query(PermissionToLt).filter_by(lt = current_user.lt)
return staticmethod(filter)
def create_course_filter(session):
def filter():
return session.query(Course).filter_by(lt = current_user.lt)
return staticmethod(filter)
###############################################
#
# Laboratories
#
def local_id_formatter(v, c, laboratory, p):
for permission in laboratory.lab_permissions:
if permission.lt == current_user.lt:
return permission.local_identifier
return gettext('N/A')
def list_widgets_formatter(v, c, laboratory, p):
return Markup('<a href="%s">%s</a>' % (url_for('.list_widgets', local_identifier = local_id_formatter(v, c, laboratory, p)), gettext("list")))
def accessibility_formatter(v, c, lab, p):
mylt = current_user.lt
permission = db.session.query(PermissionToLt).filter_by(lt = mylt, laboratory = lab).first()
if not permission:
return gettext(u"Invalid permission")
if permission.accessible:
currently = gettext('This lab IS accessible')
labaccessible = 'false'
klass = 'btn-danger'
msg = gettext('Make not accessible')
else:
currently = gettext(u'This lab is NOT accessible')
labaccessible = 'true'
klass = 'btn-success'
msg = gettext('Make accessible')
return Markup("""<form method='POST' action='%(url)s' style="text-align: center">
%(currently)s <BR>
<input type='hidden' name='accessible_value' value='%(accessible_value)s'/>
<input type='hidden' name='permission_to_lt_id' value='%(permission_id)s'/>
<input class='btn %(klass)s' type='submit' value="%(msg)s"></input>
</form>""" % dict(
url = url_for('.change_accessibility'),
accessible_value = labaccessible,
permission_id = permission.id,
klass = klass,
msg = msg,
currently = currently,
))
def request_formatter(v, c, lab, p):
mylt = current_user.lt
laboratory_available = db.session.query(Laboratory).filter_by(available = '1', id = lab.id).first()
permission = db.session.query(PermissionToLt).filter_by(lt = mylt, laboratory = laboratory_available).first()
request = db.session.query(RequestPermissionLT).filter_by(lt = mylt, laboratory = laboratory_available).first()
# if there is not a pending request ...
if not request:
if not permission:
currently = gettext(u'Available for request')
lab_request = 'true'
klass = 'btn-success'
msg = gettext(u'Request laboratory')
permission_to_lt_id = ''
lab_id = lab.id
else:
currently = gettext(u'Ready to use')
lab_request = 'false'
klass = 'btn-danger'
msg = gettext(u'Delete laboratory')
permission_to_lt_id = permission.id
lab_id = lab.id
return Markup("""<form method='POST' action='%(url)s' style="text-align: center">
%(currently)s <BR>
<input type='hidden' name='lab_request' value='%(lab_request)s'/>
<input type='hidden' name='permission_to_lt_id' value='%(permission_to_lt_id)s'/>
<input type='hidden' name='lab_id' value='%(lab_id)s'/>
<input class='btn %(klass)s' type='submit' value="%(msg)s"></input>
</form>""" % dict(
url = url_for('.lab_request'),
lab_request = lab_request,
permission_to_lt_id = permission_to_lt_id,
klass = klass,
msg = msg,
currently = currently,
lab_id = lab_id,
))
else: # if there is a pending request, offer the possibility to cancel such request, or wait until the labmanager admin processes it.
currently = gettext(u'Access request pending')
klass = 'btn-danger'
msg = gettext(u'Delete access request')
return Markup("""<form method='POST' action='%(url)s' style="text-align: center">
%(currently)s <BR>
<input type='hidden' name='request_id' value='%(request_id)s'/>
<input class='btn %(klass)s' type='submit' value="%(msg)s"></input>
</form>""" % dict(
url = url_for('.cancel_lab_request'),
currently = currently,
request_id = request.id,
klass = klass,
msg = msg,
))
class PleInstructorLaboratoriesPanel(L4lPleModelView):
can_delete = False
can_edit = False
can_create = False
column_list = ['rlms', 'name', 'laboratory_id', 'local_identifier', 'widgets', 'accessible']
column_formatters = dict( local_identifier = local_id_formatter, widgets = list_widgets_formatter, accessible = accessibility_formatter )
column_labels = dict(rlms = lazy_gettext('rlms'),
name = lazy_gettext('name'),
laboratory_id = lazy_gettext('laboratory_id'),
local_identifier = lazy_gettext('local_identifier'),
widgets = lazy_gettext('widgets'),
accessible = lazy_gettext('accessible'))
column_descriptions = dict( accessible = lazy_gettext("Make this laboratory automatically accessible by any Graasp space belonging to the institution represented by this Learning Tool"))
def __init__(self, session, **kwargs):
super(PleInstructorLaboratoriesPanel, self).__init__(Laboratory, session, **kwargs)
def get_query(self, *args, **kwargs):
query_obj = super(PleInstructorLaboratoriesPanel, self).get_query(*args, **kwargs)
query_obj = query_obj.join(PermissionToLt).filter_by(lt = current_user.lt)
return query_obj
def get_count_query(self, *args, **kwargs):
query_obj = super(PleInstructorLaboratoriesPanel, self).get_count_query(*args, **kwargs)
query_obj = query_obj.join(PermissionToLt).filter_by(lt = current_user.lt)
return query_obj
@expose("/widgets/<local_identifier>/")
def list_widgets(self, local_identifier):
laboratory = self.session.query(Laboratory).join(PermissionToLt).filter_by(lt = current_user.lt, local_identifier = local_identifier).first()
if laboratory is None:
return self.render("ple_admin/errors.html", message = gettext("Laboratory not found"))
rlms_db = laboratory.rlms
RLMS_CLASS = get_manager_class(rlms_db.kind, rlms_db.version, rlms_db.id)
rlms = RLMS_CLASS(rlms_db.configuration)
widgets = rlms.list_widgets(laboratory.laboratory_id)
return self.render("ple_admin/list_widgets.html", widgets = widgets, institution_id = current_user.lt.name, lab_name = local_identifier)
@expose('/lab', methods = ['POST'])
def change_accessibility(self):
isaccessible = unicode(request.form['accessible_value']).lower() == 'true'
lt = current_user.lt
permission_id = int(request.form['permission_to_lt_id'])
permission = self.session.query(PermissionToLt).filter_by(id = permission_id, lt = lt).first()
if permission:
permission.accessible = isaccessible
self.session.commit()
return redirect(url_for('.index_view'))
class PleInstructorRequestLaboratoriesPanel(L4lPleModelView):
can_delete = False
can_edit = False
can_create = False
column_list = ['rlms', 'name', 'laboratory_id', 'request_access']
column_labels = dict(rlms=lazy_gettext('rlms'), name=lazy_gettext('name'), laboratory_id=lazy_gettext('laboratory_id'), request_access=lazy_gettext('request_access'))
column_formatters = dict(request_access = request_formatter)
column_descriptions = dict( request_access = lazy_gettext("Request access to a lab. The Labmanager admin must accept or deny your request."))
def __init__(self, session, **kwargs):
super(PleInstructorRequestLaboratoriesPanel, self).__init__(Laboratory, session, **kwargs)
def get_query(self, *args, **kwargs):
query_obj = super(PleInstructorRequestLaboratoriesPanel, self).get_query(*args, **kwargs)
#laboratories_query = self.session.query(Laboratory).filter_by(available = '1')
query_obj = query_obj.filter_by(available = '1')
# laboratories_query = self.session.query(Laboratory).filter_by(available = '1').join(PermissionToLt).filter_by(lt = current_user.lt)
return query_obj
def get_count_query(self, *args, **kwargs):
query_obj = super(PleInstructorRequestLaboratoriesPanel, self).get_count_query(*args, **kwargs)
query_obj = query_obj.filter_by(available = '1')
return query_obj
@expose('/lab_request', methods = ['GET','POST'])
def lab_request(self):
lab_request = unicode(request.form['lab_request']).lower() == 'true'
# if lab_request == true, then request the creation of permission. Else, delete the permission over this lab for this lt (no need to ask the labmanager admin to do this).
if lab_request:
lab_id = unicode(request.form['lab_id'])
lab = self.session.query(Laboratory).filter_by(id = lab_id).first()
request_perm = RequestPermissionLT(lt = current_user.lt, laboratory = lab, local_identifier = lab.default_local_identifier)
self.session.add(request_perm)
else:
permission_id = unicode(request.form['permission_to_lt_id'])
permission = self.session.query(PermissionToLt).filter_by(id = permission_id).first()
self.session.delete(permission)
self.session.commit()
return redirect(url_for('.index_view'))
@expose('/cancel_lab_request', methods = ['GET','POST'])
def cancel_lab_request(self):
req_id = unicode(request.form['request_id'])
req = self.session.query(RequestPermissionLT).filter_by(id = req_id).first()
self.session.delete(req)
self.session.commit()
return redirect(url_for('.index_view'))
#################################################
#
# Course management
#
def format_space_url(v, c, space, p):
shindig_url = space.lt.shindig_credentials[0]
assert shindig_url or True # Avoid pyflakes warnings
# shindig_space_url = '%s/rest/spaces/%s' % (shindig_url, space.context_id)
# contents = urllib2.urlopen(shindig_space_url).read()
# return json.loads(contents)['urls'][0]['value']
return Markup('<a target="_blank" href="https://graasp.epfl.ch/#item=space_%s">%s</a>' % (space.context_id, gettext('link')))
class PleSpacesPanel(L4lPleModelView):
can_create = can_edit = False
column_list = ['name', 'context_id', 'url']
form_columns = ('name', 'context_id')
column_formatters = dict( url = format_space_url )
column_labels = dict(name = lazy_gettext('name'),
context_id = lazy_gettext('context_id'),
url = lazy_gettext('url'))
def __init__(self, session, **kwargs):
super(PleSpacesPanel, self).__init__(Course, session, **kwargs)
def get_query(self, *args, **kwargs):
query_obj = super(PleSpacesPanel, self).get_query(*args, **kwargs)
query_obj = query_obj.filter_by(lt = current_user.lt)
return query_obj
def get_count_query(self, *args, **kwargs):
query_obj = super(PleSpacesPanel, self).get_count_query(*args, **kwargs)
query_obj = query_obj.filter_by(lt = current_user.lt)
return query_obj
def on_model_change(self, form, model):
model.lt = current_user.lt
class SpaceUrlForm(Form):
url = TextField(lazy_gettext('Space URL'), [validators.Length(min=6, max=200),
validators.URL()], description = lazy_gettext("Drop here the URL of the Space."), default = "http://graasp.epfl.ch/#item=space_1234")
def retrieve_space_name(numeric_identifier):
# Retrieve the space name from Shindig
shindig_url = current_user.lt.shindig_credentials[0].shindig_url
shindig_space_url = '%s/rest/spaces/%s' % (shindig_url, numeric_identifier)
shindig_space_contents_json = urllib2.urlopen(shindig_space_url).read()
shindig_space_contents = json.loads(shindig_space_contents_json)
space_name = shindig_space_contents.get('entry', {}).get('displayName')
return space_name
def create_new_space(numeric_identifier, space_name):
# Create the space
context_id = unicode(numeric_identifier)
course = Course(name = space_name, lt = current_user.lt, context_id = context_id)
# Add it to the database
db.session.add(course)
return course
def parse_space_url(url):
""" Given a Graasp URL, retrieve the space identifier (a number) """
# This is done in a different way if the space url ends with a number (the url contains space_) or if the url ends with a text (the url contains url=)
if 'space_' in url:
try:
context_id = int(url.split('space_')[1])
except:
raise Exception(gettext("Invalid format. Expected space_NUMBER"))
else:
return context_id
elif 'url=' in url:
try:
space_name = url.split('url=')[1]
json_file = 'http://graasp.epfl.ch/item3a/' + space_name + '.json'
json_response = urllib2.urlopen(json_file)
contents=json.loads(json_response.read())
context_id=contents['id']
return context_id
except:
raise Exception(gettext("Invalid format. Expected a valid Graasp space URL"))
raise Exception(gettext("Invalid format. Expected http://graasp.epfl.ch/#item=space_SOMETHING"))
class PleNewSpacesPanel(L4lPleView):
courses_panel_endpoint = 'ple_admin_courses'
@expose(methods = ['GET', 'POST'])
def index(self):
form = SpaceUrlForm()
permissions = current_user.lt.lab_permissions
lab_ids = dict([
(permission.local_identifier, {
'name' : permission.laboratory.name,
'checked' : request.form.get('lab_%s' % permission.local_identifier, 'off') in ('checked', 'on')
})
for permission in permissions ])
request_space_name = False
if form.validate_on_submit():
try:
context_id = parse_space_url(form.url.data)
except Exception as e:
form.url.errors.append(e.message)
else:
existing_course = self.session.query(Course).filter_by(lt = current_user.lt, context_id = context_id).first()
if existing_course:
form.url.errors.append(gettext(u"Space already registered"))
else:
space_name = retrieve_space_name(context_id)
# If space_name can not be retrieved (e.g., a closed or hidden space)
if not space_name:
# Try to get it from the form.
space_name = request.form.get('space_name')
# If it was possible, add the new space
if space_name:
course = create_new_space(context_id, space_name or gettext('Invalid name'))
labs_to_grant = [ lab_id for lab_id in lab_ids if lab_ids[lab_id]['checked'] ]
for lab_to_grant in labs_to_grant:
permission = [ permission for permission in permissions if permission.local_identifier == lab_to_grant ][0]
permission_to_course = PermissionToCourse(course = course, permission_to_lt = permission)
db.session.add(permission_to_course)
db.session.commit()
return redirect(url_for('%s.index_view' % self.courses_panel_endpoint))
# But if it was not possible to add it, add a new field called space_name
else:
request_space_name = True
return self.render("ple_admin/new_space.html", form = form, lab_ids = lab_ids, request_space_name = request_space_name)
class PlePermissionToSpacePanel(L4lPleModelView):
form_args = dict(
permission_to_lt = dict(query_factory = lambda : PlePermissionToSpacePanel.permission_to_lms_filter()),
course = dict(query_factory = lambda : PlePermissionToSpacePanel.course_filter()),
)
column_labels = dict(
permission_to_lt = lazy_gettext('Permission'),
course = lazy_gettext('Space'),
configuration = lazy_gettext('Configuration'),
)
def __init__(self, session, **kwargs):
super(PlePermissionToSpacePanel, self).__init__(PermissionToCourse, session, **kwargs)
PlePermissionToSpacePanel.permission_to_lms_filter = create_permission_to_lms_filter(self.session)
PlePermissionToSpacePanel.course_filter = create_course_filter(self.session)
def get_query(self, *args, **kwargs):
query_obj = super(PlePermissionToSpacePanel, self).get_query(*args, **kwargs)
query_obj = query_obj.join(Course).filter_by(lt = current_user.lt)
return query_obj
def get_count_query(self, *args, **kwargs):
query_obj = super(PlePermissionToSpacePanel, self).get_count_query(*args, **kwargs)
query_obj = query_obj.join(Course).filter_by(lt = current_user.lt)
return query_obj
##############################################
#
# Initialization
#
def init_ple_admin(app):
ple_admin_url = '/ple_admin'
i18n_labs = lazy_gettext(u'Labs')
ple_admin = Admin(index_view = PleAdminPanel(url=ple_admin_url, endpoint = 'ple_admin'), name = lazy_gettext(u'PLE admin'), url = ple_admin_url, endpoint = 'ple-admin')
ple_admin.add_view(PleInstructorLaboratoriesPanel( db.session, category = i18n_labs, name = lazy_gettext(u"Available labs"), endpoint = 'ple_admin_labs', url = 'labs/available'))
ple_admin.add_view(PleInstructorRequestLaboratoriesPanel( db.session, category = i18n_labs, name = lazy_gettext(u"Request new labs"), endpoint = 'ple_admin_request_labs', url = 'labs/request'))
i18n_spaces = lazy_gettext(u'Spaces')
ple_admin.add_view(PleNewSpacesPanel(db.session, category = i18n_spaces, name = lazy_gettext(u'New'), endpoint = 'ple_admin_new_courses', url = 'spaces/create'))
ple_admin.add_view(PleSpacesPanel(db.session, category = i18n_spaces, name = lazy_gettext(u'Spaces'), endpoint = 'ple_admin_courses', url = 'spaces'))
ple_admin.add_view(PlePermissionToSpacePanel(db.session, category = i18n_spaces, name = lazy_gettext(u'Permissions'), endpoint = 'ple_admin_course_permissions', url = 'spaces/permissions'))
ple_admin.add_view(PleUsersPanel(db.session, name = lazy_gettext(u'Users'), endpoint = 'ple_admin_users', url = 'users'))
ple_admin.add_view(RedirectView('logout', name = lazy_gettext(u'Log out'), endpoint = 'ple_admin_logout', url = 'logout'))
ple_admin.init_app(app)
# Uncomment for debugging purposes
# app.config['TRAP_BAD_REQUEST_ERRORS'] = True
|
labsland/labmanager
|
labmanager/views/ple/admin.py
|
Python
|
bsd-2-clause
| 24,701
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# REF [site] >>
# https://torchtext.readthedocs.io/en/latest/index.html
# https://github.com/pytorch/text
import io, time
import torch, torchtext
# REF [site] >>
# https://github.com/pytorch/text
# https://torchtext.readthedocs.io/en/latest/examples.html
def tutorial_example():
pos = torchtext.data.TabularDataset(
path='./torchtext_data/pos/pos_wsj_train.tsv', format='tsv',
fields=[('text', torchtext.data.Field()), ('labels', torchtext.data.Field())]
)
sentiment = torchtext.data.TabularDataset(
path='./torchtext_data/sentiment/train.json', format='json',
fields={
'sentence_tokenized': ('text', torchtext.data.Field(sequential=True)),
'sentiment_gold': ('labels', torchtext.data.Field(sequential=False))
}
)
#--------------------
my_custom_tokenizer = torchtext.data.utils.get_tokenizer('basic_english')
src = torchtext.data.Field(tokenize=my_custom_tokenizer)
trg = torchtext.data.Field(tokenize=my_custom_tokenizer)
mt_train = torchtext.datasets.TranslationDataset(
path='./torchtext_data/mt/wmt16-ende.train', exts=('.en', '.de'),
fields=(src, trg)
)
mt_dev = torchtext.datasets.TranslationDataset(
path='./torchtext_data/mt/newstest2014', exts=('.en', '.de'),
fields=(src, trg)
)
src.build_vocab(mt_train, max_size=80000)
trg.build_vocab(mt_train, max_size=40000)
# mt_dev shares the fields, so it shares their vocab objects.
train_iter = torchtext.data.BucketIterator(
dataset=mt_train, batch_size=32,
sort_key=lambda x: torchtext.data.interleave_keys(len(x.src), len(x.trg))
)
next(iter(train_iter))
#--------------------
TEXT = torchtext.data.Field()
LABELS = torchtext.data.Field()
train, val, test = torchtext.data.TabularDataset.splits(
path='./torchtext_data/pos_wsj/pos_wsj', train='_train.tsv',
validation='_dev.tsv', test='_test.tsv', format='tsv',
fields=[('text', TEXT), ('labels', LABELS)]
)
train_iter, val_iter, test_iter = torchtext.data.BucketIterator.splits(
(train, val, test), batch_sizes=(16, 256, 256),
sort_key=lambda x: len(x.text), device=0
)
TEXT.build_vocab(train)
LABELS.build_vocab(train)
def tokenizer_example():
tokenizer = torchtext.data.utils.get_tokenizer('basic_english')
tokens = tokenizer('You can now install TorchText using pip!')
print("tokenizer('You can now install TorchText using pip!') =", tokens)
def csv_iterator(data_filepath, ngrams):
tokenizer = torchtext.data.utils.get_tokenizer('basic_english')
with io.open(data_filepath, encoding='utf8') as fd:
reader = torchtext.utils.unicode_csv_reader(fd)
for row in reader:
tokens = ' '.join(row[1:])
yield torchtext.data.utils.ngrams_iterator(tokenizer(tokens), ngrams)
# REF [site] >> https://github.com/pytorch/text/blob/master/examples/vocab/vocab.py
def vocab_example():
csv_filepath = './torchtext_data/test.csv'
vocab_filepath = './torchtext_data/vocab.pth'
ngrams = 2
vocab = torchtext.vocab.build_vocab_from_iterator(csv_iterator(csv_filepath, ngrams))
torch.save(vocab, vocab_filepath)
print('Saved a vocab to {}.'.format(vocab_filepath))
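# A minimal sketch of reloading and querying the saved vocab. It assumes the
# legacy torchtext Vocab object returned by build_vocab_from_iterator; the
# exact attributes (stoi/itos) vary between torchtext versions, and the
# function name is illustrative.
def load_vocab_example():
    vocab_filepath = './torchtext_data/vocab.pth'
    vocab = torch.load(vocab_filepath)
    print('Loaded a vocab of size {} from {}.'.format(len(vocab), vocab_filepath))
    # Token -> index and index -> token lookups on the legacy Vocab API:
    #print(vocab.stoi['the'], vocab.itos[:10])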
def main():
#tutorial_example() # No data.
#tokenizer_example()
vocab_example()
#--------------------------------------------------------------------
if '__main__' == __name__:
main()
|
sangwook236/general-development-and-testing
|
sw_dev/python/rnd/test/language_processing/torchtext_test.py
|
Python
|
gpl-2.0
| 3,292
|
# -*- coding: utf-8 -*-
"""
Occam 1D GUI
-----------------
All-encompassing plotting of the data, model, and model response.
JP 2017
"""
#
# =============================================================================
# Imports
# =============================================================================
import os
import sys
try:
from PyQt5 import QtCore, QtGui, QtWidgets
except ImportError:
raise ImportError("This version needs PyQt5")
import mtpy.modeling.occam1d as occam1d
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
from matplotlib.ticker import MultipleLocator
import matplotlib.gridspec as gridspec
import numpy as np
import matplotlib.pyplot as plt
class MyStream(QtCore.QObject):
"""
this class will emit a signal
"""
message = QtCore.pyqtSignal(str)
def __init__(self, parent=None):
super(MyStream, self).__init__(parent)
def write(self, message):
self.message.emit(str(message))
class Occam1D_GUI(QtWidgets.QMainWindow):
def __init__(self):
super(Occam1D_GUI, self).__init__()
self.ms = 5
self.lw = 1.5
self.data_marker_color = (0, 0, 0)
self.data_marker = 's'
self.model_marker_color = (1, 0, 0)
self.model_marker = 'x'
self.e_capthick = 1
self.e_capsize = 5
self.res_limits = None
self.phase_limits = None
self.subplot_wspace = .25
self.subplot_hspace = .0
self.subplot_right = .98
self.subplot_left = .08
self.subplot_top = .93
self.subplot_bottom = .08
self.legend_loc = 'upper center'
self.legend_pos = (.5, 1.15)
self.legend_marker_scale = 1
self.legend_border_axes_pad = .01
self.legend_label_spacing = 0.07
self.legend_handle_text_pad = .2
self.legend_border_pad = .15
self.fs = 11
self.ylabel_pad = 1.25
self.station = None
self.occam_data = None
self.occam_resp = None
self.dir_path = os.getcwd()
self.ui_setup()
def ui_setup(self):
"""
setup the window layout
"""
self.setWindowTitle("Run Occam 1D")
self.setWindowState(QtCore.Qt.WindowMaximized)
self.occam_widget = OccamWidget()
self.central_widget = self.setCentralWidget(self.occam_widget)
## menu bar
self.menu_bar = self.menuBar()
self.menu_data = self.menu_bar.addMenu("Data")
self.menu_model = self.menu_bar.addMenu("Model")
self.menu_help = self.menu_bar.addMenu('Help')
help_action = QtWidgets.QAction('help doc', self)
help_action.triggered.connect(self.display_help)
self.menu_help.addAction(help_action)
self.setMenuBar(self.menu_bar)
# set the actions for the data file menu item
# set an open option that on click opens an existing occam file
self.action_open_data = QtWidgets.QAction(self)
self.action_open_data.setText("&Open")
self.action_open_data.triggered.connect(self.get_data_file)
self.action_open_model = QtWidgets.QAction(self)
self.action_open_model.setText("&Open")
self.action_open_model.triggered.connect(self.get_model_file)
# add actions to menu
self.menu_data.addAction(self.action_open_data)
self.menu_model.addAction(self.action_open_model)
self.show()
#--------------------------------------------------------
# stream the output of occam 1D
self.my_stream = MyStream()
self.my_stream.message.connect(self.occam_widget.normal_output)
sys.stdout = self.my_stream
QtCore.QMetaObject.connectSlotsByName(self)
def display_help(self):
ll = ['***Be sure you have a working executable of Occam1D first***\n',
'To begin: ',
'\t* select an edi file to invert by clicking the ',
'\t "Get EDI File" button at the top left.',
'\t The TE mode will be plotted, meaning the file was read',
'\t* Change the parameters in the Data, Model, and Startup fields',
'\t* Locate Occam1D on your system by clicking "Occam1D Path"',
'\t* Hit the "Run" button to run an inversion.',
'\t The first iteration will be plotted once it is finished.',
'',
'An L2 curve will be shown in the lower plot to give you an',
'idea of which iteration is the optimum. To change iterations',
'pick a number on the combination box labeled "Iteration".',
'',
'Change the parameters and try again.',
'',
'The output will be shown on the left handside.',
'',
'To save an image of the model and response click the disk icon']
help_string = '\n'.join(ll)
QtWidgets.QMessageBox.information(self.central_widget, 'Help', help_string)
def get_data_file(self):
fn_dialog = QtWidgets.QFileDialog()
fn = str(fn_dialog.getOpenFileName(caption='Choose Occam 1D data file',
filter='(*.dat);; (*.data)',
directory=self.dir_path)[0])
self.occam_widget.occam_data.read_data_file(fn)
self.dir_path = os.path.dirname(fn)
self.occam_widget.mpl_widget.plot_data(data_fn=self.occam_widget.occam_data.data_fn)
self.occam_widget.save_dir = self.dir_path
def get_model_file(self):
fn_dialog = QtWidgets.QFileDialog()
fn = str(fn_dialog.getOpenFileName(caption='Choose Occam 1D model file',
directory=self.dir_path)[0])
self.occam_widget.occam_model.read_model_file(fn)
self.dir_path = os.path.dirname(fn)
#==============================================================================
# Occam 1D widget
#==============================================================================
class OccamWidget(QtWidgets.QWidget):
"""
occam 1D widget
"""
def __init__(self):
super(OccamWidget, self).__init__()
self.occam_data = occam1d.Data()
self.occam_model = occam1d.Model()
self.occam_startup = occam1d.Startup()
self.occam_exec = ''
self.mpl_widget = OccamPlot()
self.mpl_widget.depth_limits = (0, self.occam_model.target_depth)
self.l2_widget = PlotL2()
self.l2_widget.l2_widget.mpl_connect('pick_event', self.on_click)
self.l2_widget.l2_widget.setFocusPolicy(QtCore.Qt.ClickFocus)
self.l2_widget.l2_widget.setFocus()
self.res_err = 10.
self.phase_err = 5.
self.data_mode = 'Det'
self.edi_fn = ''
self.ss = 1.0
self.rotation_angle = 0.0
self.save_dir = None
self.station_dir = None
self.setup_ui()
def setup_ui(self):
"""
setup the user interface
"""
# font type to use for labels
label_font = QtGui.QFont()
label_font.setBold(True)
label_font.setPointSize(16)
#---------------------------------------------------
self.get_occam_path_button = QtWidgets.QPushButton('Occam1D Path')
self.get_occam_path_button.clicked.connect(self.get_occam_path)
self.get_occam_path_edit = QtWidgets.QLineEdit()
self.get_occam_path_edit.setText(self.occam_exec)
self.get_occam_path_edit.editingFinished.connect(self.get_occam_path)
self.get_edi_button = QtWidgets.QPushButton('Get EDI File')
self.get_edi_button.clicked.connect(self.get_edi_file)
self.get_edi_edit = QtWidgets.QLineEdit()
self.get_edi_edit.setText(self.edi_fn)
self.get_edi_edit.editingFinished.connect(self.get_edi_file)
self.data_label = QtWidgets.QLabel('Data Parameters')
self.data_label.setFont(label_font)
self.data_res_err_label = QtWidgets.QLabel('Res. Error (%)')
self.data_res_err_edit = QtWidgets.QLineEdit()
self.data_res_err_edit.setText('{0:.2f}'.format(self.res_err))
self.data_res_err_edit.editingFinished.connect(self.set_res_err)
self.data_phase_err_label = QtWidgets.QLabel('Phase Error (%)')
self.data_phase_err_edit = QtWidgets.QLineEdit()
self.data_phase_err_edit.setText('{0:.2f}'.format(self.phase_err))
self.data_phase_err_edit.editingFinished.connect(self.set_phase_err)
self.data_mode_label = QtWidgets.QLabel('Mode')
self.data_mode_combo = QtWidgets.QComboBox()
self.data_mode_combo.addItem('Det')
self.data_mode_combo.addItem('TE')
self.data_mode_combo.addItem('TM')
self.data_mode_combo.activated[str].connect(self.set_data_mode)
self.data_ss_button = QtWidgets.QPushButton('Apply Static Shift')
self.data_ss_button.clicked.connect(self.apply_ss)
self.data_ss_edit = QtWidgets.QLineEdit()
self.data_ss_edit.setText('{0:.2f}'.format(self.ss))
self.data_ss_edit.editingFinished.connect(self.set_ss)
self.data_rotate_label = QtWidgets.QLabel("Rotation Angle (N=0, E=90)")
self.data_rotate_edit = QtWidgets.QLineEdit('{0:.2f}'.format(self.rotation_angle))
self.data_rotate_edit.editingFinished.connect(self.set_rotation_angle)
# vertical layer parameters
self.model_label = QtWidgets.QLabel('Model Parameters')
self.model_label.setFont(label_font)
self.n_layers_label = QtWidgets.QLabel('Number of Vertical Layers')
self.n_layers_edit = QtWidgets.QLineEdit()
self.n_layers_edit.setText('{0:.0f}'.format(self.occam_model.n_layers))
self.n_layers_edit.editingFinished.connect(self.set_n_layers)
self.z1_layer_label = QtWidgets.QLabel('Thickness of 1st layer (m)')
self.z1_layer_edit = QtWidgets.QLineEdit()
self.z1_layer_edit.setText('{0:.2f}'.format(self.occam_model.z1_layer))
self.z1_layer_edit.editingFinished.connect(self.set_z1_layer)
self.z_target_label = QtWidgets.QLabel('Target Depth (m)')
self.z_target_edit = QtWidgets.QLineEdit()
self.z_target_edit.setText('{0:.2f}'.format(self.occam_model.target_depth))
self.z_target_edit.editingFinished.connect(self.set_z_target)
self.z_bottom_label = QtWidgets.QLabel('Bottom of the Model (m)')
self.z_bottom_edit = QtWidgets.QLineEdit()
self.z_bottom_edit.setText('{0:.2f}'.format(self.occam_model.bottom_layer))
self.z_bottom_edit.editingFinished.connect(self.set_z_bottom)
# starting resistivity
self.startup_label = QtWidgets.QLabel('Startup Parameters')
self.startup_label.setFont(label_font)
self.start_rho_label = QtWidgets.QLabel('Starting rho (Ohmm)')
self.start_rho_edit = QtWidgets.QLineEdit()
self.start_rho_edit.setText('{0:.2f}'.format(self.occam_startup.start_rho))
self.start_rho_edit.editingFinished.connect(self.set_rho)
self.max_iter_label = QtWidgets.QLabel('Num of Iterations')
self.max_iter_edit = QtWidgets.QLineEdit()
self.max_iter_edit.setText('{0:.0f}'.format(self.occam_startup.max_iter))
self.max_iter_edit.editingFinished.connect(self.set_max_iter)
self.target_rms_label = QtWidgets.QLabel('Target RMS')
self.target_rms_edit = QtWidgets.QLineEdit()
self.target_rms_edit.setText('{0:.2f}'.format(self.occam_startup.target_rms))
self.target_rms_edit.editingFinished.connect(self.set_target_rms)
self.start_roughness_label = QtWidgets.QLabel('Starting Roughness')
self.start_roughness_edit = QtWidgets.QLineEdit()
self.start_roughness_edit.setText('{0:.2f}'.format(self.occam_startup.start_rough))
self.start_roughness_edit.editingFinished.connect(self.set_start_rough)
self.start_lagrange_label = QtWidgets.QLabel('Starting Lagrange')
self.start_lagrange_edit = QtWidgets.QLineEdit()
self.start_lagrange_edit.setText('{0:.2f}'.format(self.occam_startup.start_lagrange))
self.start_lagrange_edit.editingFinished.connect(self.set_start_lagrange)
self.iter_combo_label = QtWidgets.QLabel('Plot Iteration')
self.iter_combo_edit = QtWidgets.QComboBox()
self.iter_combo_edit.addItem('1')
self.iter_combo_edit.activated[str].connect(self.set_iteration)
self.iter_combo_edit.setSizeAdjustPolicy(QtWidgets.QComboBox.AdjustToContents)
self.iter_combo_edit.setMinimumWidth(50)
self.output_box = QtWidgets.QTextEdit()
#---set the layout---------------
path_layout = QtWidgets.QHBoxLayout()
path_layout.addWidget(self.get_occam_path_button)
path_layout.addWidget(self.get_occam_path_edit)
data_grid = QtWidgets.QGridLayout()
data_grid.addWidget(self.data_label, 0, 0)
data_grid.addWidget(self.data_res_err_label, 1, 0)
data_grid.addWidget(self.data_res_err_edit, 1, 1)
data_grid.addWidget(self.data_phase_err_label, 2, 0)
data_grid.addWidget(self.data_phase_err_edit, 2, 1)
data_grid.addWidget(self.data_mode_label, 3, 0)
data_grid.addWidget(self.data_mode_combo, 3, 1)
data_grid.addWidget(self.data_ss_button, 4, 0)
data_grid.addWidget(self.data_ss_edit, 4, 1)
data_grid.addWidget(self.data_rotate_label, 5, 0)
data_grid.addWidget(self.data_rotate_edit, 5, 1)
model_grid = QtWidgets.QGridLayout()
model_grid.addWidget(self.model_label, 0, 0)
model_grid.addWidget(self.n_layers_label, 1, 0)
model_grid.addWidget(self.n_layers_edit, 1, 1)
model_grid.addWidget(self.z1_layer_label, 2, 0)
model_grid.addWidget(self.z1_layer_edit, 2, 1)
model_grid.addWidget(self.z_target_label, 3, 0)
model_grid.addWidget(self.z_target_edit, 3, 1)
model_grid.addWidget(self.z_bottom_label, 4, 0)
model_grid.addWidget(self.z_bottom_edit, 4, 1)
startup_grid = QtWidgets.QGridLayout()
startup_grid.addWidget(self.startup_label, 0, 0)
startup_grid.addWidget(self.target_rms_label, 1, 0)
startup_grid.addWidget(self.target_rms_edit, 1, 1)
startup_grid.addWidget(self.max_iter_label, 2, 0)
startup_grid.addWidget(self.max_iter_edit, 2, 1)
startup_grid.addWidget(self.start_rho_label, 3, 0)
startup_grid.addWidget(self.start_rho_edit, 3, 1)
startup_grid.addWidget(self.start_lagrange_label, 4, 0)
startup_grid.addWidget(self.start_lagrange_edit, 4, 1)
startup_grid.addWidget(self.start_roughness_label, 5, 0)
startup_grid.addWidget(self.start_roughness_edit, 5, 1)
run_button = QtWidgets.QPushButton()
run_button.setText('Run')
run_button.clicked.connect(self.run_occam)
run_button_edits = QtWidgets.QPushButton()
run_button_edits.setText('Run Edits')
run_button_edits.clicked.connect(self.run_occam_edits)
run_layout = QtWidgets.QHBoxLayout()
run_layout.addWidget(run_button)
run_layout.addWidget(run_button_edits)
run_layout.addWidget(self.iter_combo_label)
run_layout.addWidget(self.iter_combo_edit)
edi_layout = QtWidgets.QHBoxLayout()
edi_layout.addWidget(self.get_edi_button)
edi_layout.addWidget(self.get_edi_edit)
edit_layout = QtWidgets.QVBoxLayout()
edit_layout.addLayout(edi_layout)
edit_layout.addLayout(data_grid)
edit_layout.addLayout(model_grid)
edit_layout.addLayout(startup_grid)
edit_layout.addWidget(self.output_box)
edit_layout.addLayout(path_layout)
edit_layout.addLayout(run_layout)
bottom_plot_layout = QtWidgets.QHBoxLayout()
# bottom_plot_layout.addWidget(self.iter_combo_label)
# bottom_plot_layout.addWidget(self.iter_combo_edit)
bottom_plot_layout.addWidget(self.l2_widget)
plot_layout = QtWidgets.QGridLayout()
plot_layout.addWidget(self.mpl_widget, 0, 0, 1, 1)
plot_layout.addLayout(bottom_plot_layout, 2, 0, 2, 1)
# window_layout = QtWidgets.QHBoxLayout()
# window_layout.addLayout(edit_layout)
# window_layout.addLayout(plot_layout)
window_grid = QtWidgets.QGridLayout()
window_grid.addLayout(edit_layout, 0, 0, 1, 5)
window_grid.addLayout(plot_layout, 0, 5, 1, 1)
self.setLayout(window_grid)
QtCore.QMetaObject.connectSlotsByName(self)
def get_occam_path(self):
"""
get occam path
"""
occam_path_dialog = QtWidgets.QFileDialog()
fn = str(occam_path_dialog.getOpenFileName(
caption='Locate Occam1D executable')[0])
self.occam_exec = os.path.abspath(fn)
self.get_occam_path_edit.setText(self.occam_exec)
def get_edi_file(self):
"""
get edi file to invert
"""
if self.edi_fn != '':
edi_path = os.path.dirname(self.edi_fn)
edi_dialog = QtWidgets.QFileDialog()
fn = str(edi_dialog.getOpenFileName(caption='Pick .edi file',
filter='*.edi',
directory=edi_path)[0])
else:
edi_dialog = QtWidgets.QFileDialog()
fn = str(edi_dialog.getOpenFileName(caption='Pick .edi file',
filter='*.edi')[0])
self.edi_fn = fn
self.get_edi_edit.setText(self.edi_fn)
station = os.path.basename(self.edi_fn)[:-4]
self.station_dir = os.path.join(os.path.dirname(self.edi_fn),
station)
if not os.path.isdir(self.station_dir):
os.mkdir(self.station_dir)
print('Made directory {0}'.format(self.station_dir))
self.save_dir = os.path.join(self.station_dir)
# make an initial data file
self.occam_data.write_data_file(edi_file=self.edi_fn,
save_path=self.save_dir,
mode=self.data_mode,
res_err=self.res_err,
phase_err=self.phase_err,
thetar=self.rotation_angle)
self.mpl_widget.plot_data(data_fn=self.occam_data.data_fn)
def set_res_err(self):
self.res_err = float(str(self.data_res_err_edit.text()))
self.data_res_err_edit.setText('{0:.2f}'.format(self.res_err))
def set_phase_err(self):
self.phase_err = float(str(self.data_phase_err_edit.text()))
self.data_phase_err_edit.setText('{0:.2f}'.format(self.phase_err))
def set_data_mode(self, text):
self.data_mode = str(text)
self.occam_data.write_data_file(edi_file=self.edi_fn,
save_path=self.save_dir,
mode=self.data_mode,
res_err=self.res_err,
phase_err=self.phase_err,
thetar=self.rotation_angle)
self.mpl_widget.plot_data(data_fn=self.occam_data.data_fn)
def set_ss(self):
self.ss = float(str(self.data_ss_edit.text()))
self.data_ss_edit.setText('{0:.2f}'.format(self.ss))
def apply_ss(self):
self.mpl_widget.data_obj.res_te[0] /= 1./self.ss
self.mpl_widget.data_obj.res_tm[0] /= 1./self.ss
self.mpl_widget.data_obj.res_te[1] /= 1./self.ss
self.mpl_widget.data_obj.res_tm[1] /= 1./self.ss
self.rewrite_data_file()
self.mpl_widget.plot_data(data_fn=self.occam_data.data_fn)
def set_rotation_angle(self):
self.rotation_angle = float(str(self.data_rotate_edit.text()))
self.data_rotate_edit.setText('{0:.2f}'.format(self.rotation_angle))
self.occam_data.write_data_file(edi_file=self.edi_fn,
save_path=self.save_dir,
mode=self.data_mode,
res_err=self.res_err,
phase_err=self.phase_err,
thetar=self.rotation_angle)
self.mpl_widget.plot_data(data_fn=self.occam_data.data_fn)
def set_n_layers(self):
self.occam_model.n_layers = int(str(self.n_layers_edit.text()))
self.n_layers_edit.setText('{0:.0f}'.format(self.occam_model.n_layers))
def set_z1_layer(self):
self.occam_model.z1_layer = float(str(self.z1_layer_edit.text()))
self.z1_layer_edit.setText('{0:.2f}'.format(self.occam_model.z1_layer))
def set_z_target(self):
self.occam_model.target_depth = float(str(self.z_target_edit.text()))
self.z_target_edit.setText('{0:.2f}'.format(self.occam_model.target_depth))
self.mpl_widget.depth_limits = (0, self.occam_model.target_depth)
def set_z_bottom(self):
self.occam_model.bottom_layer = float(str(self.z_bottom_edit.text()))
self.z_bottom_edit.setText('{0:.2f}'.format(self.occam_model.bottom_layer))
def set_rho(self):
self.occam_startup.start_rho = float(str(self.start_rho_edit.text()))
self.start_rho_edit.setText('{0:.2f}'.format(self.occam_startup.start_rho))
def set_max_iter(self):
self.occam_startup.max_iter = int(str(self.max_iter_edit.text()))
self.max_iter_edit.setText('{0:.0f}'.format(self.occam_startup.max_iter))
def set_target_rms(self):
self.occam_startup.target_rms = float(str(self.target_rms_edit.text()))
self.target_rms_edit.setText('{0:.2f}'.format(self.occam_startup.target_rms))
def set_start_rough(self):
self.occam_startup.start_rough = float(str(self.start_roughness_edit.text()))
self.start_roughness_edit.setText('{0:.2f}'.format(self.occam_startup.start_rough))
def set_start_lagrange(self):
self.occam_startup.start_lagrange = float(str(self.start_lagrange_edit.text()))
self.start_lagrange_edit.setText('{0:.2f}'.format(self.occam_startup.start_lagrange))
def _get_inv_folder(self):
"""
create an inversion folder for each run
"""
if self.save_dir is None:
dir_path = os.path.join(self.station_dir, self.data_mode)
if not os.path.isdir(dir_path):
os.mkdir(dir_path)
print('Made directory {0}'.format(dir_path))
dir_list = []
for roots, dirs, files in os.walk(dir_path):
dir_list.append(dirs)
inv_num = len(dir_list[0])+1
if self.occam_data.data_fn is None:
self.save_dir = os.path.join(self.station_dir, self.data_mode,
'Inv_{0:02}'.format(inv_num))
def run_occam(self):
"""
write all the needed files and run occam then plot
"""
self._get_inv_folder()
if not os.path.isdir(self.save_dir):
os.mkdir(self.save_dir)
print('Made directory {0}'.format(self.save_dir))
# write data file
if self.occam_data.data_fn is None:
self.occam_data.write_data_file(edi_file=self.edi_fn,
save_path=self.save_dir,
mode=self.data_mode,
res_err=self.res_err,
phase_err=self.phase_err,
thetar=self.rotation_angle)
else:
pass
# write model file
if self.occam_model.model_fn is None:
self.occam_model.write_model_file(save_path=self.save_dir)
# write startup file
self.occam_startup.data_fn = self.occam_data.data_fn
self.occam_startup.model_fn = self.occam_model.model_fn
self.occam_startup.write_startup_file(save_path=self.save_dir)
warning_txt = '\n'.join(['Cannot find Occam1D executable. ',
'Looked for {0}'.format(self.occam_exec),
'Click Occam1D button and rerun.'])
if not os.path.isfile(self.occam_exec):
QtWidgets.QMessageBox.warning(self, "Warning", warning_txt)
return
# run occam
occam_run = occam1d.Run(startup_fn=self.occam_startup.startup_fn,
occam_path=self.occam_exec,
mode=self.data_mode)
ini_resp_fn = os.path.join(self.save_dir,
'{0}_{1}.resp'.format(self.data_mode, 1))
ini_model_fn = os.path.join(self.save_dir,
'{0}_{1}.iter'.format(self.data_mode, 1))
ini_resp_fn = os.path.abspath(ini_resp_fn)
ini_model_fn = os.path.abspath(ini_model_fn)
self.mpl_widget.plot_data(data_fn=self.occam_data.data_fn,
resp_fn=ini_resp_fn,
iter_fn=ini_model_fn,
model_fn=self.occam_model.model_fn)
self.l2_widget.plot_l2(dir_path=self.save_dir,
model_fn=self.occam_model.model_fn)
# add iteration values to combo box
for ii in range(self.iter_combo_edit.count()):
self.iter_combo_edit.removeItem(0)
for ii in range(1, self.l2_widget.rms_arr.shape[0]):
self.iter_combo_edit.addItem(str(ii))
# resize the combo box to have width of max iteration
self.iter_combo_edit.resize(self.iter_combo_edit.size())
self.iter_combo_edit.update()
self.iter_combo_edit.repaint()
def on_click(self, event):
data_point = event.artist
iteration = data_point.get_xdata()[event.ind]
print('Picked iteration {0}'.format(iteration))
ini_resp_fn = os.path.join(self.save_dir,
'{0}_{1}.resp'.format(self.data_mode,
iteration))
ini_model_fn = os.path.join(self.save_dir,
'{0}_{1}.iter'.format(self.data_mode,
iteration))
def rewrite_data_file(self):
# write data file
nf = self.mpl_widget.data_obj.freq.shape[0]
mod_rho = np.zeros((nf, 2, 2))
mod_rho[:, 0, 1] = self.mpl_widget.data_obj.res_te[0]
mod_rho[:, 1, 0] = self.mpl_widget.data_obj.res_tm[0]
mod_rho_err = np.zeros((nf, 2, 2))
mod_rho_err[:, 0, 1] = self.mpl_widget.data_obj.res_te[1]
mod_rho_err[:, 1, 0] = self.mpl_widget.data_obj.res_tm[1]
mod_phi = np.zeros((nf, 2, 2))
mod_phi[:, 0, 1] = self.mpl_widget.data_obj.phase_te[0]
mod_phi[:, 1, 0] = self.mpl_widget.data_obj.phase_tm[0]
mod_phi_err = np.zeros((nf, 2, 2))
mod_phi_err[:, 0, 1] = self.mpl_widget.data_obj.phase_te[1]
mod_phi_err[:, 1, 0] = self.mpl_widget.data_obj.phase_tm[1]
mod_rp_tuple = (self.mpl_widget.data_obj.freq,
mod_rho,
mod_rho_err,
mod_phi,
mod_phi_err)
self.occam_data.write_data_file(rp_tuple=mod_rp_tuple,
save_path=self.save_dir,
mode=self.data_mode,
res_err='data',
phase_err='data',
thetar=0)
def run_occam_edits(self):
"""
write all the needed files and run occam then plot
"""
self._get_inv_folder()
if not os.path.isdir(self.save_dir):
os.mkdir(self.save_dir)
print('Made directory {0}'.format(self.save_dir))
self.rewrite_data_file()
# write model file
self.occam_model.write_model_file(save_path=self.save_dir)
# write startup file
self.occam_startup.data_fn = self.occam_data.data_fn
self.occam_startup.model_fn = self.occam_model.model_fn
self.occam_startup.write_startup_file(save_path=self.save_dir)
warning_txt = '\n'.join(['Cannot find Occam1D executable. ',
'Looked for {0}'.format(self.occam_exec),
'Click Occam1D button and rerun.'])
if not os.path.isfile(self.occam_exec):
QtWidgets.QMessageBox.warning(self, "Warning", warning_txt)
return
# run occam
occam_run = occam1d.Run(startup_fn=self.occam_startup.startup_fn,
occam_path=self.occam_exec,
mode=self.data_mode)
ini_resp_fn = os.path.join(self.save_dir,
'{0}_{1}.resp'.format(self.data_mode, 1))
ini_model_fn = os.path.join(self.save_dir,
'{0}_{1}.iter'.format(self.data_mode, 1))
ini_resp_fn = os.path.abspath(ini_resp_fn)
ini_model_fn = os.path.abspath(ini_model_fn)
self.mpl_widget.plot_data(data_fn=self.occam_data.data_fn,
resp_fn=ini_resp_fn,
iter_fn=ini_model_fn,
model_fn=self.occam_model.model_fn)
self.l2_widget.plot_l2(dir_path=self.save_dir,
model_fn=self.occam_model.model_fn)
# add iteration values to combo box
for ii in range(self.iter_combo_edit.count()):
self.iter_combo_edit.removeItem(0)
for ii in range(1, self.l2_widget.rms_arr.shape[0]):
self.iter_combo_edit.addItem(str(ii))
# resize the combo box to have width of max iteration
self.iter_combo_edit.resize(self.iter_combo_edit.size())
self.iter_combo_edit.update()
self.iter_combo_edit.repaint()
def set_iteration(self, text):
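        """Plot the response and depth model for the iteration chosen in the combo box, reporting its RMS and roughness."""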
iteration = text
rms = self.l2_widget.rms_arr['rms'][int(iteration)-1]
roughness = self.l2_widget.rms_arr['roughness'][int(iteration)-1]
print('Iteration {0}, RMS={1:.2f}, Roughnes={2:.2f}'.format(
iteration, rms, roughness))
ini_resp_fn = os.path.join(self.save_dir,
'{0}_{1}.resp'.format(self.data_mode,
iteration))
ini_model_fn = os.path.join(self.save_dir,
'{0}_{1}.iter'.format(self.data_mode,
iteration))
ini_resp_fn = os.path.abspath(ini_resp_fn)
ini_model_fn = os.path.abspath(ini_model_fn)
self.mpl_widget.plot_data(data_fn=self.occam_data.data_fn,
resp_fn=ini_resp_fn,
iter_fn=ini_model_fn,
model_fn=self.occam_model.model_fn)
@QtCore.pyqtSlot(str)
def normal_output(self, message):
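        """Append a message string to the end of the output text box."""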
self.output_box.moveCursor(QtGui.QTextCursor.End)
self.output_box.insertPlainText(message)
#==============================================================================
# Mesh Plot
#==============================================================================
class OccamPlot(QtWidgets.QWidget):
"""
plotting the mesh
"""
def __init__(self):
super(OccamPlot, self).__init__()
self.subplot_wspace = .15
self.subplot_hspace = .2
self.subplot_right = .90
self.subplot_left = .085
self.subplot_top = .93
self.subplot_bottom = .1
self.fig = None
self.axr = None
self.axp = None
self.axm = None
self.res_limits = None
self.phase_limits = None
self.depth_scale = 'linear'
self.depth_units = 'km'
self.depth_limits = None
self.marker_data = 's'
self.marker_data_color = 'k'
self.marker_resp = 'h'
self.marker_resp_color = 'b'
self.marker_size = 2
self.lw = .75
self.ls = ':'
self.e_capthick = .75
self.e_capsize = 3
self.font_size = 8
self.data_obj = None
self.resp_obj = None
self.model_obj = None
self._ax = None
self._ax_index = None
self._err_list = []
self.setup_ui()
def setup_ui(self):
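        """Build the matplotlib canvas and navigation toolbar, connect pick and axes-enter events, and lay out the widget."""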
self.figure = Figure(dpi=200)
self.mpl_widget = FigureCanvas(self.figure)
#self.mpl_widget.setParent(self.central_widget)
self.mpl_widget.setFocusPolicy(QtCore.Qt.ClickFocus)
self.mpl_widget.setFocus()
# be able to edit the data
self.mpl_widget.mpl_connect('pick_event', self.on_pick)
self.mpl_widget.mpl_connect('axes_enter_event', self.in_axes)
self.figure.subplots_adjust(left=self.subplot_left,
right=self.subplot_right,
bottom=self.subplot_bottom,
top=self.subplot_top,
hspace=self.subplot_hspace,
wspace=self.subplot_wspace)
#make sure the figure takes up the entire plottable space
self.mpl_widget.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
# this is the Navigation widget
# it takes the Canvas widget and a parent
self.mpl_toolbar = NavigationToolbar(self.mpl_widget, self)
# set the layout for the plot
mpl_vbox = QtWidgets.QVBoxLayout()
mpl_vbox.addWidget(self.mpl_toolbar)
mpl_vbox.addWidget(self.mpl_widget)
self.setLayout(mpl_vbox)
self.mpl_widget.updateGeometry()
def plot_data(self, data_fn=None, resp_fn=None, model_fn=None,
iter_fn=None):
"""
plot response and depth model
"""
self.figure.clf()
d_kwargs = {'ls':self.ls,
'marker':self.marker_data,
'ms':self.marker_size,
'mfc':self.marker_data_color,
'mec':self.marker_data_color,
'color':self.marker_data_color,
'ecolor':self.marker_data_color,
'picker':2,
'lw':self.lw,
'elinewidth':self.lw,
'capsize':self.e_capsize,
'capthick':self.e_capthick}
r_kwargs = {'ls':self.ls,
'marker':self.marker_resp,
'ms':self.marker_size,
'mfc':self.marker_resp_color,
'mec':self.marker_resp_color,
'color':self.marker_resp_color,
'ecolor':self.marker_resp_color,
'picker':2,
'lw':self.lw,
'elinewidth':self.lw,
'capsize':self.e_capsize,
'capthick':self.e_capthick}
#make a grid of subplots
gs=gridspec.GridSpec(6, 5, hspace=self.subplot_hspace,
wspace=self.subplot_wspace)
#subplot resistivity
self.axr = self.figure.add_subplot(gs[:4, :4])
#subplot for phase
self.axp = self.figure.add_subplot(gs[4:,:4], sharex=self.axr)
#subplot for model
self.axm = self.figure.add_subplot(gs[:, 4])
#-----------------------------------------------------------------
#--> plot data apparent resistivity and phase-------------------------
if data_fn is not None:
d1 = occam1d.Data()
d1.read_data_file(data_fn)
#--> cut out missing data
rxy = np.where(d1.res_te[0] != 0)[0]
#--> TE mode Data
if len(rxy) > 0:
rte = self.axr.errorbar(1./d1.freq[rxy],
d1.res_te[0][rxy],
yerr=d1.res_te[1][rxy],
**d_kwargs)
#legend_marker_list_te.append(rte[0])
#legend_label_list_te.append('$Obs_{TE}$')
else:
pass
#--> cut out missing data
ryx = np.where(d1.res_tm[0] != 0)[0]
            #--> TM mode Data
if len(ryx) > 0:
rtm = self.axr.errorbar(1./d1.freq[ryx],
d1.res_tm[0][ryx],
yerr=d1.res_tm[1][ryx],
**d_kwargs)
#legend_marker_list_te.append(rte[0])
#legend_label_list_te.append('$Obs_{TE}$')
else:
pass
#--------------------plot phase--------------------------------
#cut out missing data points first
pxy = np.where(d1.phase_te[0]!=0)[0]
#--> TE mode data
if len(pxy) > 0:
self.axp.errorbar(1./d1.freq[pxy],
d1.phase_te[0][pxy],
yerr=d1.phase_te[1][pxy],
**d_kwargs)
else:
pass
#cut out missing data points first
pyx = np.where(d1.phase_tm[0]!=0)[0]
            #--> TM mode data
if len(pyx) > 0:
self.axp.errorbar(1./d1.freq[pyx],
d1.phase_tm[0][pyx],
yerr=d1.phase_tm[1][pyx],
**d_kwargs)
else:
pass
self.data_obj = occam1d.Data()
self.data_obj.read_data_file(data_fn)
#--> cut out missing data
rxy = np.where(self.data_obj.res_te[0] != 0)[0]
#--> TE mode Data
if len(rxy) > 0:
rte = self.axr.errorbar(1./self.data_obj.freq[rxy],
self.data_obj.res_te[0][rxy],
yerr=self.data_obj.res_te[1][rxy],
**d_kwargs)
#legend_marker_list_te.append(rte[0])
#legend_label_list_te.append('$Obs_{TE}$')
self._err_list.append([rte[1][0], rte[1][1], rte[2][0]])
else:
pass
#self._err_list.append([None, None, None])
#--> cut out missing data
ryx = np.where(self.data_obj.res_tm[0] != 0)[0]
            #--> TM mode Data
if len(ryx) > 0:
rtm = self.axr.errorbar(1./self.data_obj.freq[ryx],
self.data_obj.res_tm[0][ryx],
yerr=self.data_obj.res_tm[1][ryx],
**d_kwargs)
#legend_marker_list_te.append(rte[0])
#legend_label_list_te.append('$Obs_{TE}$')
self._err_list.append([rtm[1][0], rtm[1][1], rtm[2][0]])
else:
pass
#self._err_list.append([None, None, None])
#--------------------plot phase--------------------------------
#cut out missing data points first
pxy = np.where(self.data_obj.phase_te[0]!=0)[0]
#--> TE mode data
if len(pxy) > 0:
pte =self.axp.errorbar(1./self.data_obj.freq[pxy],
self.data_obj.phase_te[0][pxy],
yerr=self.data_obj.phase_te[1][pxy],
**d_kwargs)
self._err_list.append([pte[1][0], pte[1][1], pte[2][0]])
else:
pass
#self._err_list.append([None, None, None])
#cut out missing data points first
pyx = np.where(self.data_obj.phase_tm[0]!=0)[0]
            #--> TM mode data
if len(pyx) > 0:
ptm = self.axp.errorbar(1./self.data_obj.freq[pyx],
self.data_obj.phase_tm[0][pyx],
yerr=self.data_obj.phase_tm[1][pyx],
**d_kwargs)
self._err_list.append([ptm[1][0], ptm[1][1], ptm[2][0]])
else:
pass
#self._err_list.append([None, None, None])
#-----------------------------------------------------------------
#--> plot data apparent resistivity and phase-------------------------
if resp_fn is not None:
r1 = occam1d.Data()
r1.read_resp_file(resp_fn, data_fn=data_fn)
#--> cut out missing data
rxy = np.where(r1.res_te[2] != 0)[0]
#--> TE mode Data
if len(rxy) > 0:
rte = self.axr.errorbar(1./r1.freq[rxy],
r1.res_te[2][rxy],
yerr=None,
**r_kwargs)
self.resp_obj = occam1d.Data()
self.resp_obj.read_resp_file(resp_fn, data_fn=data_fn)
#--> cut out missing data
ryx = np.where(self.resp_obj.res_tm[2] != 0)[0]
            #--> TM mode Data
if len(ryx) > 0:
rtmr = self.axr.errorbar(1./self.resp_obj.freq[ryx],
self.resp_obj.res_tm[2][ryx],
yerr=None,
**r_kwargs)
#--------------------plot phase--------------------------------
#cut out missing data points first
pxy = np.where(self.resp_obj.phase_te[2]!=0)[0]
#--> TE mode data
if len(pxy) > 0:
self.axp.errorbar(1./self.resp_obj.freq[pxy],
self.resp_obj.phase_te[2][pxy],
yerr=None,
**r_kwargs)
else:
pass
#cut out missing data points first
pyx = np.where(self.resp_obj.phase_tm[2]!=0)[0]
            #--> TM mode data
if len(pyx) > 0:
self.axp.errorbar(1./self.resp_obj.freq[pyx],
self.resp_obj.phase_tm[2][pyx],
yerr=None,
**r_kwargs)
else:
pass
#--> set axis properties-----------------------------------------------
x_limits = (10**np.floor(np.log10(1. / self.data_obj.freq.max())),
10**np.ceil(np.log10(1. / self.data_obj.freq.min())))
self.axr.set_xscale('log', nonposx='clip')
self.axp.set_xscale('log', nonposx='clip')
self.axr.set_yscale('log', nonposy='clip')
self.axr.set_xlim(x_limits)
self.axp.set_xlim(x_limits)
self.axr.grid(True, alpha=.75, which='both',
color=(.75, .75, .75))
plt.setp(self.axr.xaxis.get_ticklabels(),visible=False)
self.axp.grid(True, alpha=.75, which='both',
color=(.75, .75, .75))
#self.axp.yaxis.set_major_locator(MultipleLocator(15))
#self.axp.yaxis.set_minor_locator(MultipleLocator(3))
if self.res_limits is not None:
self.axr.set_ylim(self.res_limits)
if self.phase_limits is not None:
self.axp.set_ylim(self.phase_limits)
self.axr.set_ylabel('App. Res. ($\Omega \cdot m$)',
fontdict={'size':self.font_size,'weight':'bold'})
self.axp.set_ylabel('Phase (deg)',
fontdict={'size':self.font_size,'weight':'bold'})
self.axp.set_xlabel('Period (s)',
fontdict={'size':self.font_size,'weight':'bold'})
#plt.suptitle(self.title_str,fontsize=self.font_size+2,fontweight='bold')
for ax in [self.axr, self.axp, self.axm]:
ax.tick_params(axis='both', which='major',
labelsize=self.font_size-2)
#--> plot depth model--------------------------------------------------
if model_fn is not None:
if self.depth_units == 'km':
dscale = 1000.
else:
dscale = 1.
#--> plot te models
self.model_obj = occam1d.Model()
self.model_obj.read_iter_file(iter_fn, model_fn)
plot_depth = self.model_obj.model_depth[1:]/dscale
plot_model = abs(10**self.model_obj.model_res[1:,1])
self.axm.semilogx(plot_model[::-1],
plot_depth[::-1],
ls='steps-',
color='b',
lw=self.lw)
            if self.depth_limits is None:
dmin = min(plot_depth)
if dmin == 0:
dmin = 1
dmax = max(plot_depth)
self.depth_limits = (dmin, dmax)
if max(self.depth_limits) > plot_depth.max():
if self.depth_scale == 'log':
self.axm.set_ylim(ymin=max(self.depth_limits)/dscale,
ymax=max([1, min(self.depth_limits)/dscale]))
else:
self.axm.set_ylim(ymin=max(self.depth_limits)/dscale,
ymax=min(self.depth_limits)/dscale)
else:
if self.depth_scale == 'log':
self.axm.set_ylim(ymin=max(self.depth_limits),
ymax=max([1, min(self.depth_limits)]))
else:
self.axm.set_ylim(ymin=max(self.depth_limits),
ymax=min(self.depth_limits))
if self.depth_scale == 'log':
self.axm.set_yscale('log', nonposy='clip')
self.axm.set_ylabel('Depth ({0})'.format(self.depth_units),
fontdict={'size':self.font_size,'weight':'bold'})
self.axm.set_xlabel('Resistivity ($\Omega \cdot m$)',
fontdict={'size':self.font_size,'weight':'bold'})
self.axm.grid(True, which='both', alpha=.75, color=(.75, .75, .75))
self.axm.yaxis.set_label_position('right')
self.axm.yaxis.tick_right()
self.mpl_widget.draw()
def on_pick(self, event):
"""
        Edit the picked data point: left-click masks the point (value set to 0);
        right-click increases its error estimate.
"""
data_point = event.artist
data_period = data_point.get_xdata()[event.ind]
data_value = data_point.get_ydata()[event.ind]
p_index = np.where(1./self.data_obj.freq==data_period)[0][0]
# left click remove a point
if event.mouseevent.button == 1:
# editing resistivity
if self._ax_index == 0:
self.data_obj.res_te[0, p_index] = 0.0
self.data_obj.res_tm[0, p_index] = 0.0
if self._ax_index == 1:
self.data_obj.phase_te[0, p_index] = 0.0
self.data_obj.phase_tm[0, p_index] = 0.0
self._ax.plot(data_period, data_value,
color=(.7, .7, .7),
marker=self.marker_data,
ms=self.marker_size*2)
# right click change error bars
if event.mouseevent.button == 3:
# editing resistivity
if self._ax_index == 0:
te_err = self.data_obj.res_te[1, p_index]
tm_err = self.data_obj.res_tm[1, p_index]
self.data_obj.res_te[1, p_index] = te_err+0.2*te_err
self.data_obj.res_tm[1, p_index] = tm_err+0.2*tm_err
if self.data_obj.res_te[1, p_index] != 0:
print('Res err changed to: {0:.2f}'.format(
self.data_obj.res_te[1, p_index]))
if self.data_obj.res_tm[1, p_index] != 0:
print('Res err changed to: {0:.2f}'.format(
self.data_obj.res_tm[1, p_index]))
# make error bar array
eb = self._err_list[self._ax_index][2].get_paths()[p_index].vertices
# make ecap array
ecap_l = self._err_list[self._ax_index][0].get_data()[1][p_index]
ecap_u = self._err_list[self._ax_index][1].get_data()[1][p_index]
# change apparent resistivity error
neb_u = eb[0,1]-.1*abs(eb[0,1])
neb_l = eb[1,1]+.1*abs(eb[1,1])
ecap_l = ecap_l-.1*abs(ecap_l)
ecap_u = ecap_u+.1*abs(ecap_u)
#set the new error bar values
eb[0,1] = neb_u
eb[1,1] = neb_l
#reset the error bars and caps
ncap_l = self._err_list[self._ax_index][0].get_data()
ncap_u = self._err_list[self._ax_index][1].get_data()
ncap_l[1][p_index] = ecap_l
ncap_u[1][p_index] = ecap_u
#set the values
self._err_list[self._ax_index][0].set_data(ncap_l)
self._err_list[self._ax_index][1].set_data(ncap_u)
self._err_list[self._ax_index][2].get_paths()[p_index].vertices = eb
if self._ax_index == 1:
te_err = self.data_obj.phase_te[1, p_index]
tm_err = self.data_obj.phase_tm[1, p_index]
self.data_obj.phase_te[1, p_index] = te_err+te_err*.05
self.data_obj.phase_tm[1, p_index] = tm_err+tm_err*.05
if self.data_obj.phase_te[1, p_index] != 0:
print('Phase err changed to: {0:.2f}'.format(
self.data_obj.phase_te[1, p_index]))
if self.data_obj.phase_tm[1, p_index] != 0:
print('Phase err changed to: {0:.2f}'.format(
self.data_obj.phase_tm[1, p_index]))
# make error bar array
eb = self._err_list[self._ax_index][2].get_paths()[p_index].vertices
# make ecap array
ecap_l = self._err_list[self._ax_index][0].get_data()[1][p_index]
ecap_u = self._err_list[self._ax_index][1].get_data()[1][p_index]
# change apparent phase error
neb_u = eb[0,1]-.025*abs(eb[0,1])
neb_l = eb[1,1]+.025*abs(eb[1,1])
ecap_l = ecap_l-.025*abs(ecap_l)
ecap_u = ecap_u+.025*abs(ecap_u)
#set the new error bar values
eb[0,1] = neb_u
eb[1,1] = neb_l
#reset the error bars and caps
ncap_l = self._err_list[self._ax_index][0].get_data()
ncap_u = self._err_list[self._ax_index][1].get_data()
ncap_l[1][p_index] = ecap_l
ncap_u[1][p_index] = ecap_u
#set the values
self._err_list[self._ax_index][0].set_data(ncap_l)
self._err_list[self._ax_index][1].set_data(ncap_u)
self._err_list[self._ax_index][2].get_paths()[p_index].vertices = eb
# be sure to draw the adjustments
self._ax.figure.canvas.draw()
def in_axes(self, event):
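        """Record which axes the cursor entered so on_pick knows whether a resistivity or phase point was picked."""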
for ax_index, ax in enumerate([self.axr, self.axp, self.axm]):
if event.inaxes == ax:
self._ax_index = ax_index
self._ax = ax
#==============================================================================
# plot L2
#==============================================================================
class PlotL2(QtWidgets.QWidget):
"""
    Plot the L2 curve (RMS and roughness vs. iteration); each iteration point is pickable.
"""
def __init__(self, dir_path=None, model_fn=None):
super(PlotL2, self).__init__()
self.dir_path = dir_path
self.model_fn = model_fn
self.fig_dpi = 200
self.font_size = 8
self.subplot_right = .90
self.subplot_left = .085
self.subplot_top = .86
self.subplot_bottom = .15
self.rms_lw = 1
self.rms_marker = 'd'
self.rms_color = 'k'
self.rms_marker_size = 5
self.rms_median_color = 'red'
self.rms_mean_color = 'orange'
self.rough_lw = .75
self.rough_marker = 's'
self.rough_color = 'b'
self.rough_marker_size = 3
self.rough_font_size = 8
self.int = 1
self.setup_ui()
if self.dir_path is not None:
self.plot_l2()
def setup_ui(self):
self.figure = Figure(dpi=200)
self.l2_widget = FigureCanvas(self.figure)
#self.l2_widget.mpl_connect('pick event', self.on_click)
self.figure.subplots_adjust(left=self.subplot_left,
right=self.subplot_right,
bottom=self.subplot_bottom,
top=self.subplot_top)
#make sure the figure takes up the entire plottable space
self.l2_widget.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
# this is the Navigation widget
# it takes the Canvas widget and a parent
self.mpl_toolbar = NavigationToolbar(self.l2_widget, self)
# set the layout for the plot
mpl_vbox = QtWidgets.QVBoxLayout()
mpl_vbox.addWidget(self.mpl_toolbar)
mpl_vbox.addWidget(self.l2_widget)
self.setLayout(mpl_vbox)
self.l2_widget.updateGeometry()
def _get_iter_list(self):
"""
get all iteration files in dir_path
"""
        if not os.path.isdir(self.dir_path):
raise IOError('Could not find {0}'.format(self.dir_path))
iter_list = [os.path.join(self.dir_path, fn)
for fn in os.listdir(self.dir_path)
if fn.find('.iter')>0]
self.rms_arr = np.zeros(len(iter_list),
dtype=np.dtype([('iteration', np.int),
('rms', np.float),
('roughness', np.float)]))
for ii, fn in enumerate(iter_list):
m1 = occam1d.Model()
m1.read_iter_file(fn, self.model_fn)
self.rms_arr[ii]['iteration'] = int(m1.itdict['Iteration'])
self.rms_arr[ii]['rms'] = float(m1.itdict['Misfit Value'])
self.rms_arr[ii]['roughness'] = float(m1.itdict['Roughness Value'])
self.rms_arr.sort(order='iteration')
def plot_l2(self, dir_path=None, model_fn=None):
"""
        Plot the L2 curve: RMS vs. iteration, with roughness on a twin axis.
"""
self.figure.clf()
if dir_path is not None:
self.dir_path = dir_path
if model_fn is not None:
self.model_fn = model_fn
self._get_iter_list()
nr = self.rms_arr.shape[0]
med_rms = np.median(self.rms_arr['rms'][1:])
mean_rms = np.mean(self.rms_arr['rms'][1:])
        #set the dimensions of the figure
plt.rcParams['font.size'] = self.font_size
plt.rcParams['figure.subplot.left'] = self.subplot_left
plt.rcParams['figure.subplot.right'] = self.subplot_right
plt.rcParams['figure.subplot.bottom'] = self.subplot_bottom
plt.rcParams['figure.subplot.top'] = self.subplot_top
#make figure instance
#make a subplot for RMS vs Iteration
self.ax1 = self.figure.add_subplot(1, 1, 1)
#plot the rms vs iteration
l1, = self.ax1.plot(self.rms_arr['iteration'],
self.rms_arr['rms'],
'-k',
lw=1,
marker='d',
ms=5,
picker=3)
#plot the median of the RMS
m1, = self.ax1.plot(self.rms_arr['iteration'],
np.repeat(med_rms, nr),
ls='--',
color=self.rms_median_color,
lw=self.rms_lw*.75)
#plot the mean of the RMS
m2, = self.ax1.plot(self.rms_arr['iteration'],
np.repeat(mean_rms, nr),
ls='--',
color=self.rms_mean_color,
lw=self.rms_lw*.75)
self.ax2 = self.ax1.twinx()
l2, = self.ax2.plot(self.rms_arr['iteration'],
self.rms_arr['roughness'],
ls='-',
color=self.rough_color,
lw=self.rough_lw,
marker=self.rough_marker,
ms=self.rough_marker_size,
mfc=self.rough_color)
#make a legend
self.figure.legend([l1, l2, m1, m2],
['RMS', 'Roughness',
'Median_RMS={0:.2f}'.format(med_rms),
'Mean_RMS={0:.2f}'.format(mean_rms)],
ncol=4,
loc='upper center',
columnspacing=.25,
markerscale=.75,
handletextpad=.15,
borderaxespad=.02,
prop={'size':self.font_size})
#set the axis properties for RMS vs iteration
# self.ax1.yaxis.set_minor_locator(MultipleLocator(.1))
self.ax1.xaxis.set_minor_locator(MultipleLocator(1))
self.ax1.set_ylabel('RMS',
fontdict={'size':self.font_size+2,
'weight':'bold'})
self.ax1.set_xlabel('Iteration',
fontdict={'size':self.font_size+2,
'weight':'bold'})
self.ax1.grid(alpha=.25, which='both', lw=self.rough_lw)
self.ax2.set_ylabel('Roughness',
fontdict={'size':self.font_size+2,
'weight':'bold',
'color':self.rough_color})
self.ax1.set_ylim(np.floor(self.rms_arr['rms'][1:].min()),
np.ceil(self.rms_arr['rms'][1:].max()))
self.ax2.set_ylim(np.floor(self.rms_arr['roughness'][1:].min()),
np.ceil(self.rms_arr['roughness'][1:].max()))
for t2 in self.ax2.get_yticklabels():
t2.set_color(self.rough_color)
self.l2_widget.draw()
def on_click(self, event):
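        """Print the iteration number of the picked point on the L2 curve."""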
data_point = event.artist
iteration = data_point.get_xdata()[event.ind]
print(iteration)
#==============================================================================
# Main execution
#==============================================================================
def main():
#if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
ui = Occam1D_GUI()
ui.show()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
|
MTgeophysics/mtpy
|
mtpy/gui/occam1d_gui_qt5.py
|
Python
|
gpl-3.0
| 59,895
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Test unit for the miscutil/sequtils module. """
from invenio.testutils import InvenioTestCase
try:
from mock import patch
HAS_MOCK = True
except ImportError:
HAS_MOCK = False
from invenio.testutils import make_test_suite, run_test_suite
from invenio.dbquery import run_sql
from invenio.sequtils import SequenceGenerator
from invenio.sequtils_texkey import TexkeySeq, TexkeyNoAuthorError, \
task_run_core, TexkeyNoYearError
def get_bibrecord_mock(_):
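    """Return a hard-coded conference record used to mock invenio.bibedit_utils.get_bibrecord."""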
return {'001': [([], ' ', ' ', '1086086', 1)],
'111': [([('a',
'Mock conference'),
('d', '14-16 Sep 2011'),
('x', '2050-09-14'),
('c', 'xxxxx')],
' ',
' ',
'',
3)],
'270': [([('m', 'dummy@dummy.com')], ' ', ' ', '', 5)],
'856': [([('u', 'http://dummy.com/')], '4', ' ', '', 6)],
'970': [([('a', 'CONF-XXXXXX')], ' ', ' ', '', 2)],
'980': [([('a', 'CONFERENCES')], ' ', ' ', '', 7)]}
def get_sample_texkey(num_test=0):
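    """Return one of five sample MARCXML records covering author, collaboration-only, author-less and year-less cases."""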
sample_records = []
xml = """
<record>
<datafield tag="100" ind1=" " ind2=" ">
<subfield code="a">Boyle, P.A.</subfield>
</datafield>
<datafield tag="700" ind1=" " ind2=" ">
<subfield code="a">Christ, N.H.</subfield>
</datafield>
<datafield tag="269" ind1=" " ind2=" ">
<subfield code="c">2012-12-06</subfield>
</datafield>
</record>
"""
sample_records.append(xml)
xml = """
<record>
<datafield tag="100" ind1=" " ind2=" ">
<subfield code="a">Broekhoven-Fiene, Hannah</subfield>
</datafield>
<datafield tag="700" ind1=" " ind2=" ">
<subfield code="a">Matthews, Brenda C.</subfield>
</datafield>
<datafield tag="269" ind1=" " ind2=" ">
<subfield code="c">2012-12-06</subfield>
</datafield>
</record>
"""
sample_records.append(xml)
xml = """
<record>
<datafield tag="269" ind1=" " ind2=" ">
<subfield code="b">CERN</subfield>
<subfield code="a">Geneva</subfield>
<subfield code="c">2012-11-06</subfield>
</datafield>
<datafield tag="300" ind1=" " ind2=" ">
<subfield code="a">16</subfield>
</datafield>
<datafield tag="710" ind1=" " ind2=" ">
<subfield code="g">ATLAS Collaboration</subfield>
</datafield>
</record>
"""
sample_records.append(xml)
xml = """
<record>
<datafield tag="269" ind1=" " ind2=" ">
<subfield code="b">CERN</subfield>
<subfield code="a">Geneva</subfield>
<subfield code="c">2012-11-06</subfield>
</datafield>
<datafield tag="300" ind1=" " ind2=" ">
<subfield code="a">16</subfield>
</datafield>
</record>
"""
sample_records.append(xml)
xml = """
<record>
<datafield tag="100" ind1=" " ind2=" ">
<subfield code="a">Broekhoven-Fiene, Hannah</subfield>
</datafield>
<datafield tag="700" ind1=" " ind2=" ">
<subfield code="a">Matthews, Brenda C.</subfield>
</datafield>
</record>
"""
sample_records.append(xml)
return sample_records[num_test]
class IntSeq(SequenceGenerator):
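    """Minimal integer sequence generator used to exercise SequenceGenerator."""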
seq_name = 'test_int'
def _next_value(self, x):
return x + 1
class TestIntSequenceGeneratorClass(InvenioTestCase):
def test_sequence_next_int(self):
int_seq = IntSeq()
next_int = int_seq.next_value(1)
self.assertEqual(next_int, 2)
# Check if the value was stored in the DB
res = run_sql("""SELECT seq_value FROM seqSTORE
WHERE seq_value=%s AND seq_name=%s""",
(2, int_seq.seq_name))
self.assertEqual(int(res[0][0]), 2)
# Clean DB entries
run_sql(""" DELETE FROM seqSTORE WHERE seq_name="test_int" """)
class TestCnumSequenceGeneratorClass(InvenioTestCase):
if HAS_MOCK:
@patch('invenio.bibedit_utils.get_bibrecord',
get_bibrecord_mock)
def test_get_next_cnum(self):
from invenio.sequtils_cnum import CnumSeq
cnum_seq = CnumSeq()
res = cnum_seq.next_value('xx')
self.assertEqual(res, 'C50-09-14')
res = cnum_seq.next_value('xx')
self.assertEqual(res, 'C50-09-14.1')
# Clean DB entries
run_sql(""" DELETE FROM seqSTORE
WHERE seq_name="cnum"
AND seq_value IN ("C50-09-14", "C50-09-14.1") """)
class TestTexkeySequenceGeneratorClass(InvenioTestCase):
def setUp(self):
self.texkey1 = ""
self.texkey2 = ""
self.texkey3 = ""
def test_get_next_texkey1(self):
""" Generate the first texkey """
texkey_seq = TexkeySeq()
self.texkey1 = texkey_seq.next_value(xml_record=get_sample_texkey(0))
self.assertEqual(self.texkey1[:-3], 'Boyle:2012')
def test_get_next_texkey2(self):
""" Generate the second texkey """
texkey_seq = TexkeySeq()
self.texkey2 = texkey_seq.next_value(xml_record=get_sample_texkey(1))
self.assertEqual(self.texkey2[:-3], 'Broekhoven-Fiene:2012')
def test_get_next_texkey3(self):
""" Generate the third texkey """
texkey_seq = TexkeySeq()
self.texkey3 = texkey_seq.next_value(xml_record=get_sample_texkey(2))
self.assertEqual(self.texkey3[:-3], 'ATLAS:2012')
def test_get_next_texkey_no_author(self):
""" Generate an error while getting a texkey with no author """
texkey_seq = TexkeySeq()
self.assertRaises(TexkeyNoAuthorError,
texkey_seq.next_value, xml_record=get_sample_texkey(3))
def test_get_next_texkey_no_year(self):
""" Generate an error while getting a texkey with no year """
texkey_seq = TexkeySeq()
self.assertRaises(TexkeyNoYearError,
texkey_seq.next_value, xml_record=get_sample_texkey(4))
def tearDown(self):
# Clean DB entries
run_sql(""" DELETE FROM seqSTORE
WHERE seq_name="texkey"
AND seq_value IN ("%s", "%s", "%s") """ % (self.texkey1,
self.texkey2,
self.texkey3))
class TestTexkeydaemonClass(InvenioTestCase):
def test_task_run_core(self):
""" Basic task_run_core check """
task_run_core()
TEST_SUITE = make_test_suite(TestIntSequenceGeneratorClass,
TestCnumSequenceGeneratorClass,
TestTexkeySequenceGeneratorClass,
TestTexkeydaemonClass)
if __name__ == "__main__":
run_test_suite(TEST_SUITE, warn_user=True)
|
CERNDocumentServer/invenio
|
modules/miscutil/lib/sequtils_regression_tests.py
|
Python
|
gpl-2.0
| 7,816
|
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests.mock import patch
from ansible.modules.network.onyx import onyx_linkagg
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxLinkaggModule(TestOnyxModule):
module = onyx_linkagg
def setUp(self):
super(TestOnyxLinkaggModule, self).setUp()
self.mock_get_config = patch.object(
onyx_linkagg.OnyxLinkAggModule,
"_get_port_channels")
self.get_config = self.mock_get_config.start()
self.mock_load_config = patch(
'ansible.module_utils.network.onyx.onyx.load_config')
self.load_config = self.mock_load_config.start()
self.mock_get_version = patch.object(
onyx_linkagg.OnyxLinkAggModule, "_get_os_version")
self.get_version = self.mock_get_version.start()
def tearDown(self):
super(TestOnyxLinkaggModule, self).tearDown()
self.mock_get_config.stop()
self.mock_load_config.stop()
self.mock_get_version.stop()
def load_fixture(self, config_file):
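        """Point the mocked port-channel config, load_config and OS-version calls at fixture data."""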
self.get_config.return_value = load_fixture(config_file)
self.load_config.return_value = None
self.get_version.return_value = "3.6.5000"
def load_port_channel_fixture(self):
config_file = 'onyx_port_channel_show.cfg'
self.load_fixture(config_file)
def load_mlag_port_channel_fixture(self):
config_file = 'onyx_mlag_port_channel_show.cfg'
self.load_fixture(config_file)
def test_port_channel_no_change(self):
set_module_args(dict(name='Po22', state='present',
members=['Eth1/7']))
self.load_port_channel_fixture()
self.execute_module(changed=False)
def test_port_channel_remove(self):
set_module_args(dict(name='Po22', state='absent'))
self.load_port_channel_fixture()
commands = ['no interface port-channel 22']
self.execute_module(changed=True, commands=commands)
def test_port_channel_add(self):
set_module_args(dict(name='Po23', state='present',
members=['Eth1/8']))
self.load_port_channel_fixture()
commands = ['interface port-channel 23', 'exit',
'interface ethernet 1/8 channel-group 23 mode on']
self.execute_module(changed=True, commands=commands)
def test_port_channel_add_member(self):
set_module_args(dict(name='Po22', state='present',
members=['Eth1/7', 'Eth1/8']))
self.load_port_channel_fixture()
commands = ['interface ethernet 1/8 channel-group 22 mode on']
self.execute_module(changed=True, commands=commands)
def test_port_channel_remove_member(self):
set_module_args(dict(name='Po22', state='present'))
self.load_port_channel_fixture()
commands = ['interface ethernet 1/7 no channel-group']
self.execute_module(changed=True, commands=commands)
def test_mlag_port_channel_no_change(self):
set_module_args(dict(name='Mpo33', state='present',
members=['Eth1/8']))
self.load_mlag_port_channel_fixture()
self.execute_module(changed=False)
def test_mlag_port_channel_remove(self):
set_module_args(dict(name='Mpo33', state='absent'))
self.load_mlag_port_channel_fixture()
commands = ['no interface mlag-port-channel 33']
self.execute_module(changed=True, commands=commands)
def test_mlag_port_channel_add(self):
set_module_args(dict(name='Mpo34', state='present',
members=['Eth1/9']))
self.load_mlag_port_channel_fixture()
commands = ['interface mlag-port-channel 34', 'exit',
'interface ethernet 1/9 mlag-channel-group 34 mode on']
self.execute_module(changed=True, commands=commands)
def test_mlag_port_channel_add_member(self):
set_module_args(dict(name='Mpo33', state='present',
members=['Eth1/8', 'Eth1/9']))
self.load_mlag_port_channel_fixture()
commands = ['interface ethernet 1/9 mlag-channel-group 33 mode on']
self.execute_module(changed=True, commands=commands)
def test_mlag_port_channel_remove_member(self):
set_module_args(dict(name='Mpo33', state='present'))
self.load_mlag_port_channel_fixture()
commands = ['interface ethernet 1/8 no mlag-channel-group']
self.execute_module(changed=True, commands=commands)
|
drmrd/ansible
|
test/units/modules/network/onyx/test_onyx_linkagg.py
|
Python
|
gpl-3.0
| 4,801
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Module tests."""
from __future__ import absolute_import, print_function
from click.testing import CliRunner
from invenio_accounts.cli import roles_add, roles_create, roles_remove, \
users_activate, users_create, users_deactivate
def test_cli_createuser(script_info):
"""Test create user CLI."""
runner = CliRunner()
# Missing params
result = runner.invoke(
users_create, input='1234\n1234\n', obj=script_info)
assert result.exit_code != 0
# Create user
result = runner.invoke(
users_create,
['info@invenio-software.org', '--password', '123456'],
obj=script_info
)
assert result.exit_code == 0
def test_cli_createrole(script_info):
"""Test create user CLI."""
runner = CliRunner()
# Missing params
result = runner.invoke(
roles_create, ['-d', 'Test description'],
obj=script_info)
assert result.exit_code != 0
# Create role
result = runner.invoke(
roles_create,
['superusers', '-d', 'Test description'],
obj=script_info)
assert result.exit_code == 0
def test_cli_addremove_role(script_info):
"""Test add/remove role."""
runner = CliRunner()
# Create a user and a role
result = runner.invoke(
users_create,
['a@example.org', '--password', '123456'],
obj=script_info
)
assert result.exit_code == 0
result = runner.invoke(roles_create, ['superuser'], obj=script_info)
assert result.exit_code == 0
# User not found
result = runner.invoke(
roles_add, ['inval@example.org', 'superuser'],
obj=script_info)
assert result.exit_code != 0
# Add:
result = runner.invoke(
roles_add, ['a@example.org', 'invalid'],
obj=script_info)
assert result.exit_code != 0
result = runner.invoke(
roles_remove, ['inval@example.org', 'superuser'],
obj=script_info)
assert result.exit_code != 0
# Remove:
result = runner.invoke(
roles_remove, ['a@example.org', 'invalid'],
obj=script_info)
assert result.exit_code != 0
result = runner.invoke(
roles_remove, ['b@example.org', 'superuser'],
obj=script_info)
assert result.exit_code != 0
result = runner.invoke(
roles_remove, ['a@example.org', 'superuser'],
obj=script_info)
assert result.exit_code != 0
# Add:
result = runner.invoke(roles_add,
['a@example.org', 'superuser'],
obj=script_info)
assert result.exit_code == 0
result = runner.invoke(
roles_add,
['a@example.org', 'superuser'],
obj=script_info)
assert result.exit_code != 0
# Remove:
result = runner.invoke(
roles_remove, ['a@example.org', 'superuser'],
obj=script_info)
assert result.exit_code == 0
def test_cli_activate_deactivate(script_info):
"""Test create user CLI."""
runner = CliRunner()
# Create a user
result = runner.invoke(
users_create,
['a@example.org', '--password', '123456'],
obj=script_info
)
assert result.exit_code == 0
# Activate
result = runner.invoke(users_activate, ['in@valid.org'],
obj=script_info)
assert result.exit_code != 0
result = runner.invoke(users_deactivate, ['in@valid.org'],
obj=script_info)
assert result.exit_code != 0
result = runner.invoke(users_activate, ['a@example.org'],
obj=script_info)
assert result.exit_code == 0
result = runner.invoke(users_activate, ['a@example.org'],
obj=script_info)
assert result.exit_code == 0
# Deactivate
result = runner.invoke(users_deactivate,
['a@example.org'], obj=script_info)
assert result.exit_code == 0
result = runner.invoke(users_deactivate,
['a@example.org'], obj=script_info)
assert result.exit_code == 0
|
otron/invenio-accounts
|
tests/test_cli.py
|
Python
|
gpl-2.0
| 5,023
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "prueba.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
aztrock/django-host-extend
|
manage.py
|
Python
|
gpl-2.0
| 249
|
'''
/******************************************************************
*
* Copyright 2018 Samsung Electronics All Rights Reserved.
*
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
******************************************************************/
'''
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import io
import os
import java
import time
import re
import commands
import signal
import glob
import shutil
import string
import random
import datetime
import subprocess
import traceback
import threading
from time import gmtime, strftime
sys.path.append("./DeviceLib.jar")
from robot.api import logger
from org.iotivity.test.rfw.common.devicecontroller import *
from org.iotivity.test.rfw.common.devicetestlib import *
from org.iotivity.test.rfw.common.devicecontroller.datamodel import *
from subprocess import Popen, PIPE
from xml.etree.ElementTree import ElementTree, ParseError
class Execution_Status(object):
INIT = 0
BUILD_IOTIVITY_FAIL = 2
BUILD_TEST_FAIL = 4
TEST_PRE_CONDITION_PASS = 5
TEST_PRE_CONDITION_FAIL = 6
BUILD_IOTIVITY = 'build_iotivity'
BUILD_TEST = 'build_test'
TEST_PRE_CONDITION = 'test_pre_condition'
PASS_STATUS_INDEX = 0
FAIL_STATUS_INDEX = PASS_STATUS_INDEX + 1
TIMEOUT_STATUS_INDEX = FAIL_STATUS_INDEX + 1
UNEXPECTED_STATUS_INDEX = TIMEOUT_STATUS_INDEX + 1
TOTAL_STATUS_INDEX = UNEXPECTED_STATUS_INDEX + 1
class Simulator(object):
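    """
    Container describing a simulator application on a device: its launch
    command, expected log and result state.
    """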
def __init__(self, os_type, device_name, app_name, cmd_dir, run_command, expected_log, app_command):
self.os_type = os_type
self.device_name = device_name
self.app_name = app_name
self.cmd_dir = cmd_dir
self.run_command = run_command
self.expected_log = expected_log
self.app_command = app_command
self.log_saved = False
self.log = ''
self.status = False
class DeviceControlKeyword(object):
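    """
    Robot Framework keyword library wrapping the Java device controllers
    for log, process, file, screen and network control on Linux, Tizen
    and Android devices.
    """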
def __init__(self):
self._result = ''
self.multiDeviceManager = MultiDeviceManager.getInstance()
self.logManager = LogManager.getInstance()
self.build_commands = {}
self.log_fp = open('robot_jython_log.txt', 'w')
self.robot_execution_status = {}
self.robot_root = '.'
self.simulator_list = []
self.current_tc_list = []
self.tc_verdict_map = {}
## Device Manage Keyword ##
def add_device(self, device_type, device_id): # Tizen , Android
return self.multiDeviceManager.addDevice(str(device_type), str(device_id))
def add_specific_device(self, device_type, device_id, spec): # Linux, Tizen, Android
return self.multiDeviceManager.addDevice(str(device_type), str(device_id), str(spec))
def remove_device(self, device_id): # Linux, Tizen, Android
return self.multiDeviceManager.removeDevice(str(device_id))
def remove_all_devices(self): # Linux, Tizen , Android
return self.multiDeviceManager.removeAllDevices()
def get_deviceID(self, device_id): # Linux, Tizen , Android
return self.multiDeviceManager.getDevice(str(device_id)).getDeviceID()
#### Log ####
def log_to_console(self, arg):
logger.info('\n%s' % arg, also_console=True)
def set_log_file_directory(self, path):
self.logManager.setLogPath(str(path))
def start_device_log(self, device_id): # Tizen , Android
return self.logManager.registerDeviceLogger(self.multiDeviceManager.getDevice(str(device_id)).getDeviceID(),
self.multiDeviceManager.getDevice(str(device_id)), True)
def get_device_log(self, device_id): # Tizen, Android
app_logs = self.logManager.getDeviceLogger(self.multiDeviceManager.getDevice(str(device_id)).getDeviceID()).getLogs()
return self.remove_unwanted_characters(app_logs)
def clear_device_log(self, device_id): # Tizen, Android
return self.logManager.getDeviceLogger(
self.multiDeviceManager.getDevice(str(device_id)).getDeviceID()).cleanLog()
def get_filtered_device_log(self, device_id, filters): # Tizen , Android
self.multiDeviceManager.getDevice(str(device_id)).getLogManager().getLogs(str(filters))
return self.multiDeviceManager.getDevice(str(device_id)).getLogManager().getLogs(str(filters))
def clear_filtered_device_log(self, device_id, filters): # Tizen, Android
return self.logManager.getProcessLogger(str(device_id), str(filters)).cleanLog()
def start_filtered_device_log(self, device_id, filters): # Tizen, Android
self.multiDeviceManager.getDevice(str(device_id)).getLogManager().startLogging(str(filters))
return self.logManager.registerProcessLogger(str(device_id), self.multiDeviceManager.getDevice(str(device_id)),
str(filters))
def stop_filtered_device_log(self, device_id, filters): # Tizen, Android
self.multiDeviceManager.getDevice(str(device_id)).getLogManager().stopLogging(str(filters))
return self.logManager.unregisterProcessLogger(str(device_id),
self.multiDeviceManager.getDevice(str(device_id)), str(filters))
def start_application_log(self, device_id, app_id): # Linux
self.notify_loggers ('start_application_log in')
ob = None
try:
temp_id = self.multiDeviceManager.getDevice(str(device_id))
self.notify_loggers ('calling registerProcessLogger')
ob = self.logManager.registerProcessLogger(str(device_id), temp_id, str(app_id), True)
self.notify_loggers ('registerProcessLogger called')
except:
self.notify_loggers ('exception to call registerProcessLogger')
self.notify_loggers ('start_application_log out')
return ob
def clean_application_log(self, device_id, app_id): # Linux
return self.logManager.getProcessLogger(str(device_id), str(app_id)).cleanLog()
def get_application_log(self, device_id, app_id): # Linux
app_logs = self.logManager.getProcessLogger(str(device_id), str(app_id)).getLogs()
return self.remove_unwanted_characters(app_logs)
def remove_unwanted_characters(self, app_logs):
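        """Keep ASCII log lines as-is and replace any non-ASCII character with a '<Non-Ascii: code>' marker."""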
logs = ''
for line in app_logs.splitlines():
try:
line.decode('ascii')
logs = logs + line + '\n'
except:
part = ''
for ch in line:
value = ord(ch)
if value > 127:
part = part + '<Non-Ascii: ' + str(value) + '>'
else:
part = part + ch
logs = logs + part + '\n'
return logs
def set_application_log_filter(self, device_id, app_id): # Linux
return None
def wait_until_logging_stop(self, device_id, process_name, running_sec, time_out, expected_log=''):
backup_log = self.get_application_log(str(device_id), str(process_name))
i = 0
different = None
while (int(time_out) >= i):
time.sleep(int(1))
i = i + 1
new_log = self.get_application_log(str(device_id), str(process_name))
if expected_log and expected_log in new_log:
print('expected_log found')
break
if new_log == backup_log:
different = None
else:
i = 0
different = True
backup_log = self.get_application_log(str(device_id), str(process_name))
if expected_log and expected_log in backup_log:
print('expected_log found')
break
time.sleep(int(running_sec))
def get_test_manager_log_file_info(self, robot_root, os_type, tc_type, module):
directory = os.path.join(robot_root, 'report', tc_type, os_type)
prefix = 'test_manager' + '_' + module + '_' + os_type
suffix = '.log'
return directory, prefix, suffix
def wait_until_log_file_complete(self, device_id, process_name, running_sec, time_out, expected_log, folder_path, prefix, suffix):
i = 0
running_sec = int(running_sec)
time_out = int(time_out)
prev_size = self.get_test_manager_file_size(folder_path, prefix, suffix)
while (time_out >= i):
log = self.get_application_log(str(device_id), str(process_name))
if expected_log in log:
print('expected log found')
break
current_size = self.get_test_manager_file_size(folder_path, prefix, suffix)
if prev_size == current_size:
i = i + running_sec
current_time = strftime("%Y-%m-%d %H:%M:%S", gmtime())
msg = 'Time: {}, previous size: {}, current size: {}'
msg = msg.format(current_time, prev_size, current_size)
print(msg)
else:
i = 0
prev_size = current_size
time.sleep(running_sec)
def wait_until_expected_log_found(self, device_id, process_name, expected_log, maximum_waiting_time):
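        """
        Poll the application log once per second until expected_log appears,
        the application exits, or maximum_waiting_time elapses; return True
        only if the expected log was found.
        """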
log = ''
i = 0
while (int(maximum_waiting_time) >= i):
time.sleep(int(1))
i = i + 1
log += self.get_application_log(str(device_id), str(process_name))
if expected_log in log:
return True
is_app_alive = self.is_application_running(str(device_id), str(process_name))
if is_app_alive != True:
break
print ('Expected log not found in given time')
return False
def wait_until_application_exit(self, device_id, process_name):
while (True):
is_app_alive = self.is_application_running(device_id, process_name)
if is_app_alive != True:
break
return True
def check_keyword(self, keyword, logs):
if re.search(str(keyword), str(logs)):
return True
else:
return False
def check_keyword_multiple_callback(self, keyword, logs):
list1 = re.findall(str(keyword), str(logs))
lengthList1 = len(list1)
if lengthList1 > 1:
return True
else:
return False
def check_keyword_single_callback(self, keyword, logs):
list1 = re.findall(str(keyword), str(logs))
lengthList1 = len(list1)
if lengthList1 == 1:
return True
else:
return False
## Screen Control Keyword ##
def send_virtual_key(self, device_id, key): # Tizen
print "******************************"
print device_id
print key
print "*****************************"
return self.multiDeviceManager.getDevice(str(device_id)).getScreenController().sendString(str(key))
def press_physical_button(self, device_id, key): # Tizen
return self.multiDeviceManager.getDevice(str(device_id)).getScreenController().pressKey(
KeyEventType.valueOf(str(key)))
def tap_device_screen(self, device_id, x, y): # Tizen, Android
coordinate = Coordinate(int(x), int(y))
return self.multiDeviceManager.getDevice(str(device_id)).getScreenController().tap(coordinate)
def drag_device_screen(self, device_id, fromX, fromY, toX, toY): # Tizen, Android
fromcoordinate = Coordinate(int(fromX), int(fromY))
tocoordinate = Coordinate(int(toX), int(toY))
return self.multiDeviceManager.getDevice(str(device_id)).getScreenController().drag(fromcoordinate,
tocoordinate)
## Process/Application Control Keyword ##
def execute_application(self, device_id, process_name, commands): # Linux, Tizen , Android
if type(commands) is list:
command = ''
for temp in commands:
command = command + ' ' + temp
else:
command = str(commands)
return self.multiDeviceManager.getDevice(str(device_id)).getProcessManager().createProcess(str(process_name), command.strip())
def input_command_to_application(self, device_id, process_name, command): # Linux
return self.multiDeviceManager.getDevice(str(device_id)).getProcessManager().inputProcessCommand(
str(process_name), str(command))
def terminate_application(self, device_id, process_name): # Linux, Tizen, Android
return self.multiDeviceManager.getDevice(str(device_id)).getProcessManager().removeProcess(str(process_name))
def is_application_running(self, device_id, process_name): # Linux
return self.multiDeviceManager.getDevice(str(device_id)).getProcessManager().isProcessRunning(str(process_name))
## FileManager Keyword ##
def is_file_exist(self, device_id, file_path, file_name): # Linux
print file_path + os.sep + file_name
return os.path.exists(file_path + os.sep + file_name)
    def is_file_exist(self, bin_path, prefix, suffix):
        # note: this definition shadows the (device_id, file_path, file_name) variant above
        file_list = [f for f in os.listdir(bin_path) if os.path.isfile(os.path.join(bin_path, f))]
        for file_name in file_list:
            if file_name.startswith(prefix) and file_name.endswith(suffix):
                return True
        return False
def push_file(self, device_id, src_path, des_path): # Tizen, Android
return self.multiDeviceManager.getDevice(str(device_id)).getFileManager().push(str(src_path), str(des_path))
def pull_file(self, device_id, src_path, des_path): # Tizen, Android
return self.multiDeviceManager.getDevice(str(device_id)).getFileManager().pull(str(src_path), str(des_path))
def change_working_directory(self, device_id, path): # Linux
return self.multiDeviceManager.getDevice(str(device_id)).getFileManager().changeWorkingDir(str(path))
def get_working_directory_path(self, device_id): # Linux
path = self.multiDeviceManager.getDevice(str(device_id)).getFileManager().getWorkingDir()
pos = path.rfind("/");
return path[0:pos]
def copy_file(self, device_id, source_file, destination_file): # Linux , Tizen, Android
return self.multiDeviceManager.getDevice(str(device_id)).getFileManager().copyFile(str(source_file),
str(destination_file))
def delete_file(self, device_id, file_path): # Linux
return self.multiDeviceManager.getDevice(str(device_id)).getFileManager().deleteFile(str(file_path))
def move_file(self, device_id, source_file, destination_file): # Linux
return self.multiDeviceManager.getDevice(str(device_id)).getFileManager().moveFile(str(source_file),
str(destination_file))
def is_exist_word_in_file(self, device_id, word, file_path):
return self.multiDeviceManager.getDevice(str(device_id)).getFileManager().isExistWordInFile(word, file_path)
def make_empty_file(self, device_id, file_path): # Linux
return self.multiDeviceManager.getDevice(str(device_id)).getFileManager().createFile(file_path)
def make_directory(self, device_id, dir_path): # Linux
return self.multiDeviceManager.getDevice(str(device_id)).getFileManager().mkdir(str(dir_path))
def copy_directory(self, device_id, source_dir, destination_dir): # Linux
return self.multiDeviceManager.getDevice(str(device_id)).getFileManager().copydir(str(source_dir),
str(destination_dir))
def move_directory(self, device_id, source_dir, destination_dir): # Linux
return self.multiDeviceManager.getDevice(str(device_id)).getFileManager().movedir(str(source_dir),
str(destination_dir))
def delete_directory(self, device_id, dir_path): # Linux
return self.multiDeviceManager.getDevice(str(device_id)).getFileManager().rmdir(str(dir_path))
def bluetooth_on(self, device_id): # Android
return self.multiDeviceManager.getDevice(str(device_id)).getNetworkManager().blueToothOn()
def bluetooth_off(self, device_id): # Android
return self.multiDeviceManager.getDevice(str(device_id)).getNetworkManager().blueToothOff()
def wifi_on(self, device_id): # Android
return self.multiDeviceManager.getDevice(str(device_id)).getNetworkManager().wifiOn()
def wifi_off(self, device_id): # Android
return self.multiDeviceManager.getDevice(str(device_id)).getNetworkManager().wifiOff()
def get_ip_address(self, device_id): # Android
return self.multiDeviceManager.getDevice(str(device_id)).getNetworkManager().getIPAddress()
def get_bluetooth_mac_address(self, device_id): # Android
return self.multiDeviceManager.getDevice(str(device_id)).getNetworkManager().getBluetoothMacAddress()
def set_sdk_path(self, device_id, sdk_path): # Android
return self.multiDeviceManager.getDevice(str(device_id)).getNetworkManager().setSDKPath(sdk_path)
## EOF ##
## Android Device Resolution Ratio ##
def get_android_real_id(self, device_id):
return self.multiDeviceManager.getDevice(str(device_id)).getDeviceID()
def get_android_resolution(self, device_id):
deviceID = self.multiDeviceManager.getDevice(str(device_id)).getDeviceID()
status, resolution_output = commands.getstatusoutput("adb -s %s shell wm size" % deviceID)
index = resolution_output.find('Physical size:')
print (resolution_output)
resolution_output = resolution_output[index:]
print (resolution_output)
resolution_array = resolution_output.split()
resolution = resolution_array[2]
return resolution
def get_android_screen_width(self, device_id):
resolution = self.get_android_resolution(device_id)
resolution_array = resolution.split('x')
screen_width = int(resolution_array[0])
return screen_width
def get_android_screen_height(self, device_id):
resolution = self.get_android_resolution(device_id)
resolution_array = resolution.split('x')
screen_height = int(resolution_array[1])
return screen_height
def get_android_xcoordinate(self, device_id, screen_width, xcoordinate):
xcoordinate = int(xcoordinate)
screen_width = int(screen_width)
changed_screen_Width = self.get_android_screen_width(device_id)
xchangedcoordinate = xcoordinate * changed_screen_Width / screen_width
return xchangedcoordinate
def get_android_ycoordinate(self, device_id, screen_height, ycoordinate):
ycoordinate = int(ycoordinate)
screen_height = int(screen_height)
changed_screen_height = self.get_android_screen_height(device_id)
ychangedcoordinate = ycoordinate * changed_screen_height / screen_height
return ychangedcoordinate
## Tizen Device Resolution Ratio ##
def get_tizen_resolution(self, device_id):
deviceID = self.multiDeviceManager.getDevice(str(device_id)).getDeviceID()
status, resolution_output = commands.getstatusoutput("sdb -s %s shell su -c xrandr" % deviceID)
return resolution_output
def get_tizen_screen_width(self, device_id):
resolution = self.get_tizen_resolution(device_id)
match = re.search("connected (\d+)x(\d+).* (\d+mm) x (\d+mm)", resolution)
if match:
screen_width = match.group(1)
return int(screen_width)
def get_tizen_screen_height(self, device_id):
resolution = self.get_tizen_resolution(device_id)
match = re.search("connected (\d+)x(\d+).* (\d+mm) x (\d+mm)", resolution)
if match:
screen_height = match.group(2)
return int(screen_height)
def get_tizen_xcoordinate(self, device_id, screen_width, xcoordinate):
xcoordinate = int(xcoordinate)
screen_width = int(screen_width)
changed_screen_Width = self.get_tizen_screen_width(device_id)
xchangedcoordinate = xcoordinate * changed_screen_Width / screen_width
return xchangedcoordinate
def get_tizen_ycoordinate(self, device_id, screen_height, ycoordinate):
ycoordinate = int(ycoordinate)
screen_height = int(screen_height)
changed_screen_height = self.get_tizen_screen_height(device_id)
ychangedcoordinate = ycoordinate * changed_screen_height / screen_height
return ychangedcoordinate
## Tizen Console ##
def execute_console_application(self, device_id, process_name, command): # Tizen
return self.multiDeviceManager.getDevice(str(device_id)).getConsoleManager().createProcess(str(process_name),
str(command))
def terminate_console_application(self, device_id, process_name): # Tizen
return self.multiDeviceManager.getDevice(str(device_id)).getConsoleManager().removeProcess(str(process_name))
def input_command_to_console_application(self, device_id, process_name, command): # Tizen
return self.multiDeviceManager.getDevice(str(device_id)).getConsoleManager().inputProcessCommand(
str(process_name), str(command))
def start_console_application_log(self, device_id, app_id): # Tizen
return self.logManager.registerConsoleProcessLogger(str(device_id),
self.multiDeviceManager.getDevice(str(device_id)),
str(app_id), True)
# Find and Replace #
def find_and_replace(self, fileName, old_text, new_text):
with open(fileName, 'rb') as f:
content = f.read()
with open(fileName, 'wb') as f:
temp = content.replace(old_text, new_text)
f.write(temp)
return True
# -----------------------------------------------------------------
def updatefiletest(self, filename, dico):
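        """Rewrite 'KEY = value' lines in filename, substituting the new value from dico for every key it contains."""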
RE = '((' + '|'.join(dico.keys()) + ')\s*=)[^\r\n]*?(\r?\n|\r)'
pat = re.compile(RE)
def jojo(mat, dic=dico):
return dic[mat.group(2)].join(mat.group(1, 3))
with open(filename, 'rb') as f:
content = f.read()
with open(filename, 'wb') as f:
temp = pat.sub(jojo, content.decode())
# f.write(bytes(temp, 'UTF-8'))
f.write(temp)
return True
def update_configuration_set_network(self, conf_file, network):
vars = ['CA_IP', 'CA_LE', 'CA_EDR', 'CA_CLOUD']
new_values = ['1', '0', '0', '0']
if network == 'IP':
new_values = [' 1', ' 0', ' 0', ' 0']
if network == 'EDR':
new_values = [' 0', ' 1', ' 0', ' 0']
if network == 'LE':
new_values = [' 0', ' 0', ' 1', ' 0']
# GATT = BLE = 1, RFCOMM = EDR = 2
what_to_change = dict(zip(vars, new_values))
self.updatefiletest(conf_file, what_to_change)
return 'update configuration done'
def update_configuration_set_address(self, conf_file, address):
vars = ['IP']
new_values = [address]
what_to_change = dict(zip(vars, new_values))
self.updatefiletest(conf_file, what_to_change)
return 'update configuration done'
def update_configuration_set_port(self, conf_file, port):
vars = ['PORT ']
new_values = [str(port)]
what_to_change = dict(zip(vars, new_values))
self.updatefiletest(conf_file, what_to_change)
return 'update configuration done'
def update_configuration_set_secure_port(self, conf_file, secure_port):
vars = ['SECURE_PORT']
new_values = [str(secure_port)]
what_to_change = dict(zip(vars, new_values))
self.updatefiletest(conf_file, what_to_change)
return 'update configuration done'
def update_test_manager_configuration_set_tc_bin(self, conf_file):
vars = ['TC_BIN_DIR']
new_values = [' "../IotivitySECTest/bin"']
what_to_change = dict(zip(vars, new_values))
self.updatefiletest(conf_file, what_to_change)
return 'update configuration done'
# tizen configuration file change #
def update_test_manager_configuration_set_bin_tizen(self, conf_file, bin_path):
vars = ['TC_BIN_DIR']
new_values = [bin_path]
what_to_change = dict(zip(vars, new_values))
self.updatefiletest(conf_file, what_to_change)
return 'update configuration done'
def update_config(self, fileName, ip, other_ip, port, secure_port, network_flag="1000"):
out_file = open(fileName, 'w')
text = '[SIMULATOR]\n'
text = text + 'IP = ' + str(ip) + '\n'
text = text + 'OTHER_IP_LIST = ' + str(other_ip) + '\n'
text = text + 'PORT = ' + str(port) + '\n'
text = text + 'SECURE_PORT = ' + str(secure_port) + '\n'
        text = (text + 'CA_IP = ' + network_flag[0:1] + '\nCA_LE = ' + network_flag[1:2] +
                '\nCA_EDR = ' + network_flag[2:3] + '\nCA_TCP = ' + network_flag[3:4])
out_file.write(text)
out_file.close()
def update_android_config(self, fileName, ip, port, secure_port):
out_file = open(fileName, 'w')
text = 'IP:' + str(ip) + '\n'
text = text + 'UNSECURED PORT:' + str(port) + '\n'
text = text + 'SECURED PORT:' + str(secure_port) + '\n'
out_file.write(text)
out_file.close()
def get_executable_name_ca_sim(self, server_type, network):
if server_type == 'SERVER':
if network == 'IP':
return 'iotivity_sim_server'
else:
return 'iotivity_sim_server_tizen'
else:
if network == 'IP':
return 'iotivity_sim_client'
else:
return 'iotivity_sim_client_tizen'
def get_port(self, data, secured):
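        # Scan the simulator log for the 'Secured: <true|false>' marker and pull the
        # 'Port: <n>' value printed just before it; returns '-1' when nothing matches.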
if secured == '1':
secured = 'true'
else:
secured = 'false'
findTxt = 'Secured: ' + secured
print findTxt
start = data.find('Secured: ' + secured)
if start == -1:
return '-1'
else:
securedStr = data[start - len('Secured: 0') - 11:start]
start = securedStr.find('Port: ')
if start == -1: return '-1'
endChar = '\n'
end = securedStr.find(endChar, start)
start = start + len('Port: ')
output = securedStr[start:end]
return output
# Upload Binary in Arduino Board
def upload_arduino_binary(self, binary_uploader, config_file, board_name, protocol, arduino_port, speed,
binary): # Linux
        binary_cmd = (binary_uploader + ' -C' + config_file + ' -v -v -v -p' + board_name
                      + ' -c' + protocol + ' -P' + arduino_port + ' -b' + speed
                      + ' -D -Uflash:w:' + binary + ':i')
return os.system(binary_cmd)
def run_application(self, device_id, path, command): # Linux
        # os.system() runs each command in its own shell, so the 'cd' has to be chained
        # with the command for the working directory to take effect.
        cd_and_run_cmd = 'cd ' + path + ' && ' + command
        return os.system(cd_and_run_cmd)
def get_text(self, keyword, endchar, logs):
# start = logs.find(keyword)+len(keyword)
start = logs.find(keyword)
if (start == -1): return 0
start = start + len(keyword)
print start
end = logs.find(endchar, start)
print end
if (end == -1): end = len(logs)
print end
ip = logs[start:end]
ip = ip.strip()
return ip
# Utility function
def get_current_date(self, n):
a = (datetime.datetime.now())
b = a + datetime.timedelta(seconds=int(n))
print (a)
print (b)
c = b.strftime('%Y-%m-%d %H:%M:%S')
print (c)
return str(c)
def fetch_from_right(self, string, marker):
return string.split(marker)[-1].lstrip()
def check_keyword_count(self, keyword, logs, count):
list1 = re.findall(str(keyword), str(logs))
lengthList1 = len(list1)
if lengthList1 == int(count):
return True
else:
return False
def get_random_string(self, size):
return ''.join([random.choice(string.ascii_letters) for n in range(int(size))])
def write_file(self, path, text):
fh = open(path, "w")
fh.write(text)
fh.close()
return True
def read_file(self, path):
fh = open(path, "r")
text = ""
for line in fh:
text += line.rstrip()
fh.close()
return text
def update_list_item(self, value, index, *old_values):
        new_values = list(old_values)
        new_values[int(index)] = value
        return new_values
# ca linux functions
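# each address list below stores flattened (ip, port) pairs: [ip0, port0, ip1, port1, ...]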
ca_linux_secure_server_addresses = []
ca_linux_non_secure_server_addresses = []
ca_linux_secure_client_addresses = []
ca_linux_non_secure_client_addresses = []
received_ca_linux_ip = ''
received_ca_linux_port = ''
def is_ip_port_exist_in_ca_linux(self, isClientValue, isSecureValue, ip, port):
isClient = int(isClientValue)
isSecure = int(isSecureValue)
if isClient == 1:
if isSecure == 1:
return self.ca_linux_secure_client_addresses.count(
ip) > 0 and self.ca_linux_secure_client_addresses.count(port) > 0
return self.ca_linux_non_secure_client_addresses.count(
ip) > 0 and self.ca_linux_non_secure_client_addresses.count(port) > 0
else:
if isSecure == 1:
return self.ca_linux_secure_server_addresses.count(
ip) > 0 and self.ca_linux_secure_server_addresses.count(port) > 0
return self.ca_linux_non_secure_server_addresses.count(
ip) > 0 and self.ca_linux_non_secure_server_addresses.count(port) > 0
def get_ca_linux_ip_port(self, isClientValue, isSecureValue, isPortValue, indexValue):
index = int(indexValue)
isClient = int(isClientValue)
isPort = int(isPortValue)
isSecure = int(isSecureValue)
index = index * 2 + isPort
if isClient == 1:
if isSecure == 1:
item = self.ca_linux_secure_client_addresses[index]
else:
item = self.ca_linux_non_secure_client_addresses[index]
else:
if isSecure == 1:
item = self.ca_linux_secure_server_addresses[index]
else:
item = self.ca_linux_non_secure_server_addresses[index]
return item
def get_ca_linux_network_size(self, isClientValue, isSecureValue):
isClient = int(isClientValue)
isSecure = int(isSecureValue)
if isClient == 1:
if isSecure == 1:
length = len(self.ca_linux_secure_client_addresses)
else:
length = len(self.ca_linux_non_secure_client_addresses)
else:
if isSecure == 1:
length = len(self.ca_linux_secure_server_addresses)
else:
length = len(self.ca_linux_non_secure_server_addresses)
return length // 2
def set_ca_linux_network_info(self, isClientValue, msg):
isClient = int(isClientValue)
if isClient == 1:
del self.ca_linux_secure_client_addresses[:]
del self.ca_linux_non_secure_client_addresses[:]
else:
del self.ca_linux_secure_server_addresses[:]
del self.ca_linux_non_secure_server_addresses[:]
lines = re.split('\n', msg)
for line in lines:
# print ('line: ' + line)
i = line.find('Address: ')
# print ('Address: ' + str(i))
if i >= 0:
k = line.find(':') + 2
ip = line[k:]
continue
i = line.find('Port: ')
# print ('Port: ' + str(i))
if i >= 0:
k = line.find(':') + 2
port = line[k:]
continue
i = line.find('Secured: true')
# print ('Secured: true: ' + str(i))
if i >= 0:
if isClient == 1:
self.ca_linux_secure_client_addresses.append(ip)
self.ca_linux_secure_client_addresses.append(port)
else:
self.ca_linux_secure_server_addresses.append(ip)
self.ca_linux_secure_server_addresses.append(port)
continue
i = line.find('Secured: false')
# print ('Secured: false: ' + str(i))
if i >= 0:
if isClient == 1:
self.ca_linux_non_secure_client_addresses.append(ip)
self.ca_linux_non_secure_client_addresses.append(port)
# print ('adding non secure client address')
else:
self.ca_linux_non_secure_server_addresses.append(ip)
self.ca_linux_non_secure_server_addresses.append(port)
# print ('adding non secure server address')
def get_received_ca_linux_ip(self):
return self.received_ca_linux_ip
def get_received_ca_linux_port(self):
return self.received_ca_linux_port
def set_ca_linux_received_ip_port(self, msg):
lines = re.split('\n', msg)
for line in lines:
i = line.find('Remote Address: ')
j = line.find('Port: ')
k = line.find('secured:')
if i >= 0 and j > i and k > j:
i = i + len('Remote Address: ')
self.received_ca_linux_ip = line[i:j - 1]
j = j + len('Port: ')
self.received_ca_linux_port = line[j:k - 1]
def get_ca_android_sampleapp_network_info(self, log):
ip = ''
non_secure_port = 0
secure_port = 0
mac = ''
type = 0
port = 0
network_ids = []
line_filter = 'I/JNI_INTERFACE_SAMPLE'
start_substring = '################## Network Information #######################'
end_substring = '##############################################################'
start_index = log.find(start_substring)
end_index = log.find(end_substring)
text = log[start_index:end_index]
print (text)
lines = text.splitlines()
for line in lines:
if line_filter in line:
if 'Type: ' in line:
index = line.find('Type: ') + len('Type: ')
type = int(line[index:])
elif 'Port: ' in line:
index = line.find('Port: ') + len('Port: ')
port = int(line[index:])
elif 'Secured: ' in line:
index = line.find('Secured: ') + len('Secured: ')
secured = int(line[index:])
elif 'Address: ' in line:
index = line.find('Address: ') + len('Address: ')
address = line[index:]
if type == 1:
ip = address
if secured == 0:
non_secure_port = port
else:
secure_port = port
else:
mac = address
network_ids.append(ip)
network_ids.append(non_secure_port)
network_ids.append(secure_port)
network_ids.append(mac)
return network_ids
def read_spec_xml(self, path, searching_platform, searching_tctype, searching_module, searching_testsuite):
cnt = 0
data = dict()
searching_platform = searching_platform.upper()
searching_tctype = searching_tctype.upper()
searching_module = searching_module.upper()
try:
doc = ElementTree(file=path)
testspec = doc.find('testspec')
for platform in testspec.findall('platform'):
platform_name = platform.get('name')
print ('platform_name: ' + platform_name)
if platform_name != searching_platform:
continue
for tctype in platform.findall('type'):
type_name = tctype.get('name')
print ('type_name: ' + type_name)
if type_name != searching_tctype:
continue
data[type_name] = dict()
for module in tctype.findall('module'):
module_name = module.get('name')
print ('module_name: ' + module_name)
if module_name != searching_module:
continue
for testsuite in module.findall('testsuite'):
suite_name = testsuite.get('name')
print ('suite_name: ' + suite_name)
if suite_name != searching_testsuite:
continue
list_tc = []
for testcase in testsuite.findall('testcase'):
print ('testcase: ' + testcase.get('name'))
cnt += 1
# print( str(cnt) + ': ' + testcase.get(GT_ATT.NAME))
list_tc.append(testcase.get('name'))
# data[type_name][suite_name] = list_tc
return list_tc
except ParseError:
print("There is a Parse Error on " + path)
return []
# PM Module Keywords
def create_pm_cbor_files(self, src_dir, dest_dir, json2cbor_tool_dir):
""" Create CBOR Files from JSON and keep it in required directories
        :param src_dir: Directory where the JSON files are kept
        :param dest_dir: Directory where the CBOR files will be kept as backup
        :param json2cbor_tool_dir: Directory where the json2cbor tool exists
:return: void
"""
json_files_src = []
cbor_files_src = []
json_file_list = glob.iglob(os.path.join(src_dir, '*.json'))
for json_file in json_file_list:
json_files_src.append(json_file)
cbor_file_temp = json_file[:-5] + '.dat'
cbor_files_src.append(cbor_file_temp)
os.chdir(json2cbor_tool_dir)
        for i in range(len(json_files_src)):
            json_file = json_files_src[i]
            cbor_file = cbor_files_src[i]
            cmd = "./json2cbor " + json_file + " " + cbor_file
            os.system(cmd)
for cbor_file in cbor_files_src:
if os.path.isfile(cbor_file):
shutil.copy2(cbor_file, dest_dir)
shutil.copy2(cbor_file, dest_dir + os.sep + 'linux')
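# A minimal usage sketch for create_pm_cbor_files (illustrative only; the paths below
# are hypothetical, not taken from the original suite):
#   keyword = DeviceControlKeyword()
#   keyword.create_pm_cbor_files('res/pm_json', 'res/pm_cbor', 'iotivity/out/linux/json2cbor')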
# CA Module Keywords
def get_junit_tc_names(self, tc_src_path, tc_type, suite_name):
import re
JUNIT_TC_PATTERN = re.compile(r'public\s+void\s+(?P<testcase>(Test|test)\w+)\s*\(\s*\)\s*\{', re.DOTALL)
filename = tc_src_path + os.sep + tc_type + os.sep + suite_name + ".java"
fp = open(filename, 'r')
source = fp.read()
suites = []
for tc in JUNIT_TC_PATTERN.findall(source):
suites.append(tc[0])
return suites
def get_split_strings(self, text, delimiter):
return text.split(delimiter)
def get_collection_length(self, *items):
return len(items)
def get_list(self, *items):
col = []
for item in items:
if type(item) is list:
for t_item in item:
col.append(t_item)
else:
col.append(item)
return col
def get_concatenated_string(self, dlm, *items):
s = str(items[0])
for i in range(1, len(items)):
s += dlm
s += str(items[i])
return s
def trim_string(self, s):
return s.strip()
def remove_extra_space(self, s1):
s2 = ''
if type(s1) == list:
s3 = s1
elif type(s1) == str or type(s1) == unicode:
s3 = s1.split()
print ('type: ' + str(type(s1)))
for s in s3:
if s2:
s2 += ' '
s2 += s
return s2
def get_file_name_from_run_command(self, s):
s = s.replace('java -jar ', '')
s = s.replace('stdbuf -oL ', '')
s = s.replace('./', '')
end_index = len(s)
index = s.find(' ')
if index >= 0:
end_index = index
s = s[:end_index]
sa = s.split(os.sep)
return sa[len(sa)-1]
def get_ip_address(self, network_interface):
f = os.popen('ifconfig %s | grep "inet\ addr" | cut -d: -f2 | cut -d" " -f1' % network_interface)
return f.read().strip()
def get_ipv6_address(self, network_interface):
command = "ifconfig %s | awk '/inet6/{print $3}'" % network_interface
f = os.popen(command)
addr = f.read().strip()
pos = addr.find('/')
addr = addr[:pos]
addr += '%' + network_interface
return addr
def update_common_config_file(self, file_name, *items):
index = 0
text = ''
keys = []
while index < len(items):
text += items[index] + '=' + items[index+1] + '\n'
keys.append(items[index])
index += 2
fp = open(file_name, 'r')
for line in fp:
key_value = line.split('=')
if len(key_value) != 2:
continue
if key_value[0] in keys:
continue
text += line
fp.close()
fp = open(file_name, 'w')
fp.write(text)
fp.close()
def is_build_needed(self, build_path, command):
os = 'linux'
if 'gbsbuild' in command:
os = 'tizen'
command = re.sub('-j\s+\d', '', command)
params = command.split(' ')
for param in params:
if param.startswith('TARGET_OS='):
os=param[len('TARGET_OS='):]
break
command = re.sub('TARGET_OS=[A-Za-z]+', '', command)
current_command = {}
for param in params:
if not param.startswith('TARGET_OS='):
split_param = param.split('=')
param_name = split_param[0]
param_value = ''
if len(split_param) > 1:
param_value = split_param[1]
current_command[param_name] = param_value
if not build_path in self.build_commands.keys():
self.build_commands[build_path] = {}
if not os in self.build_commands[build_path].keys():
self.build_commands[build_path][os] = {}
build_needed = False
if len(current_command.keys()) != len(self.build_commands[build_path][os]):
build_needed = True
for param_name in current_command.keys():
param_value = current_command[param_name]
if not param_name in self.build_commands[build_path][os].keys():
build_needed = True
elif self.build_commands[build_path][os][param_name] != param_value:
build_needed = True
return build_needed
def add_build_command(self, build_path, command):
os = 'linux'
if 'gbsbuild' in command:
os = 'tizen'
command = re.sub('-j\s+\d', '', command)
params = command.split(' ')
for param in params:
if param.startswith('TARGET_OS='):
os=param[len('TARGET_OS='):]
break
command = re.sub('TARGET_OS=[A-Za-z]+', '', command)
current_command = {}
for param in params:
if not param.startswith('TARGET_OS='):
split_param = param.split('=')
param_name = split_param[0]
param_value = ''
if len(split_param) > 1:
param_value = split_param[1]
current_command[param_name] = param_value
self.build_commands[build_path][os] = current_command
def get_os_from_build_command(self, command):
os = 'linux'
if 'gbsbuild' in command:
os = 'tizen'
elif 'TARGET_OS=android' in command:
os = 'android'
elif 'TARGET_OS=tizen' in command:
os = 'tizen'
return os
def is_substring_count_match(self, log, cnt, op, sub):
cnt_found = log.count(sub)
cnt_expected = int(cnt)
print ('found: ' + str(cnt_found) + ' , expected: ' + str(cnt_expected))
if op == '=':
return cnt_found == cnt_expected
if op == '!=':
return cnt_found != cnt_expected
if op == '>=':
return cnt_found >= cnt_expected
if op == '<=':
return cnt_found <= cnt_expected
if op == '>':
return cnt_found > cnt_expected
if op == '<':
return cnt_found < cnt_expected
def get_java_proxy_flag(self, http_proxy_address, https_proxy_address):
flag = ''
http_proxy_address = http_proxy_address.split(':')
if len(http_proxy_address) == 2:
flag += '-Dhttp.proxyHost=' + http_proxy_address[0] + ' -Dhttp.proxyPort=' + http_proxy_address[1] + ' '
https_proxy_address = https_proxy_address.split(':')
if len(https_proxy_address) == 2:
flag += '-Dhttps.proxyHost=' + https_proxy_address[0] + ' -Dhttps.proxyPort=' + https_proxy_address[1]
return flag
def get_tizen_build_success_text(self, build_command):
s1 = 'done building targets.'
s2 = 'Build is successful'
if 'scons ' in build_command:
return s1
return s2
def notify_loggers(self, txt):
print ('console logger printing the log')
print (txt)
self.log_fp.write(str(txt) + '\n')
self.log_fp.flush()
def match_any_string(self, txt, *list_of_substring):
for x in list_of_substring:
if x in txt:
return True
return False
def write_robot_execution_status(self, execution_part, os_type, module, tc_type, status):
delimiter = ':'
print (self.robot_root)
directory = os.path.join(self.robot_root, 'report')
if not os.path.exists(directory):
os.makedirs(directory)
execution_path = os.path.join(directory, 'robot_execution_status.txt')
fp = open(execution_path, 'a')
print (execution_path)
print (execution_part, os_type, module, tc_type, status)
if status:
verdict = 'pass'
else:
verdict = 'fail'
fp.write(verdict + delimiter + module + delimiter + os_type + delimiter + execution_part + delimiter + tc_type + '\n')
fp.flush()
fp.close()
def write_build_log(self, os_type, module, tc_type, build_command, status, log, build_part):
directory = os.path.join(self.robot_root, 'report', tc_type, os_type)
if not os.path.exists(directory):
os.makedirs(directory)
suffix = ''
params = build_command.split()
for param in params:
pair = param.split('=')
if len(pair) <= 1:
continue
key = pair[0].lower()
value = pair[1].lower()
if key == 'secured':
if value == '1':
suffix += '_secured'
else:
suffix += '_non-secured'
elif key == 'release':
if value == '1':
suffix += '_release'
else:
suffix += '_debug'
elif key == 'target_transport':
value = value.replace(',', '-')
suffix += '_' + value
elif key == 'framework':
suffix += '_' + value
filepath = os.path.join(directory, 'build' + '_' + build_part + '_' + module + suffix + '.txt')
print (self.robot_root, filepath, os_type, module, tc_type)
fp = open(filepath, 'w')
if status:
verdict = 'pass'
else:
verdict = 'fail'
content = 'Build Command: ' + build_command + '\n\n'
content += 'Build Status: ' + verdict + '\n\n'
content += 'Build Log:\n' + log + '\n'
fp.write(content)
fp.flush()
fp.close()
def write_iotivity_build_log(self, os_type, module, tc_type, build_command, status, log):
self.write_build_log(os_type, module, tc_type, build_command, status, log, 'iotivity')
def write_test_build_log(self, os_type, module, tc_type, build_command, status, log):
self.write_build_log(os_type, module, tc_type, build_command, status, log, 'test')
def write_precondition_log(self, os_type, module, tc_type, no):
directory = os.path.join(self.robot_root, 'report', tc_type, os_type)
if not os.path.exists(directory):
os.makedirs(directory)
run_command = self.simulator_list[no].run_command
if 'tizen' == os_type.lower():
if 'sdb ' in run_command and ' shell ' in run_command:
s = '/opt/usr/media/bin '
index = run_command.find(s)
if index >= 0:
run_command = run_command[index+len(s):].strip()
binary_filename = self.get_file_name_from_run_command(run_command)
filepath = os.path.join(directory, 'pre_condition' + '_' + module + '_' + binary_filename + '.txt')
print (self.robot_root, filepath, os_type, module, tc_type)
fp = open(filepath, 'w')
if self.simulator_list[no].status:
verdict = 'pass'
else:
verdict = 'fail'
content = 'Execution Directory: ' + self.simulator_list[no].cmd_dir + '\n\n'
content += 'Execution Command: ' + self.simulator_list[no].run_command + '\n\n'
content += 'Expected Log: ' + self.simulator_list[no].expected_log + '\n\n'
content += 'Execution Status: ' + verdict + '\n\n'
content += 'Execution Procedure:\n' + self.get_simulator_procedure(binary_filename) + '\n\n'
content += 'Execution Log:\n' + self.get_simulator_log(no) + '\n'
fp.write(content)
fp.flush()
fp.close()
def get_simulator_procedure(self, binary_name):
procedure_text = 'No procedure found'
try:
procedure_filepath = os.path.join(self.robot_root, '..', '..', '..', 'res', 'procedure', binary_name + '.txt')
if os.path.exists(procedure_filepath):
procedure_text = open(procedure_filepath, 'r').read()
else:
src_file_path = ''
file_list = []
for root, dirs, files in os.walk(os.path.join(self.robot_root, '..', '..', '..', 'src')):
for file_name in files:
short_name = os.path.basename(os.path.join(root, file_name))
if short_name.endswith('.cpp') or short_name.endswith('.c'):
if binary_name in short_name:
src_file_path = os.path.join(root, file_name)
elif binary_name.lower() in short_name.lower():
src_file_path = os.path.join(root, file_name)
elif binary_name.lower().replace('_', '') in short_name.lower():
src_file_path = os.path.join(root, file_name)
if src_file_path:
print(src_file_path)
contents = open(src_file_path, 'r').read()
p = re.compile(r'/\*\*[\s]+.+\*/\s*int\s*main\s*\(', re.DOTALL)
mat = re.search(p, contents)
if mat:
contents = contents[mat.start() : mat.end()]
contents = contents.replace('/**', '').replace('*/', '')
contents = re.sub(r'[ ]*\*', '', contents)
procedure_text = re.sub(r'int\s*main\s*\(', '', contents).strip()
except:
print("Unexpected error:", sys.exc_info()[0])
return procedure_text
def add_simulator(self, os_type, device_name, app_name, app_command, dlm):
app_commands = app_command.split(dlm)
cmd_dir = app_commands[0]
run_command = app_commands[1]
expected_log = app_commands[2]
print(cmd_dir)
print(run_command)
print(expected_log)
self.simulator_list.append(Simulator(os_type, device_name, app_name, cmd_dir, run_command, expected_log, app_command))
def clear_simulator_list(self):
del self.simulator_list[:]
def total_simulator_count(self):
return len(self.simulator_list)
def get_simulator_info(self, no):
simulator = self.simulator_list[no]
return simulator.os_type, simulator.device_name, simulator.app_name, simulator.run_command, simulator.expected_log, simulator.status
def get_simulator_device_name(self, no):
return self.simulator_list[no].device_name
def get_simulator_app_name(self, no):
return self.simulator_list[no].app_name
def get_simulator_app_command(self, no):
return self.simulator_list[no].app_command
def get_simulator_run_command(self, no):
return self.simulator_list[no].run_command
def set_simulator_status(self, no, status):
self.simulator_list[no].status = status
def set_simulator_log(self, no, log):
self.simulator_list[no].log_saved = True
self.simulator_list[no].log = log
def get_simulator_log(self, no):
if not self.simulator_list[no].log_saved:
try:
self.simulator_list[no].log = self.get_application_log(self.simulator_list[no].device_name, self.simulator_list[no].app_name)
self.simulator_list[no].log_saved = True
except:
print('causing problem when calling get_application_log')
return self.simulator_list[no].log
def set_robot_execution_status(self, os_type, module, tc_type, status):
print (os_type, module, tc_type, status)
if os_type not in self.robot_execution_status:
self.robot_execution_status[os_type] = {}
if module not in self.robot_execution_status[os_type]:
self.robot_execution_status[os_type][module] = {}
self.robot_execution_status[os_type][module][tc_type] = int(status)
def check_robot_execution_status(self, path, os_type, module, tc_type):
self.set_robot_root(path)
if os_type not in self.robot_execution_status:
return
if module not in self.robot_execution_status[os_type]:
return
if tc_type not in self.robot_execution_status[os_type][module]:
return
if self.robot_execution_status[os_type][module][tc_type] == Execution_Status.BUILD_IOTIVITY_FAIL:
self.write_robot_execution_status(Execution_Status.BUILD_IOTIVITY, os_type, module, tc_type, False)
elif self.robot_execution_status[os_type][module][tc_type] == Execution_Status.BUILD_TEST_FAIL:
self.write_robot_execution_status(Execution_Status.BUILD_TEST, os_type, module, tc_type, False)
elif self.robot_execution_status[os_type][module][tc_type] == Execution_Status.TEST_PRE_CONDITION_FAIL:
self.write_robot_execution_status(Execution_Status.TEST_PRE_CONDITION, os_type, module, tc_type, False)
def store_binary(self, iotivity_root, release_dir, os_type, tc_type, module):
backup_path = os.path.join(self.robot_root, 'report', tc_type, os_type, 'bin_' + module)
bin_path = os.path.abspath(os.path.join(self.robot_root, '..', '..', '..', 'bin'))
if os.path.exists(backup_path):
shutil.rmtree(backup_path)
os.makedirs(backup_path)
print (os_type.lower())
os_module_bin_path = bin_path + os.sep + os_type.lower() + os.sep + module
if 'tizen' in os_type.lower():
os.system('find ' + os_module_bin_path + ' -name \*.rpm -exec cp {} ' + backup_path + ' \;')
elif 'android' in os_type.lower():
os.system('find ' + os_module_bin_path + ' -name \*-debug.apk -exec cp {} ' + backup_path + ' \;')
os.system('cp -r ' + bin_path + os.sep + 'linux' + os.sep + '* ' + backup_path)
out_dir = os.path.join(iotivity_root, 'out', 'linux')
os.system('find ' + out_dir + os.sep + '*' + os.sep + release_dir + ' -name \*.so -exec cp {} ' + backup_path + ' \;')
def set_robot_root(self, path):
self.robot_root = path
print (self.robot_root)
def make_tc_list(self, directory, file_name):
file_path=os.path.join(directory, file_name)
f = open(file_path,'r')
contents = f.read()
print(contents)
f.close()
self.current_tc_list[:] = []
prefix_list = [' -f ', ' -u ', ' -c ', ' -g ']
param_list = ['binary_name', 'suite_name', 'tc_name', 'package_name']
contents = contents.strip()
for row in contents.split('\n'):
command = ''
for info in row.split(','):
for i in range(0, len(param_list)):
if info.split(':')[0].strip() == param_list[i] and len(info.split(':')[1].strip()) != 0:
command += ' ' + str(prefix_list[i] + info.split(':')[1].strip())
break
command = command.strip()
command = re.sub(' +', ' ', command)
self.current_tc_list.append(command)
self.tc_verdict_map[command] = {}
self.tc_verdict_map[command][Execution_Status.PASS_STATUS_INDEX] = 0
self.tc_verdict_map[command][Execution_Status.FAIL_STATUS_INDEX] = 0
self.tc_verdict_map[command][Execution_Status.TIMEOUT_STATUS_INDEX] = 0
self.tc_verdict_map[command][Execution_Status.UNEXPECTED_STATUS_INDEX] = 0
self.tc_verdict_map[command][Execution_Status.TOTAL_STATUS_INDEX] = 0
def purge_tc_list(self):
temp_list = []
for command in self.current_tc_list:
if self.tc_verdict_map[command][Execution_Status.PASS_STATUS_INDEX] >= 1:
del self.tc_verdict_map[command]
continue
if self.tc_verdict_map[command][Execution_Status.TIMEOUT_STATUS_INDEX] >= 2:
del self.tc_verdict_map[command]
continue
if self.tc_verdict_map[command][Execution_Status.TOTAL_STATUS_INDEX] >= 3:
del self.tc_verdict_map[command]
continue
temp_list.append(command)
self.current_tc_list[:] = []
self.current_tc_list = temp_list
def is_tc_list_exhausted(self):
return len(self.current_tc_list) == 0
def get_tc_list(self):
return self.current_tc_list
def set_tc_status(self, command, log):
print(command)
print(log)
self.tc_verdict_map[command][Execution_Status.TOTAL_STATUS_INDEX] += 1
start_text = '[ Result of '
end_text = ' ]'
start_pos = log.find(start_text)
print(start_pos)
if start_pos == -1:
return
log = log[start_pos+len(start_text):]
end_pos = log.find(end_text)
print(end_pos)
line = ''
if end_pos >= 0:
line = log[:end_pos]
print(line)
if line:
pos = line.find(':')
print(pos)
index = -1
if pos >= 0:
verdict = line[pos+1:]
verdict = verdict.strip().lower()
print('verdict: ' + verdict)
if 'pass' == verdict:
index = Execution_Status.PASS_STATUS_INDEX
elif 'fail' == verdict:
index = Execution_Status.FAIL_STATUS_INDEX
elif 'timeout' == verdict:
index = Execution_Status.TIMEOUT_STATUS_INDEX
elif 'unexpected' == verdict:
index = Execution_Status.UNEXPECTED_STATUS_INDEX
if index >= 0:
self.tc_verdict_map[command][index] += 1
def remove_param_from_command(self, command, filter_param):
params = command.split()
k = -1
for i in range(0, len(params)):
if params[i] == filter_param:
k = i
break
if k == -1:
return command
command = ''
for i in range(0, len(params)):
if i < k or i > k+1:
command += params[i] + ' '
return command.strip()
def get_test_manager_file_size(self, folder_path, prefix, suffix):
sz = 0
onlyfiles = [f for f in os.listdir(folder_path) if os.path.isfile(os.path.join(folder_path, f))]
for file_name in onlyfiles:
if file_name.startswith(prefix) and file_name.endswith(suffix):
print(file_name)
sz += os.path.getsize(os.path.join(folder_path, file_name))
return sz
def run_test_manager(self, robot_root, tc_type, os_type, module, cmd_dir, command):
command = str(command)
print(cmd_dir)
print(command)
current_dir = os.getcwd()
os.chdir(cmd_dir)
directory = os.path.join(robot_root, 'report', tc_type, os_type)
if not os.path.exists(directory):
os.makedirs(directory)
prefix = 'test_manager' + '_' + module + '_' + os_type
suffix = '.log'
proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
def pull_output():
while(True):
ret_code = proc.poll() #returns None while subprocess is running
if ret_code is not None:
print('breaking output pulling')
break
t = threading.Thread(target=pull_output)
t.start()
minute = 60
prev_size = 0
while t.is_alive():
t.join(10 * minute)
print('current time:', strftime("%Y-%m-%d %H:%M:%S", gmtime()))
current_size = self.get_test_manager_file_size(directory, prefix, suffix)
print('prev_size:', prev_size)
print('current_size:', current_size)
if current_size == prev_size:
print ('hang detected')
proc.kill()
proc.terminate()
break
prev_size = current_size
os.chdir(current_dir)
def get_verdict_file_suffix(self, secured, command):
suffix = 'secured'
if secured == '0':
suffix = 'non-secure'
params = command.split()
for i in range (1, len(params), 2):
if params[i] == '-x':
suffix += '_' + params[i+1]
if params[i] == '-n':
suffix += '_' + params[i+1]
print (suffix)
return suffix
def execute_shell_command(self, cmd_dir, command, success_text):
command = str(command)
print(cmd_dir)
print(command)
current_dir = os.getcwd()
os.chdir(cmd_dir)
filepath = 'temp_shell_command_19900413.log'
fp = open(filepath, 'w')
proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
def pull_output():
while(True):
ret_code = proc.poll() #returns None while subprocess is running
if ret_code is not None:
break
line = proc.stdout.readline().strip()
print(line)
fp.write(str(line) + '\n')
fp.flush()
t = threading.Thread(target=pull_output)
t.start()
flag = True
minute = 60
prev_size = 0
while t.is_alive():
t.join(10 * minute)
print('current time:', strftime("%Y-%m-%d %H:%M:%S", gmtime()))
current_size = os.path.getsize(filepath)
print('prev_size:', prev_size)
print('current_size:', current_size)
if current_size == prev_size:
print ('hang detected')
break
prev_size = current_size
fp.close()
fp = open(filepath, 'r')
content = fp.read()
fp.close()
if success_text and success_text not in content:
flag = False
os.chdir(current_dir)
return flag, content
def verify_build_success(self, build_command, build_log):
if 'scons' in build_command:
if 'scons: building terminated because of errors.' in build_log:
return False
return True
def get_build_info(self, command, secured):
i = 0
build_param = ''
params = command.split()
for param in params:
if '-b' == param:
build_param = params[i+1]
break
i += 1
if not build_param:
if True == secured or '1' == secured:
build_param = 'secured'
else:
build_param = 'non-secured'
build_param = '-b ' + build_param
return build_param
def main():
module = 're'
os_type = 'android'
prefix = 'test_manager' + '_' + module + '_' + os_type
suffix = '.log'
directory = '/home/srbd/Downloads/release/log_android/robot_fw_log'
a = DeviceControlKeyword()
print(a.get_test_manager_file_size(directory, prefix, suffix))
if __name__ == "__main__":
main()
|
iotivity/iotivity
|
test/src/automation/robot/helper/DeviceControlKeyword.py
|
Python
|
apache-2.0
| 67,193
|
import unittest
from Pluss import add
class TestAdd(unittest.TestCase):
def setUp(self):
pass
def test_numbers_5_7(self):
self.assertEqual(add(5,7), 12)
if __name__ == '__main__':
unittest.main()
|
github4321/IS-105_2016_Gruppe92
|
uke03/Sebastian_Test_Pluss.py
|
Python
|
mit
| 233
|
#!/usr/bin/python3
# Copyright (c) 2017 Johannes Leupolz
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from pydbus import SessionBus
from pydbus import SystemBus
import asyncio, gbulb
from gi.repository.GLib import GError
#from hbmqtt.client import MQTTClient, ClientException
import paho.mqtt.client as mqtt
import subprocess
import os
import signal
import re
class BluetoothAudioBridge:
def __init__(self, loop):
self.loop = loop
self.DbusPulseAudioPath=""
self.DbusBluezOnSystemBus=True
self.DbusBluezBusName="org.bluez"
self.DbusBluezObjectPath="/org/bluez/hci0"
self.DbusBluezObject=None
self.DbusBluezReceivingFuture=None
self.DbusBluezDiscoveredDevices={}
self.DbusBluezUUIDsOfDevices={}
self.DbusBluezConnectedDevices={}
self.MqttPath="/BluetoothAudioBridge"
self.MqttServer="localhost"
self.MqttUsername="vhost:username"
self.MqttPassword="password"
self.MqttClient=None
self.MqttMessageQueue=asyncio.Queue()
self.MqttReceivingFuture=None
self.Continue=True
self.CancellationToken=self.loop.create_future()
self.TraceLevel=0
self.PollingCycle=3
self.mqttReceivedConnect=self.makeConnect
self.mqttReceivedPairAndTrust=self.makePairAndTrust
self.mqttReceivedScan=self.makeScan
self.dbusBtDeviceDetected=self.btDeviceDetected
self.dbusBtDeviceRemoved=self.btDeviceRemoved
self.dbusBtDeviceConnected=self.btDeviceConnected
self.dbusBtDeviceDisconnected=self.btDeviceDisconnected
self.dbusScanProcesses=0
self.btDeviceConfig = {}
self.btRunningProcesses = {}
def loadConfig(self,appConfig):
self.TraceLevel=appConfig["traceLevel"]
self.PollingCycle=appConfig["pollingCycle"]
self.btDeviceConfig = appConfig["bluetoothDevices"]
def trace(self,level,msg):
if self.TraceLevel >= level:
print(msg)
async def awaitOrStop(self,future):
# currently unused
done,pending = await asyncio.wait([self.CancellationToken, future],return_when=asyncio.FIRST_COMPLETED)
firstFinished=next(iter(done))
if firstFinished==self.CancellationToken:
#Note: pending tasks are still running
return (False,None)
#print(firstFinished)
#print(firstFinished.result())
return (True,firstFinished.result())
def makeConnect(self,message):
self.trace(0,"MQTT: received connect")
def makePairAndTrust(self,message):
self.trace(0,"MQTT: received pair and trust")
    def makeScan(self,message):
        self.dbusScanProcesses=self.dbusScanProcesses+1
        self.trace(0,"MQTT: received scan")
        # ensure_future expects a coroutine object, so the coroutine has to be called
        asyncio.ensure_future(self.stopScanningIn30Seconds())
    async def stopScanningIn30Seconds(self):
        await asyncio.sleep(30)
        self.dbusScanProcesses=self.dbusScanProcesses-1
        if (self.dbusScanProcesses==0):
            self.trace(2,"stop scanning for devices")
async def mqttProcessMessages(self):
while self.Continue:
message=await self.MqttMessageQueue.get()
if message==None:
self.trace(0,"stopping message proccessing")
return
self.trace(1,"MQTT: received message")
if message.startswith("Connect"):
self.mqttReceivedConnect(message)
            if message.startswith("Pair and trust"):
                self.mqttReceivedPairAndTrust(message)
if message.startswith("Scan"):
self.mqttReceivedScan(message)
async def registerMqtt(self):
def on_connect(client, userdata, flags, rc):
self.trace(0,"Connected with result code "+str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe("/BluetoothAudioBridge/commands")
def on_message(client, userdata, msg):
self.trace(1,msg.topic+" "+str(msg.payload))
msgDecoded=msg.payload.decode("utf-8")
asyncio.ensure_future(self.MqttMessageQueue.put(msgDecoded))
async def mqttReceiving():
while self.Continue:
self.trace(3,"MQTT: wait for message")
client.loop_read()
client.loop_write()
client.loop_misc()
await asyncio.sleep(0.1)
client.disconnect()
client.loop_read()
client.loop_write()
client.loop_misc()
self.MqttReceivingFuture.set_result(True)
asyncio.ensure_future(self.MqttMessageQueue.put(None)) # add final (empty) message into queue for a clean shutdown
def on_disconnect(client, userdata, rc):
if rc != 0:
self.trace(0,"Unexpected disconnection.")
client = mqtt.Client(client_id="thing-bluetoothbridge",)
client.on_connect = on_connect
client.on_message = on_message
client.on_disconnect = on_disconnect
client.username_pw_set(self.MqttUsername, password=self.MqttPassword)
client.connect(self.MqttServer, 1883, 60)
#register receiver
self.MqttReceivingFuture=self.loop.create_future()
asyncio.ensure_future(self.mqttProcessMessages())
asyncio.ensure_future(mqttReceiving())
self.trace(0,"registered on MQTT")
async def btDeviceDetected(self,address):
self.trace(0,"device detected "+address)
async def btDeviceRemoved(self,address):
self.trace(0,"device removed "+address)
def btClassIsAudio(self,btClass):
# https://www.bluetooth.com/specifications/assigned-numbers/baseband
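        # In the Class-of-Device value, bit 21 is the 'Audio' major service class and
        # bit 10 is set when the major device class (bits 8-12) is Audio/Video (0x04).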
major_service_audio_bit = 1<<21
major_device_audio_bit = 1<<10
is_audio_service = (major_service_audio_bit & btClass)==major_service_audio_bit
is_audio_device = (major_device_audio_bit & btClass)==major_device_audio_bit
return is_audio_service and is_audio_device
def btDeviceHasA2DPSink(self,uuids):
# https://github.com/pauloborges/bluez/blob/master/lib/uuid.h
if "0000110b-0000-1000-8000-00805f9b34fb" in uuids:
return True
return False
def stdoutOfPopen(self):
if self.TraceLevel < 3:
return subprocess.DEVNULL
return None
async def btDeviceConnected(self,address):
self.trace(0,"device connected "+address)
if address in self.btRunningProcesses:
processGroupToKill=self.btRunningProcesses[address].pid
os.killpg(os.getpgid(processGroupToKill), signal.SIGTERM)
await asyncio.sleep(1)
os.killpg(os.getpgid(processGroupToKill), signal.SIGKILL)
self.btRunningProcesses.pop(address,None)
deviceConfig=None
if address in self.btDeviceConfig:
deviceConfig = self.btDeviceConfig[address]
else:
uuids=self.DbusBluezUUIDsOfDevices[address]
if self.btDeviceHasA2DPSink(uuids) and "other_a2dp_sinks" in self.btDeviceConfig:
deviceConfig=self.btDeviceConfig["other_a2dp_sinks"]
if deviceConfig!=None:
if "onConnectCommand" in deviceConfig:
command=deviceConfig["onConnectCommand"]
if command:
commandToExecute=command.replace("$DEVICE",address)
self.btRunningProcesses[address]=subprocess.Popen(commandToExecute,shell=True, start_new_session=True,stdout=self.stdoutOfPopen(),stderr=self.stdoutOfPopen())
async def btDeviceDisconnected(self,address):
self.trace(0,"device disconnected "+address)
if address in self.btRunningProcesses:
processGroupToKill=self.btRunningProcesses[address].pid
os.killpg(os.getpgid(processGroupToKill), signal.SIGTERM)
await asyncio.sleep(1)
os.killpg(os.getpgid(processGroupToKill), signal.SIGKILL)
self.btRunningProcesses.pop(address,None)
deviceConfig=None
if address in self.btDeviceConfig:
deviceConfig = self.btDeviceConfig[address]
else:
uuids=self.DbusBluezUUIDsOfDevices[address]
if self.btDeviceHasA2DPSink(uuids) and "other_a2dp_sinks" in self.btDeviceConfig:
deviceConfig=self.btDeviceConfig["other_a2dp_sinks"]
if deviceConfig!=None:
if "onDisconnectCommand" in deviceConfig:
command=deviceConfig["onDisconnectCommand"]
if command:
commandToExecute=command.replace("$DEVICE",address)
self.btRunningProcesses[address]=subprocess.Popen(commandToExecute,shell=True, start_new_session=True,stdout=self.stdoutOfPopen(),stderr=self.stdoutOfPopen())
async def lookForDbusChanges(self):
deviceFilter = re.compile("^[/]\w+[/]\w+[/]\w+[/]dev_(?P<btmac>\w+)$")
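        # BlueZ object paths look like /org/bluez/hci0/dev_AA_BB_CC_DD_EE_FF; the
        # named group captures the MAC portion after 'dev_'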
while self.Continue:
self.trace(3,"DBUS: wait for device")
try:
self.trace(1,"DBUS: GetManagedObjects()")
managedObjects = await self.loop.run_in_executor(None, lambda: self.DbusBluezRootNode.GetManagedObjects())
await asyncio.sleep(0.5) # give PulseAudio a chance of connecting (not sure if necessary)
foundDevices={}
for objPath,obj in managedObjects.items():
match = deviceFilter.match(objPath)
if match:
btmac=match.group("btmac")
dev=obj[self.DbusBluezBusName+".Device1"]
foundDevices[btmac]=dev
self.trace(3,"Found "+str(len(foundDevices))+" devices")
removeDevices=[]
for oldDevice in self.DbusBluezDiscoveredDevices:
if oldDevice not in foundDevices:
removeDevices.append(oldDevice)
await self.dbusBtDeviceRemoved(oldDevice)
for removeDevice in removeDevices:
self.DbusBluezDiscoveredDevices.pop(removeDevice,None)
for foundDevice in foundDevices:
if foundDevice not in self.DbusBluezDiscoveredDevices:
self.DbusBluezDiscoveredDevices[foundDevice]=True
await self.dbusBtDeviceDetected(foundDevice)
# now check disconnect <-> connect
connectedDevices = {}
for foundDevice,dev in foundDevices.items():
if foundDevice not in self.DbusBluezUUIDsOfDevices:
self.DbusBluezUUIDsOfDevices[foundDevice] = dev["UUIDs"]
isConnected = dev["Connected"]
if isConnected :
connectedDevices[foundDevice]=True
disconnectedDevices=[]
for alreadyConnectedDevice in self.DbusBluezConnectedDevices:
if alreadyConnectedDevice not in connectedDevices:
disconnectedDevices.append(alreadyConnectedDevice)
await self.dbusBtDeviceDisconnected(alreadyConnectedDevice)
for disconnectedDevice in disconnectedDevices:
self.DbusBluezConnectedDevices.pop(disconnectedDevice,None)
for connectedDevice in connectedDevices:
if connectedDevice not in self.DbusBluezConnectedDevices:
self.DbusBluezConnectedDevices[connectedDevice]=True
await self.dbusBtDeviceConnected(connectedDevice)
except KeyError as err:
self.trace(0,"dbus error (KeyError)")
print(err)
self.trace(0,err)
except GError as err:
self.trace(0,"dbus error (GError)")
self.trace (0,err)
await asyncio.sleep(self.PollingCycle)
print("finished looking for dbus changes")
self.DbusBluezReceivingFuture.set_result(True)
async def registerDbus(self):
try:
if self.DbusBluezOnSystemBus:
self.DbusBluezObject = SystemBus()
else:
self.DbusBluezObject = SessionBus()
self.trace(0,"listening on D-BUS")
self.DbusBluezRootNode = self.DbusBluezObject.get(self.DbusBluezBusName,"/")
self.trace(0,"connected to org.bluez")
except GError as err:
self.trace(0,"dbus error (register)")
self.trace (0,err)
self.DbusBluezRootNode=None
if self.DbusBluezRootNode:
self.DbusBluezReceivingFuture=self.loop.create_future()
asyncio.ensure_future(self.lookForDbusChanges())
async def register(self):
await self.registerMqtt()
await self.registerDbus()
async def unregister(self):
self.Continue=False
if (self.DbusBluezReceivingFuture):
await self.DbusBluezReceivingFuture
self.DbusBluezReceivingFuture = None
if (self.MqttReceivingFuture):
await self.MqttReceivingFuture
self.MqttReceivingFuture=None
|
joleuger/bluetooth-monitor
|
core.py
|
Python
|
mit
| 14,157
|
# Purpose: AC1015 graphic builder
# Created: 10.03.2013
# Copyright (C) 2013, Manfred Moitzi
# License: MIT License
from __future__ import unicode_literals
__author__ = "mozman <mozman@gmx.at>"
class GraphicsFactoryAC1015(object):
def add_lwpolyline(self, points, dxfattribs=None):
if dxfattribs is None:
dxfattribs = {}
closed = dxfattribs.pop('closed', False)
lwpolyline = self.build_and_add_entity('LWPOLYLINE', dxfattribs)
lwpolyline.close(closed)
lwpolyline._setup_points(points)
return lwpolyline
|
lautr3k/RepRap-iTopie
|
odmt/ezdxf/ac1015/graphicsfactory.py
|
Python
|
gpl-3.0
| 569
|
import pyautogui
from rague.config import blt
def test_movement_system_on_dummy_entity_one_turn(world, dummy_entity):
world.entities.add(dummy_entity)
world.make_iteration()
assert dummy_entity.position.x == dummy_entity.position.y == 1
def test_movement_system_on_paralyzed_dummy(world, dummy_entity):
world.entities.add(dummy_entity)
del dummy_entity.velocity
while blt.has_input():
world.make_iteration()
assert dummy_entity.position.x == dummy_entity.position.y == 0
def test_player_up_movement(world, player):
world.entities.add(player)
pyautogui.typewrite(['up'])
while blt.has_input():
world.make_iteration()
assert player.position.x == 5 and player.position.y == 4
def test_player_right_movement(world, player):
world.entities.add(player)
pyautogui.typewrite(['right'])
while blt.has_input():
world.make_iteration()
assert player.position.x == 6 and player.position.y == 5
def test_player_circular_movement(world, player):
world.entities.add(player)
keys = ['up', 'right', 'down', 'left']
pyautogui.typewrite(keys)
while blt.has_input():
world.make_iteration()
assert player.position.x == 5 and player.position.y == 5
def test_player_not_passing_through_wall(world, player):
world.entities.add(player)
keys = ['right'] * 20
pyautogui.typewrite(keys)
while blt.has_input():
world.make_iteration()
assert player.position.x == 9 and player.position.y == 5
|
vitkarpenko/rague
|
tests/test_systems.py
|
Python
|
mit
| 1,514
|
class QueryBase(object):
def __or__(self, other):
return QueryGroup('or', self, other)
def __and__(self, other):
return QueryGroup('and', self, other)
def __invert__(self):
return QueryGroup('not', self)
class QueryGroup(QueryBase):
def __init__(self, operator, *args):
self.operator = operator
self.nodes = []
for node in args:
if not isinstance(node, QueryBase):
raise TypeError('Nodes must be Query objects.')
if isinstance(node, QueryGroup) and node.operator == operator:
self.nodes += node.nodes
else:
self.nodes.append(node)
def __repr__(self):
return '{0}({1})'.format(
self.operator.upper(),
', '.join(repr(node) for node in self.nodes)
)
class RawQuery(QueryBase):
def __init__(self, attribute, operator, argument):
self.attribute = attribute
self.operator = operator
self.argument = argument
def __repr__(self):
return 'RawQuery({}, {}, {})'.format(
self.attribute,
self.operator,
self.argument
)
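# A minimal usage sketch (illustrative only; 'eq'/'lt' and the attribute names are
# made-up arguments, not part of this module's API):
#   q = RawQuery('name', 'eq', 'foo') & ~RawQuery('age', 'lt', 18)
#   repr(q)  # -> "AND(RawQuery(name, eq, foo), NOT(RawQuery(age, lt, 18)))"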
|
chrisseto/modular-odm
|
modularodm/query/query.py
|
Python
|
apache-2.0
| 1,198
|
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import sys
import datetime
import unittest
from requests import Session
from azure.storage import (
AccessPolicy,
SharedAccessPolicy,
SignedIdentifier,
SignedIdentifiers,
)
from azure.storage.queue import (
QueueService,
QueueSharedAccessPermissions,
)
from azure.common import (
AzureHttpError,
AzureConflictHttpError,
AzureMissingResourceHttpError,
)
from tests.common_recordingtestcase import (
TestMode,
record,
)
from tests.storage_testcase import StorageTestCase
#------------------------------------------------------------------------------
TEST_QUEUE_PREFIX = 'mytestqueue'
#------------------------------------------------------------------------------
class StorageQueueTest(StorageTestCase):
def setUp(self):
super(StorageQueueTest, self).setUp()
self.qs = self._create_storage_service(QueueService, self.settings)
self.test_queues = []
self.creatable_queues = []
for i in range(10):
self.test_queues.append(self.get_resource_name(TEST_QUEUE_PREFIX + str(i)))
for i in range(4):
self.creatable_queues.append(
self.get_resource_name('mycreatablequeue' + str(i)))
if not self.is_playback():
for queue_name in self.test_queues:
self.qs.create_queue(queue_name)
def tearDown(self):
if not self.is_playback():
for queue_name in self.test_queues:
try:
self.qs.delete_queue(queue_name)
except:
pass
for queue_name in self.creatable_queues:
try:
self.qs.delete_queue(queue_name)
except:
pass
return super(StorageQueueTest, self).tearDown()
def _get_shared_access_policy(self, permission):
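        # Build an access policy whose window starts one minute in the past and
        # expires one hour after that start time.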
date_format = "%Y-%m-%dT%H:%M:%SZ"
start = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
expiry = start + datetime.timedelta(hours=1)
return SharedAccessPolicy(
AccessPolicy(
start.strftime(date_format),
expiry.strftime(date_format),
permission
)
)
@record
def test_get_service_properties(self):
# This api doesn't apply to local storage
if self.qs.use_local_storage:
return
# Action
properties = self.qs.get_queue_service_properties()
# Asserts
self.assertIsNotNone(properties)
self.assertIsNotNone(properties.logging)
self.assertIsNotNone(properties.logging.retention_policy)
self.assertIsNotNone(properties.logging.version)
self.assertIsNotNone(properties.hour_metrics)
self.assertIsNotNone(properties.hour_metrics.retention_policy)
self.assertIsNotNone(properties.hour_metrics.version)
self.assertIsNotNone(properties.minute_metrics)
self.assertIsNotNone(properties.minute_metrics.retention_policy)
self.assertIsNotNone(properties.minute_metrics.version)
@record
def test_set_service_properties(self):
# This api doesn't apply to local storage
if self.qs.use_local_storage:
return
# Action
queue_properties = self.qs.get_queue_service_properties()
queue_properties.logging.read = True
self.qs.set_queue_service_properties(queue_properties)
properties = self.qs.get_queue_service_properties()
# Asserts
self.assertIsNotNone(properties)
self.assertIsNotNone(properties.logging)
self.assertIsNotNone(properties.logging.retention_policy)
self.assertIsNotNone(properties.logging.version)
self.assertIsNotNone(properties.hour_metrics)
self.assertIsNotNone(properties.hour_metrics.retention_policy)
self.assertIsNotNone(properties.hour_metrics.version)
self.assertIsNotNone(properties.minute_metrics)
self.assertIsNotNone(properties.minute_metrics.retention_policy)
self.assertIsNotNone(properties.minute_metrics.version)
self.assertTrue(properties.logging.read)
@record
def test_create_queue(self):
# Action
self.qs.create_queue(self.creatable_queues[0])
result = self.qs.get_queue_metadata(self.creatable_queues[0])
self.qs.delete_queue(self.creatable_queues[0])
# Asserts
self.assertIsNotNone(result)
self.assertEqual(result['x-ms-approximate-messages-count'], '0')
@record
def test_create_queue_already_exist(self):
# Action
created1 = self.qs.create_queue(self.creatable_queues[0])
created2 = self.qs.create_queue(self.creatable_queues[0])
# Asserts
self.assertTrue(created1)
self.assertFalse(created2)
@record
def test_create_queue_fail_on_exist(self):
# Action
created = self.qs.create_queue(self.creatable_queues[0], None, True)
with self.assertRaises(AzureConflictHttpError):
self.qs.create_queue(self.creatable_queues[0], None, True)
# Asserts
self.assertTrue(created)
@record
def test_create_queue_with_options(self):
# Action
self.qs.create_queue(
self.creatable_queues[1],
x_ms_meta_name_values={'val1': 'test', 'val2': 'blah'})
result = self.qs.get_queue_metadata(self.creatable_queues[1])
# Asserts
self.assertIsNotNone(result)
self.assertEqual(3, len(result))
self.assertEqual(result['x-ms-approximate-messages-count'], '0')
self.assertEqual('test', result['x-ms-meta-val1'])
self.assertEqual('blah', result['x-ms-meta-val2'])
@record
def test_delete_queue_not_exist(self):
# Action
deleted = self.qs.delete_queue(self.creatable_queues[0])
# Asserts
self.assertFalse(deleted)
@record
def test_delete_queue_fail_not_exist_not_exist(self):
# Action
with self.assertRaises(AzureMissingResourceHttpError):
self.qs.delete_queue(self.creatable_queues[0], True)
# Asserts
@record
def test_delete_queue_fail_not_exist_already_exist(self):
# Action
created = self.qs.create_queue(self.creatable_queues[0])
deleted = self.qs.delete_queue(self.creatable_queues[0], True)
# Asserts
self.assertTrue(created)
self.assertTrue(deleted)
@record
def test_list_queues(self):
# Action
queues = self.qs.list_queues()
for queue in queues:
pass
# Asserts
self.assertIsNotNone(queues)
self.assertEqual('', queues.marker)
self.assertEqual(0, queues.max_results)
self.assertTrue(len(self.test_queues) <= len(queues))
@record
def test_list_queues_with_options(self):
# Action
queues_1 = self.qs.list_queues(prefix=TEST_QUEUE_PREFIX, maxresults=3)
queues_2 = self.qs.list_queues(
prefix=TEST_QUEUE_PREFIX,
marker=queues_1.next_marker,
include='metadata')
# Asserts
self.assertIsNotNone(queues_1)
self.assertEqual(3, len(queues_1))
self.assertEqual(3, queues_1.max_results)
self.assertEqual('', queues_1.marker)
self.assertIsNotNone(queues_1[0])
self.assertIsNone(queues_1[0].metadata)
self.assertNotEqual('', queues_1[0].name)
# Asserts
self.assertIsNotNone(queues_2)
self.assertTrue(len(self.test_queues) - 3 <= len(queues_2))
self.assertEqual(0, queues_2.max_results)
self.assertEqual(queues_1.next_marker, queues_2.marker)
self.assertIsNotNone(queues_2[0])
self.assertIsNotNone(queues_2[0].metadata)
self.assertNotEqual('', queues_2[0].name)
@record
def test_set_queue_metadata(self):
# Action
self.qs.create_queue(self.creatable_queues[2])
self.qs.set_queue_metadata(
self.creatable_queues[2],
x_ms_meta_name_values={'val1': 'test', 'val2': 'blah'})
result = self.qs.get_queue_metadata(self.creatable_queues[2])
self.qs.delete_queue(self.creatable_queues[2])
# Asserts
self.assertIsNotNone(result)
self.assertEqual(3, len(result))
self.assertEqual('0', result['x-ms-approximate-messages-count'])
self.assertEqual('test', result['x-ms-meta-val1'])
self.assertEqual('blah', result['x-ms-meta-val2'])
@record
def test_put_message(self):
# Action. No exception means pass. No asserts needed.
self.qs.put_message(self.test_queues[0], 'message1')
self.qs.put_message(self.test_queues[0], 'message2')
self.qs.put_message(self.test_queues[0], 'message3')
self.qs.put_message(self.test_queues[0], 'message4')
@record
def test_get_messages(self):
# Action
self.qs.put_message(self.test_queues[1], 'message1')
self.qs.put_message(self.test_queues[1], 'message2')
self.qs.put_message(self.test_queues[1], 'message3')
self.qs.put_message(self.test_queues[1], 'message4')
result = self.qs.get_messages(self.test_queues[1])
# Asserts
self.assertIsNotNone(result)
self.assertEqual(1, len(result))
message = result[0]
self.assertIsNotNone(message)
self.assertNotEqual('', message.message_id)
self.assertEqual('message1', message.message_text)
self.assertNotEqual('', message.pop_receipt)
self.assertEqual('1', message.dequeue_count)
self.assertNotEqual('', message.insertion_time)
self.assertNotEqual('', message.expiration_time)
self.assertNotEqual('', message.time_next_visible)
@record
def test_get_messages_with_options(self):
# Action
self.qs.put_message(self.test_queues[2], 'message1')
self.qs.put_message(self.test_queues[2], 'message2')
self.qs.put_message(self.test_queues[2], 'message3')
self.qs.put_message(self.test_queues[2], 'message4')
result = self.qs.get_messages(
self.test_queues[2], numofmessages=4, visibilitytimeout=20)
# Asserts
self.assertIsNotNone(result)
self.assertEqual(4, len(result))
for message in result:
self.assertIsNotNone(message)
self.assertNotEqual('', message.message_id)
self.assertNotEqual('', message.message_text)
self.assertNotEqual('', message.pop_receipt)
self.assertEqual('1', message.dequeue_count)
self.assertNotEqual('', message.insertion_time)
self.assertNotEqual('', message.expiration_time)
self.assertNotEqual('', message.time_next_visible)
@record
def test_peek_messages(self):
# Action
self.qs.put_message(self.test_queues[3], 'message1')
self.qs.put_message(self.test_queues[3], 'message2')
self.qs.put_message(self.test_queues[3], 'message3')
self.qs.put_message(self.test_queues[3], 'message4')
result = self.qs.peek_messages(self.test_queues[3])
# Asserts
self.assertIsNotNone(result)
self.assertEqual(1, len(result))
message = result[0]
self.assertIsNotNone(message)
self.assertNotEqual('', message.message_id)
self.assertNotEqual('', message.message_text)
self.assertEqual('', message.pop_receipt)
self.assertEqual('0', message.dequeue_count)
self.assertNotEqual('', message.insertion_time)
self.assertNotEqual('', message.expiration_time)
self.assertEqual('', message.time_next_visible)
@record
def test_peek_messages_with_options(self):
# Action
self.qs.put_message(self.test_queues[4], 'message1')
self.qs.put_message(self.test_queues[4], 'message2')
self.qs.put_message(self.test_queues[4], 'message3')
self.qs.put_message(self.test_queues[4], 'message4')
result = self.qs.peek_messages(self.test_queues[4], numofmessages=4)
# Asserts
self.assertIsNotNone(result)
self.assertEqual(4, len(result))
for message in result:
self.assertIsNotNone(message)
self.assertNotEqual('', message.message_id)
self.assertNotEqual('', message.message_text)
self.assertEqual('', message.pop_receipt)
self.assertEqual('0', message.dequeue_count)
self.assertNotEqual('', message.insertion_time)
self.assertNotEqual('', message.expiration_time)
self.assertEqual('', message.time_next_visible)
@record
def test_clear_messages(self):
# Action
self.qs.put_message(self.test_queues[5], 'message1')
self.qs.put_message(self.test_queues[5], 'message2')
self.qs.put_message(self.test_queues[5], 'message3')
self.qs.put_message(self.test_queues[5], 'message4')
self.qs.clear_messages(self.test_queues[5])
result = self.qs.peek_messages(self.test_queues[5])
# Asserts
self.assertIsNotNone(result)
self.assertEqual(0, len(result))
@record
def test_delete_message(self):
# Action
self.qs.put_message(self.test_queues[6], 'message1')
self.qs.put_message(self.test_queues[6], 'message2')
self.qs.put_message(self.test_queues[6], 'message3')
self.qs.put_message(self.test_queues[6], 'message4')
result = self.qs.get_messages(self.test_queues[6])
self.qs.delete_message(
self.test_queues[6], result[0].message_id, result[0].pop_receipt)
result2 = self.qs.get_messages(self.test_queues[6], numofmessages=32)
# Asserts
self.assertIsNotNone(result2)
self.assertEqual(3, len(result2))
@record
def test_update_message(self):
# Action
self.qs.put_message(self.test_queues[7], 'message1')
list_result1 = self.qs.get_messages(self.test_queues[7])
self.qs.update_message(self.test_queues[7],
list_result1[0].message_id,
'new text',
list_result1[0].pop_receipt,
visibilitytimeout=0)
list_result2 = self.qs.get_messages(self.test_queues[7])
# Asserts
self.assertIsNotNone(list_result2)
message = list_result2[0]
self.assertIsNotNone(message)
self.assertNotEqual('', message.message_id)
self.assertEqual('new text', message.message_text)
self.assertNotEqual('', message.pop_receipt)
self.assertEqual('2', message.dequeue_count)
self.assertNotEqual('', message.insertion_time)
self.assertNotEqual('', message.expiration_time)
self.assertNotEqual('', message.time_next_visible)
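# The SAS tests below share one pattern: generate a shared-access token scoped to a
# single permission using the account key, build a second QueueService that
# authenticates with that token alone, and confirm the corresponding operation
# succeeds through it (results are verified with the key-authenticated client).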
def test_sas_read(self):
# SAS URL is calculated from storage key, so this test runs live only
if TestMode.need_recordingfile(self.test_mode):
return
# Arrange
self.qs.put_message(self.test_queues[0], 'message1')
token = self.qs.generate_shared_access_signature(
self.test_queues[0],
self._get_shared_access_policy(QueueSharedAccessPermissions.READ),
)
# Act
service = QueueService(
account_name=self.settings.STORAGE_ACCOUNT_NAME,
sas_token=token,
)
self._set_service_options(service, self.settings)
result = service.peek_messages(self.test_queues[0])
# Assert
self.assertIsNotNone(result)
self.assertEqual(1, len(result))
message = result[0]
self.assertIsNotNone(message)
self.assertNotEqual('', message.message_id)
self.assertEqual('message1', message.message_text)
def test_sas_add(self):
# SAS URL is calculated from storage key, so this test runs live only
if TestMode.need_recordingfile(self.test_mode):
return
# Arrange
token = self.qs.generate_shared_access_signature(
self.test_queues[0],
self._get_shared_access_policy(QueueSharedAccessPermissions.ADD),
)
# Act
service = QueueService(
account_name=self.settings.STORAGE_ACCOUNT_NAME,
sas_token=token,
)
self._set_service_options(service, self.settings)
result = service.put_message(self.test_queues[0], 'addedmessage')
# Assert
result = self.qs.get_messages(self.test_queues[0])
self.assertEqual('addedmessage', result[0].message_text)
def test_sas_update(self):
# SAS URL is calculated from storage key, so this test runs live only
if TestMode.need_recordingfile(self.test_mode):
return
# Arrange
self.qs.put_message(self.test_queues[0], 'message1')
token = self.qs.generate_shared_access_signature(
self.test_queues[0],
self._get_shared_access_policy(QueueSharedAccessPermissions.UPDATE),
)
result = self.qs.get_messages(self.test_queues[0])
# Act
service = QueueService(
account_name=self.settings.STORAGE_ACCOUNT_NAME,
sas_token=token,
)
self._set_service_options(service, self.settings)
service.update_message(
self.test_queues[0],
result[0].message_id,
'updatedmessage1',
result[0].pop_receipt,
visibilitytimeout=0,
)
# Assert
result = self.qs.get_messages(self.test_queues[0])
self.assertEqual('updatedmessage1', result[0].message_text)
def test_sas_process(self):
# SAS URL is calculated from storage key, so this test runs live only
if TestMode.need_recordingfile(self.test_mode):
return
# Arrange
self.qs.put_message(self.test_queues[0], 'message1')
token = self.qs.generate_shared_access_signature(
self.test_queues[0],
self._get_shared_access_policy(QueueSharedAccessPermissions.PROCESS),
)
# Act
service = QueueService(
account_name=self.settings.STORAGE_ACCOUNT_NAME,
sas_token=token,
)
self._set_service_options(service, self.settings)
result = service.get_messages(self.test_queues[0])
# Assert
self.assertIsNotNone(result)
self.assertEqual(1, len(result))
message = result[0]
self.assertIsNotNone(message)
self.assertNotEqual('', message.message_id)
self.assertEqual('message1', message.message_text)
def test_sas_signed_identifier(self):
# SAS URL is calculated from storage key, so this test runs live only
if TestMode.need_recordingfile(self.test_mode):
return
# Arrange
si = SignedIdentifier()
si.id = 'testid'
si.access_policy.start = '2011-10-11'
si.access_policy.expiry = '2018-10-12'
si.access_policy.permission = QueueSharedAccessPermissions.READ
identifiers = SignedIdentifiers()
identifiers.signed_identifiers.append(si)
resp = self.qs.set_queue_acl(self.test_queues[0], identifiers)
self.qs.put_message(self.test_queues[0], 'message1')
token = self.qs.generate_shared_access_signature(
self.test_queues[0],
SharedAccessPolicy(signed_identifier=si.id),
)
# Act
service = QueueService(
account_name=self.settings.STORAGE_ACCOUNT_NAME,
sas_token=token,
)
self._set_service_options(service, self.settings)
result = service.peek_messages(self.test_queues[0])
# Assert
self.assertIsNotNone(result)
self.assertEqual(1, len(result))
message = result[0]
self.assertIsNotNone(message)
self.assertNotEqual('', message.message_id)
self.assertEqual('message1', message.message_text)
@record
def test_get_queue_acl(self):
# Arrange
# Act
acl = self.qs.get_queue_acl(self.test_queues[0])
# Assert
self.assertIsNotNone(acl)
self.assertEqual(len(acl.signed_identifiers), 0)
@record
def test_get_queue_acl_iter(self):
# Arrange
# Act
acl = self.qs.get_queue_acl(self.test_queues[0])
for signed_identifier in acl:
pass
# Assert
self.assertIsNotNone(acl)
self.assertEqual(len(acl.signed_identifiers), 0)
self.assertEqual(len(acl), 0)
@record
def test_get_queue_acl_with_non_existing_queue(self):
# Arrange
# Act
with self.assertRaises(AzureMissingResourceHttpError):
self.qs.get_queue_acl(self.creatable_queues[0])
# Assert
@record
def test_set_queue_acl(self):
# Arrange
# Act
resp = self.qs.set_queue_acl(self.test_queues[0])
# Assert
self.assertIsNone(resp)
acl = self.qs.get_queue_acl(self.test_queues[0])
self.assertIsNotNone(acl)
@record
def test_set_queue_acl_with_empty_signed_identifiers(self):
# Arrange
# Act
identifiers = SignedIdentifiers()
resp = self.qs.set_queue_acl(self.test_queues[0], identifiers)
# Assert
self.assertIsNone(resp)
acl = self.qs.get_queue_acl(self.test_queues[0])
self.assertIsNotNone(acl)
self.assertEqual(len(acl.signed_identifiers), 0)
@record
def test_set_queue_acl_with_signed_identifiers(self):
# Arrange
# Act
si = SignedIdentifier()
si.id = 'testid'
si.access_policy.start = '2011-10-11'
si.access_policy.expiry = '2011-10-12'
si.access_policy.permission = QueueSharedAccessPermissions.READ
identifiers = SignedIdentifiers()
identifiers.signed_identifiers.append(si)
resp = self.qs.set_queue_acl(self.test_queues[0], identifiers)
# Assert
self.assertIsNone(resp)
acl = self.qs.get_queue_acl(self.test_queues[0])
self.assertIsNotNone(acl)
self.assertEqual(len(acl.signed_identifiers), 1)
self.assertEqual(len(acl), 1)
self.assertEqual(acl.signed_identifiers[0].id, 'testid')
self.assertEqual(acl[0].id, 'testid')
@record
def test_set_queue_acl_with_non_existing_queue(self):
# Arrange
# Act
with self.assertRaises(AzureMissingResourceHttpError):
self.qs.set_queue_acl(self.creatable_queues[0])
# Assert
@record
def test_with_filter(self):
# Single filter
called = []
def my_filter(request, next):
called.append(True)
return next(request)
qc = self.qs.with_filter(my_filter)
qc.put_message(self.test_queues[7], 'message1')
self.assertTrue(called)
del called[:]
# Chained filters
def filter_a(request, next):
called.append('a')
return next(request)
def filter_b(request, next):
called.append('b')
return next(request)
qc = self.qs.with_filter(filter_a).with_filter(filter_b)
qc.put_message(self.test_queues[7], 'message1')
self.assertEqual(called, ['b', 'a'])
@record
def test_unicode_create_queue_unicode_name(self):
# Action
self.creatable_queues[0] = u'啊齄丂狛狜'
with self.assertRaises(AzureHttpError):
# not supported - queue name must be alphanumeric, lowercase
self.qs.create_queue(self.creatable_queues[0])
# Asserts
@record
def test_unicode_get_messages_unicode_data(self):
# Action
self.qs.put_message(self.test_queues[1], u'message1㚈')
result = self.qs.get_messages(self.test_queues[1])
# Asserts
self.assertIsNotNone(result)
self.assertEqual(1, len(result))
message = result[0]
self.assertIsNotNone(message)
self.assertNotEqual('', message.message_id)
self.assertEqual(u'message1㚈', message.message_text)
self.assertNotEqual('', message.pop_receipt)
self.assertEqual('1', message.dequeue_count)
self.assertNotEqual('', message.insertion_time)
self.assertNotEqual('', message.expiration_time)
self.assertNotEqual('', message.time_next_visible)
@record
def test_unicode_update_message_unicode_data(self):
# Action
self.qs.put_message(self.test_queues[7], 'message1')
list_result1 = self.qs.get_messages(self.test_queues[7])
self.qs.update_message(self.test_queues[7],
list_result1[0].message_id,
u'啊齄丂狛狜',
list_result1[0].pop_receipt,
visibilitytimeout=0)
list_result2 = self.qs.get_messages(self.test_queues[7])
# Asserts
self.assertIsNotNone(list_result2)
message = list_result2[0]
self.assertIsNotNone(message)
self.assertNotEqual('', message.message_id)
self.assertEqual(u'啊齄丂狛狜', message.message_text)
self.assertNotEqual('', message.pop_receipt)
self.assertEqual('2', message.dequeue_count)
self.assertNotEqual('', message.insertion_time)
self.assertNotEqual('', message.expiration_time)
self.assertNotEqual('', message.time_next_visible)
#------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
|
phonnz/azure-storage-python
|
tests/test_storage_queue.py
|
Python
|
apache-2.0
| 26,556
|
#
# Copyright 2005,2006,2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# See gnuradio-examples/python/digital for examples
"""
QAM16 modulation and demodulation.
"""
from gnuradio import gr, gru, modulation_utils
from math import pi, sqrt
import qam
import cmath
from pprint import pprint
# default values (used in __init__ and add_options)
_def_samples_per_symbol = 2
_def_excess_bw = 0.35
_def_gray_code = True
_def_verbose = False
_def_log = False
_def_costas_alpha = None
_def_gain_mu = 0.03
_def_mu = 0.05
_def_omega_relative_limit = 0.005
# /////////////////////////////////////////////////////////////////////////////
# QAM16 modulator
# /////////////////////////////////////////////////////////////////////////////
class qam16_mod(gr.hier_block2):
def __init__(self,
samples_per_symbol=_def_samples_per_symbol,
excess_bw=_def_excess_bw,
gray_code=_def_gray_code,
verbose=_def_verbose,
log=_def_log):
"""
Hierarchical block for RRC-filtered QAM16 modulation.
The input is a byte stream (unsigned char) and the
output is the complex modulated signal at baseband.
@param samples_per_symbol: samples per symbol >= 2
@type samples_per_symbol: integer
@param excess_bw: Root-raised cosine filter excess bandwidth
@type excess_bw: float
@param gray_code: Tell modulator to Gray code the bits
@type gray_code: bool
@param verbose: Print information about modulator?
@type verbose: bool
@param log: Log modulation data to files?
@type log: bool
"""
gr.hier_block2.__init__(self, "qam16_mod",
gr.io_signature(1, 1, gr.sizeof_char), # Input signature
gr.io_signature(1, 1, gr.sizeof_gr_complex)) # Output signature
self._samples_per_symbol = samples_per_symbol
self._excess_bw = excess_bw
self._gray_code = gray_code
if not isinstance(samples_per_symbol, int) or samples_per_symbol < 2:
raise TypeError, ("sbp must be an integer >= 2, is %d" % samples_per_symbol)
ntaps = 11 * samples_per_symbol
arity = pow(2, self.bits_per_symbol())
# turn bytes into k-bit vectors
self.bytes2chunks = \
gr.packed_to_unpacked_bb(self.bits_per_symbol(), gr.GR_MSB_FIRST)
if self._gray_code:
self.symbol_mapper = gr.map_bb(qam.binary_to_gray[arity])
else:
self.symbol_mapper = gr.map_bb(qam.binary_to_ungray[arity])
self.diffenc = gr.diff_encoder_bb(arity)
rot = 1.0
print "constellation with %d arity" % arity
rotated_const = map(lambda pt: pt * rot, qam.constellation[arity])
self.chunks2symbols = gr.chunks_to_symbols_bc(rotated_const)
# pulse shaping filter
self.rrc_taps = gr.firdes.root_raised_cosine(
self._samples_per_symbol, # gain (sps since we're interpolating by sps)
self._samples_per_symbol, # sampling rate
1.0, # symbol rate
self._excess_bw, # excess bandwidth (roll-off factor)
ntaps)
self.rrc_filter = gr.interp_fir_filter_ccf(self._samples_per_symbol, self.rrc_taps)
if verbose:
self._print_verbage()
if log:
self._setup_logging()
# Connect
self.connect(self, self.bytes2chunks, self.symbol_mapper, self.diffenc,
self.chunks2symbols, self.rrc_filter, self)
def samples_per_symbol(self):
return self._samples_per_symbol
def bits_per_symbol(self=None): # staticmethod that's also callable on an instance
return 4
bits_per_symbol = staticmethod(bits_per_symbol) # make it a static method. RTFM
def _print_verbage(self):
print "bits per symbol = %d" % self.bits_per_symbol()
print "Gray code = %s" % self._gray_code
print "RRS roll-off factor = %f" % self._excess_bw
def _setup_logging(self):
print "Modulation logging turned on."
self.connect(self.bytes2chunks,
gr.file_sink(gr.sizeof_char, "bytes2chunks.dat"))
self.connect(self.symbol_mapper,
gr.file_sink(gr.sizeof_char, "graycoder.dat"))
self.connect(self.diffenc,
gr.file_sink(gr.sizeof_char, "diffenc.dat"))
self.connect(self.chunks2symbols,
gr.file_sink(gr.sizeof_gr_complex, "chunks2symbols.dat"))
self.connect(self.rrc_filter,
gr.file_sink(gr.sizeof_gr_complex, "rrc_filter.dat"))
def add_options(parser):
"""
Adds QAM modulation-specific options to the standard parser
"""
parser.add_option("", "--excess-bw", type="float", default=_def_excess_bw,
help="set RRC excess bandwith factor [default=%default] (PSK)")
parser.add_option("", "--no-gray-code", dest="gray_code",
action="store_false", default=_def_gray_code,
help="disable gray coding on modulated bits (PSK)")
add_options=staticmethod(add_options)
def extract_kwargs_from_options(options):
"""
Given command line options, create dictionary suitable for passing to __init__
"""
return modulation_utils.extract_kwargs_from_options(qam16_mod.__init__,
('self',), options)
extract_kwargs_from_options=staticmethod(extract_kwargs_from_options)
# /////////////////////////////////////////////////////////////////////////////
# QAM16 demodulator
#
# /////////////////////////////////////////////////////////////////////////////
class qam16_demod(gr.hier_block2):
def __init__(self,
samples_per_symbol=_def_samples_per_symbol,
excess_bw=_def_excess_bw,
costas_alpha=_def_costas_alpha,
gain_mu=_def_gain_mu,
mu=_def_mu,
omega_relative_limit=_def_omega_relative_limit,
gray_code=_def_gray_code,
verbose=_def_verbose,
log=_def_log):
gr.hier_block2.__init__(self, "qam16_demod",
gr.io_signature(1, 1, gr.sizeof_gr_complex), # Input signature
gr.io_signature(1, 1, gr.sizeof_char)) # Output signature
# Demodulator chain not yet implemented; see the "NOT READY" note at the
# bottom of this file.
pass
def bits_per_symbol(self=None): # staticmethod that's also callable on an instance
return 4
bits_per_symbol = staticmethod(bits_per_symbol) # make it a static method. RTFM
#
# Add these to the mod/demod registry
#
# NOT READY TO BE USED YET -- ENABLE AT YOUR OWN RISK
#modulation_utils.add_type_1_mod('qam16', qam16_mod)
#modulation_utils.add_type_1_demod('qam16', qam16_demod)
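# Illustrative use of the modulator in a flowgraph (a sketch, not taken from this
# file; `src` and `snk` stand for any byte source and complex sink you already have):
#
#   tb = gr.top_block()
#   mod = qam16_mod(samples_per_symbol=4, excess_bw=0.35)
#   tb.connect(src, mod, snk)
#   tb.run()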
|
UpYou/relay
|
my_gnuradio/blks2impl/qam16.py
|
Python
|
gpl-3.0
| 7,599
|
"""
Forum attachments cache
=======================
This module defines an abstraction allowing to put forum attachments into cache when users are
creating forum posts and topics.
"""
from io import BytesIO
from django.conf import settings
from django.core.cache import InvalidCacheBackendError, caches
from django.core.exceptions import ImproperlyConfigured
from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile
from django.utils.datastructures import MultiValueDict
from machina.conf import settings as machina_settings
class AttachmentCache:
""" The attachments cache.
This one should be used with a FileBasedCache backend, but this can be overridden. The
attachments cache acts as a wrapper and ensures that the state (name, size, content type,
charset and content) of each file in a request.FILES dict is saved inside the considered
backend when calling the 'set' method. Conversely, the 'get' method populates a dictionary
of InMemoryUploadedFile or TemporaryUploadedFile instances by using these states.
"""
def __init__(self):
self.backend = self.get_backend()
def get_backend(self):
""" Returns the associated cache backend. """
try:
cache = caches[machina_settings.ATTACHMENT_CACHE_NAME]
except InvalidCacheBackendError:
raise ImproperlyConfigured(
'The attachment cache backend ({}) is not configured'.format(
machina_settings.ATTACHMENT_CACHE_NAME,
),
)
return cache
def set(self, key, files):
""" Stores the state of each file embedded in the request.FILES MultiValueDict instance.
This instance is assumed to be passed as the 'files' argument. Each state stored in the
cache is a dictionary containing the following values:
name
The name of the uploaded file.
size
The size of the uploaded file.
content_type
The content type of the uploaded file.
charset
The charset of the uploaded file.
content
The content of the uploaded file.
"""
files_states = {}
for name, upload in files.items():
# Generates the state of the file
state = {
'name': upload.name,
'size': upload.size,
'content_type': upload.content_type,
'charset': upload.charset,
'content': upload.file.read(),
}
files_states[name] = state
# Go to the first byte in the file for future use
upload.file.seek(0)
self.backend.set(key, files_states)
def get(self, key):
""" Regenerates a MultiValueDict instance containing the files related to all file states
stored for the given key.
"""
upload = None
files_states = self.backend.get(key)
files = MultiValueDict()
if files_states:
for name, state in files_states.items():
f = BytesIO()
f.write(state['content'])
# If the uploaded file is too large, we cannot use an
# InMemoryUploadedFile instance.
if state['size'] > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
upload = TemporaryUploadedFile(
state['name'],
state['content_type'],
state['size'],
state['charset'],
)
upload.file = f
else:
upload = InMemoryUploadedFile(
file=f,
field_name=name,
name=state['name'],
content_type=state['content_type'],
size=state['size'],
charset=state['charset'],
)
files[name] = upload
# Go to the first byte in the file for future use
upload.file.seek(0)
return files
def delete(self, key):
""" Deletes a file associated with a specific key. """
self.backend.delete(key)
cache = AttachmentCache()
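# Minimal usage sketch (illustrative only; the view name and cache key below are
# hypothetical, not part of machina's API):
#
#   def preview_post(request):
#       cache.set(str(request.user.pk), request.FILES)   # stash the uploads
#       ...
#       files = cache.get(str(request.user.pk))          # rebuild a MultiValueDict later
#       cache.delete(str(request.user.pk))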
|
ellmetha/django-machina
|
machina/apps/forum_conversation/forum_attachments/cache.py
|
Python
|
bsd-3-clause
| 4,544
|
from behave import given, when, then
import json
import os
from django.contrib.auth.models import User, Group
from pybel import readfile
@given('I have valid stereochem compound with one stereocentre')
def step(context):
context.inchi = "InChI=1S/C10H12O2/c1-3-8-4-7(2)5-9(6-8)10(11)12/h4-6H,3H2,1-2H3,(H,11,12)"
context.post_data["ctab"] = """
12 12 0 0 0 0 999 V2000
0.4330 0.2500 0.0000 C 0 0 0 0 0 0
1.2990 -0.2500 0.0000 C 0 0 0 0 0 0
1.2990 -1.2500 0.0000 C 0 0 0 0 0 0
0.4330 -1.7500 0.0000 C 0 0 0 0 0 0
-0.4330 -1.2500 0.0000 C 0 0 0 0 0 0
-0.4330 -0.2500 0.0000 C 0 0 0 0 0 0
0.4330 1.2500 0.0000 C 0 0 0 0 0 0
1.2990 1.7500 0.0000 O 0 0 0 0 0 0
-0.4330 1.7500 0.0000 O 0 0 0 0 0 0
-1.2990 -1.7500 0.0000 C 0 0 0 0 0 0
-2.1651 -1.2500 0.0000 C 0 0 0 0 0 0
2.1651 -1.7500 0.0000 C 0 0 0 0 0 0
1 21 0 0 0
2 32 0 0 0
3 41 0 0 0
4 52 0 0 0
5 61 0 0 0
6 12 0 0 0
1 71 0 0 0
7 81 0 0 0
7 92 0 0 0
5 101 0 0 0
10 111 1 0 0
3 121 0 0 0
M END"""
@given('I have valid stereochem compound with multiple stereocentres')
def step(context):
context.inchi = "InChI=1S/C11H14O2/c1-3-8-5-9(4-2)7-10(6-8)11(12)13/h5-7H,3-4H2,1-2H3,(H,12,13)"
context.post_data["ctab"] = """
13 13 0 0 0 0 999 V2000
-0.0000 0.2500 0.0000 C 0 0 0 0 0 0
0.8660 -0.2500 0.0000 C 0 0 0 0 0 0
0.8660 -1.2500 0.0000 C 0 0 0 0 0 0
0.0000 -1.7500 0.0000 C 0 0 0 0 0 0
-0.8660 -1.2500 0.0000 C 0 0 0 0 0 0
-0.8660 -0.2500 0.0000 C 0 0 0 0 0 0
-0.0000 1.2500 0.0000 C 0 0 0 0 0 0
0.8660 1.7500 0.0000 O 0 0 0 0 0 0
-0.8660 1.7500 0.0000 O 0 0 0 0 0 0
-1.7321 -1.7500 0.0000 C 0 0 0 0 0 0
-2.5981 -1.2500 0.0000 C 0 0 0 0 0 0
1.7321 -1.7500 0.0000 C 0 0 0 0 0 0
2.5981 -1.2500 0.0000 C 0 0 0 0 0 0
1 21 0 0 0
2 32 0 0 0
3 41 0 0 0
4 52 0 0 0
5 61 0 0 0
6 12 0 0 0
1 71 0 0 0
7 81 0 0 0
7 92 0 0 0
5 101 0 0 0
10 111 1 0 0
3 121 0 0 0
12 131 6 0 0
M END"""
@given("I have valid stereochem compounds within a ChemDraw file")
def step(context):
# pull the contents of our chemdraw test file
fn = os.path.join(os.path.dirname(__file__), 'files/behave_cdxml.cdxml')
# convert the chemdraw file contents to mol
mols = [mol.write("smi").split("\t")[0] for mol in readfile('cdxml', fn)]
#file_contents = "".join(mols)
print(len(mols))
context.post_data["type"] = "Smiles"
context.post_data["objects"] = mols
# also populate our inchi list
@given("I have valid stereochem compounds within a SD file")
def step(context):
# pull the contents of our SD test file
fn = os.path.join(os.path.dirname(__file__), 'files/behave_sdf.sdf')
mols = [mol.write("smi").split("\t")[0] for mol in readfile('sdf', fn)]
print(len(mols))
context.post_data["type"] = "Smiles"
context.post_data["objects"] = mols
@then("I {action} my cbh_compound_batch to {projkey}")
def step(context, action=None, projkey=None, responsecode=202):
from cbh_core_model.models import Project
if action == "validate":
# validate the batch without saving it
path = "/dev/cbh_compound_batches/validate/"
func = context.api_client.post
context.post_data["projectKey"] = projkey
resp = func(
path,
format='json',
data=context.post_data,
)
assert int(resp.status_code) == int(responsecode)
elif action == 'create':
path = "/dev/cbh_compound_batches/"
func = context.api_client.post
context.post_data["projectKey"] = projkey
# print(context.post_data)
resp = func(
path,
format='json',
data=context.post_data,
)
assert resp.status_code == 201
@then("I {action} my stereochem compounds to {projkey}")
def step(context, action=None, projkey=None, responsecode=202):
from cbh_core_model.models import Project
if action == "validate":
# validate the list of batches without saving them
path = "/dev/cbh_compound_batches/validate_list/"
func = context.api_client.post
context.post_data["projectKey"] = projkey
resp = func(
path,
format='json',
data=context.post_data,
)
context.post_data['current_batch'] = context.ser.deserialize(
resp.content)["currentBatch"]
assert int(resp.status_code) == int(responsecode)
elif action == 'create':
path = "/dev/cbh_compound_batches/multi_batch_save/"
func = context.api_client.post
context.post_data["projectKey"] = projkey
# print(context.post_data)
resp = func(
path,
format='json',
data=context.post_data,
)
assert resp.status_code == 201
@then('retain its stereochemistry')
def step(context, action=None, projkey=None):
from cbh_core_model.models import Project, CBHCompoundBatch
from rdkit import Chem
from rdkit.Chem import AllChem, inchi
path = "/dev/cbh_compound_batches/"
resp = context.api_client.get(
path,
format='json',
data=context.post_data,
)
reg_cmpds = context.ser.deserialize(resp.content)["objects"]
# retrieve registered inchi
reg_inchi = reg_cmpds[0]['standardInchi']
# convert our ctab mol to inchi
#m = Chem.MolFromMolBlock(context.post_data["ctab"])
#mol_inchi = inchi.MolToInchi(m)
# we are now using a hard coded inchi from Chemicalize
mol_inchi = context.inchi
# assert they are equal
assert mol_inchi == reg_inchi
@then("retain all their stereochemistry")
def step(context, action=None, projkey=None):
from cbh_core_model.models import Project, CBHCompoundBatch
from rdkit import Chem
from rdkit.Chem import AllChem, inchi
path = "/dev/cbh_compound_batches/"
resp = context.api_client.get(
path,
format='json',
data=context.post_data,
)
reg_cmpds = context.ser.deserialize(resp.content)["objects"]
reg_inchis = []
# get a list of inchis from the response
for cmpd in reg_cmpds:
reg_inchis.append(cmpd['standardInchi'].strip())
fn = os.path.join(os.path.dirname(__file__), 'files/inchi-list.txt')
inchis = [mol.write("inchi").split("\t")[0].strip()
for mol in readfile('inchi', fn)]
# do an array subtraction of the hardcoded inchis from the registered inchis
# print(set(inchis))
print(len(inchis))
# print(set(reg_inchis))
print(len(reg_inchis))
diff = list(set(inchis) - set(reg_inchis))
print(len(diff))
# print(diff)
assert len(diff) == 0
|
thesgc/cbh_chembl_ws_extension
|
cbh_chembl_ws_extension/features/steps/stereochem.py
|
Python
|
mit
| 7,146
|
from __future__ import unicode_literals
from django.test import SimpleTestCase
from localflavor.is_.forms import (ISIdNumberField, ISPhoneNumberField,
ISPostalCodeSelect)
class ISLocalFlavorTests(SimpleTestCase):
def test_ISPostalCodeSelect(self):
f = ISPostalCodeSelect()
out = '''<select name="foo">
<option value="101">101 Reykjav\xedk</option>
<option value="103">103 Reykjav\xedk</option>
<option value="104">104 Reykjav\xedk</option>
<option value="105">105 Reykjav\xedk</option>
<option value="107">107 Reykjav\xedk</option>
<option value="108">108 Reykjav\xedk</option>
<option value="109">109 Reykjav\xedk</option>
<option value="110">110 Reykjav\xedk</option>
<option value="111">111 Reykjav\xedk</option>
<option value="112">112 Reykjav\xedk</option>
<option value="113">113 Reykjav\xedk</option>
<option value="116">116 Kjalarnes</option>
<option value="121">121 Reykjav\xedk</option>
<option value="123">123 Reykjav\xedk</option>
<option value="124">124 Reykjav\xedk</option>
<option value="125">125 Reykjav\xedk</option>
<option value="127">127 Reykjav\xedk</option>
<option value="128">128 Reykjav\xedk</option>
<option value="129">129 Reykjav\xedk</option>
<option value="130">130 Reykjav\xedk</option>
<option value="132">132 Reykjav\xedk</option>
<option value="150">150 Reykjav\xedk</option>
<option value="155">155 Reykjav\xedk</option>
<option value="170">170 Seltjarnarnes</option>
<option value="172">172 Seltjarnarnes</option>
<option value="190">190 Vogar</option>
<option value="200">200 K\xf3pavogur</option>
<option value="201">201 K\xf3pavogur</option>
<option value="202">202 K\xf3pavogur</option>
<option value="203">203 K\xf3pavogur</option>
<option value="210">210 Gar\xf0ab\xe6r</option>
<option value="212">212 Gar\xf0ab\xe6r</option>
<option value="220">220 Hafnarfj\xf6r\xf0ur</option>
<option value="221">221 Hafnarfj\xf6r\xf0ur</option>
<option value="222">222 Hafnarfj\xf6r\xf0ur</option>
<option value="225">225 \xc1lftanes</option>
<option value="230">230 Reykjanesb\xe6r</option>
<option value="232">232 Reykjanesb\xe6r</option>
<option value="233">233 Reykjanesb\xe6r</option>
<option value="235">235 Keflav\xedkurflugv\xf6llur</option>
<option value="240">240 Grindav\xedk</option>
<option value="245">245 Sandger\xf0i</option>
<option value="250">250 Gar\xf0ur</option>
<option value="260">260 Reykjanesb\xe6r</option>
<option value="270">270 Mosfellsb\xe6r</option>
<option value="271">271 Mosfellsb\xe6r</option>
<option value="276">276 Mosfellsb\xe6r</option>
<option value="300">300 Akranes</option>
<option value="301">301 Akranes</option>
<option value="302">302 Akranes</option>
<option value="310">310 Borgarnes</option>
<option value="311">311 Borgarnes</option>
<option value="320">320 Reykholt \xed Borgarfir\xf0i</option>
<option value="340">340 Stykkish\xf3lmur</option>
<option value="345">345 Flatey \xe1 Brei\xf0afir\xf0i</option>
<option value="350">350 Grundarfj\xf6r\xf0ur</option>
<option value="355">355 \xd3lafsv\xedk</option>
<option value="356">356 Sn\xe6fellsb\xe6r</option>
<option value="360">360 Hellissandur</option>
<option value="370">370 B\xfa\xf0ardalur</option>
<option value="371">371 B\xfa\xf0ardalur</option>
<option value="380">380 Reykh\xf3lahreppur</option>
<option value="400">400 \xcdsafj\xf6r\xf0ur</option>
<option value="401">401 \xcdsafj\xf6r\xf0ur</option>
<option value="410">410 Hn\xedfsdalur</option>
<option value="415">415 Bolungarv\xedk</option>
<option value="420">420 S\xfa\xf0av\xedk</option>
<option value="425">425 Flateyri</option>
<option value="430">430 Su\xf0ureyri</option>
<option value="450">450 Patreksfj\xf6r\xf0ur</option>
<option value="451">451 Patreksfj\xf6r\xf0ur</option>
<option value="460">460 T\xe1lknafj\xf6r\xf0ur</option>
<option value="465">465 B\xedldudalur</option>
<option value="470">470 \xdeingeyri</option>
<option value="471">471 \xdeingeyri</option>
<option value="500">500 Sta\xf0ur</option>
<option value="510">510 H\xf3lmav\xedk</option>
<option value="512">512 H\xf3lmav\xedk</option>
<option value="520">520 Drangsnes</option>
<option value="522">522 Kj\xf6rvogur</option>
<option value="523">523 B\xe6r</option>
<option value="524">524 Nor\xf0urfj\xf6r\xf0ur</option>
<option value="530">530 Hvammstangi</option>
<option value="531">531 Hvammstangi</option>
<option value="540">540 Bl\xf6ndu\xf3s</option>
<option value="541">541 Bl\xf6ndu\xf3s</option>
<option value="545">545 Skagastr\xf6nd</option>
<option value="550">550 Sau\xf0\xe1rkr\xf3kur</option>
<option value="551">551 Sau\xf0\xe1rkr\xf3kur</option>
<option value="560">560 Varmahl\xed\xf0</option>
<option value="565">565 Hofs\xf3s</option>
<option value="566">566 Hofs\xf3s</option>
<option value="570">570 Flj\xf3t</option>
<option value="580">580 Siglufj\xf6r\xf0ur</option>
<option value="600">600 Akureyri</option>
<option value="601">601 Akureyri</option>
<option value="602">602 Akureyri</option>
<option value="603">603 Akureyri</option>
<option value="610">610 Greniv\xedk</option>
<option value="611">611 Gr\xedmsey</option>
<option value="620">620 Dalv\xedk</option>
<option value="621">621 Dalv\xedk</option>
<option value="625">625 \xd3lafsfj\xf6r\xf0ur</option>
<option value="630">630 Hr\xedsey</option>
<option value="640">640 H\xfasav\xedk</option>
<option value="641">641 H\xfasav\xedk</option>
<option value="645">645 Fossh\xf3ll</option>
<option value="650">650 Laugar</option>
<option value="660">660 M\xfdvatn</option>
<option value="670">670 K\xf3pasker</option>
<option value="671">671 K\xf3pasker</option>
<option value="675">675 Raufarh\xf6fn</option>
<option value="680">680 \xde\xf3rsh\xf6fn</option>
<option value="681">681 \xde\xf3rsh\xf6fn</option>
<option value="685">685 Bakkafj\xf6r\xf0ur</option>
<option value="690">690 Vopnafj\xf6r\xf0ur</option>
<option value="700">700 Egilssta\xf0ir</option>
<option value="701">701 Egilssta\xf0ir</option>
<option value="710">710 Sey\xf0isfj\xf6r\xf0ur</option>
<option value="715">715 Mj\xf3ifj\xf6r\xf0ur</option>
<option value="720">720 Borgarfj\xf6r\xf0ur eystri</option>
<option value="730">730 Rey\xf0arfj\xf6r\xf0ur</option>
<option value="735">735 Eskifj\xf6r\xf0ur</option>
<option value="740">740 Neskaupsta\xf0ur</option>
<option value="750">750 F\xe1skr\xfa\xf0sfj\xf6r\xf0ur</option>
<option value="755">755 St\xf6\xf0varfj\xf6r\xf0ur</option>
<option value="760">760 Brei\xf0dalsv\xedk</option>
<option value="765">765 Dj\xfapivogur</option>
<option value="780">780 H\xf6fn \xed Hornafir\xf0i</option>
<option value="781">781 H\xf6fn \xed Hornafir\xf0i</option>
<option value="785">785 \xd6r\xe6fi</option>
<option value="800">800 Selfoss</option>
<option value="801">801 Selfoss</option>
<option value="802">802 Selfoss</option>
<option value="810">810 Hverager\xf0i</option>
<option value="815">815 \xdeorl\xe1ksh\xf6fn</option>
<option value="816">816 \xd6lfus</option>
<option value="820">820 Eyrarbakki</option>
<option value="825">825 Stokkseyri</option>
<option value="840">840 Laugarvatn</option>
<option value="845">845 Fl\xfa\xf0ir</option>
<option value="850">850 Hella</option>
<option value="851">851 Hella</option>
<option value="860">860 Hvolsv\xf6llur</option>
<option value="861">861 Hvolsv\xf6llur</option>
<option value="870">870 V\xedk</option>
<option value="871">871 V\xedk</option>
<option value="880">880 Kirkjub\xe6jarklaustur</option>
<option value="900">900 Vestmannaeyjar</option>
<option value="902">902 Vestmannaeyjar</option>
</select>'''
self.assertHTMLEqual(f.render('foo', 'bar'), out)
def test_ISIdNumberField(self):
error_atleast = ['Ensure this value has at least 10 characters (it has 9).']
error_invalid = ['Enter a valid Icelandic identification number. The format is XXXXXX-XXXX.']
error_atmost = ['Ensure this value has at most 11 characters (it has 12).']
error_notvalid = ['The Icelandic identification number is not valid.']
valid = {
'2308803449': '230880-3449',
'230880-3449': '230880-3449',
'230880 3449': '230880-3449',
'2308803440': '230880-3440',
}
invalid = {
'230880343': error_atleast + error_invalid,
'230880343234': error_atmost + error_invalid,
'abcdefghijk': error_invalid,
'2308803439': error_notvalid,
}
self.assertFieldOutput(ISIdNumberField, valid, invalid)
def test_ISPhoneNumberField(self):
error_invalid = ['Enter a valid value.']
error_atleast = ['Ensure this value has at least 7 characters (it has 6).']
error_atmost = ['Ensure this value has at most 8 characters (it has 9).']
valid = {
'1234567': '1234567',
'123 4567': '1234567',
'123-4567': '1234567',
}
invalid = {
'123-456': error_invalid,
'123456': error_atleast + error_invalid,
'123456555': error_atmost + error_invalid,
'abcdefg': error_invalid,
' 1234567 ': error_atmost + error_invalid,
' 12367 ': error_invalid
}
self.assertFieldOutput(ISPhoneNumberField, valid, invalid)
|
M157q/django-localflavor
|
tests/test_is.py
|
Python
|
bsd-3-clause
| 9,213
|
import os
import unittest
from coala_quickstart.generation.InfoCollector import (
collect_info)
from tests.TestUtilities import generate_files
package_json = """
{
"name": "awesome-packages",
"version": "0.8.0",
"license": "MIT",
"dependencies": {
"coffeelint": "~1",
"ramllint": ">=1.2.2 <1.2.4"
},
"files": ["dist"],
"man" : ["./man/foo.1", "./man/bar.1"]
}
"""
editorconfig = """
root = true
[*]
end_of_line = lf
insert_final_newline = true
[*.{js,py}]
charset = utf-8
trim_trailing_whitespace = true
indent_size = tab
tab_width = 4
[*.py]
indent_style = space
indent_size = 4
[{package.json,.travis.yml}]
indent_style = space
indent_size = 2
"""
gemfile = """
source "https://rubygems.org"
gem "puppet-lint", "2.1.1"
gem "rubocop", "0.47.1"
gem "scss_lint", require: false
gem "RedCloth", :require => "redcloth"
gem "omniauth", ">= 0.2.6", :git => "git://github.com/intridea/omniauth.git"
group :assets do
gem 'some-gem', source: "https://gems.example.com"
end
gem "rspec-rails", ">= 2.6.1", :group => [:development, :test]
end
"""
class InfoCollectorTest(unittest.TestCase):
def setUp(self):
self.uut = collect_info
self.test_dir = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'information_collector_testfiles')
def test_collected_info(self):
files_to_create = ['package.json', '.editorconfig', 'Gemfile']
target_file_contents = [package_json, editorconfig, gemfile]
with generate_files(
files_to_create,
target_file_contents,
self.test_dir) as gen_files:
collected_info = self.uut(self.test_dir)
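# Each expected entry is (info class name, files it may come from, expected count).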
expected_results = [
('TrailingWhitespaceInfo', ['.editorconfig'], 1),
('FinalNewlineInfo', ['.editorconfig'], 1),
('IndentStyleInfo', ['.editorconfig'], 2),
('IndentSizeInfo', ['.editorconfig'], 3),
('LineBreaksInfo', ['.editorconfig'], 1),
('CharsetInfo', ['.editorconfig'], 1),
('ProjectDependencyInfo', ['Gemfile', 'package.json'], 9),
('ManFilesInfo', ['package.json'], 1),
('LicenseUsedInfo', ['package.json'], 1),
('IncludePathsInfo', ['package.json'], 1)]
self.assertEqual(len(collected_info.keys()), len(expected_results))
for iname, isources, icount in expected_results:
self.assertEqual(len(collected_info[iname]), icount)
isources = [os.path.normcase(i) for i in isources]
for info in collected_info[iname]:
self.assertIn(info.source, isources)
|
MalkmusT/coala-quickstart
|
tests/generation/InfoCollectorTest.py
|
Python
|
agpl-3.0
| 2,740
|
import Queue
import logging
import threading
import time
import subprocess
import os
from datetime import datetime, timedelta
from mirrors.libmirrors import t2s
class Singleton(type):
"""Singleton Class for RepoManager."""
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
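# Any class that uses ``Singleton`` as its metaclass is constructed at most once:
# the first call creates and caches the instance, and every later call (e.g. the
# bare ``RepoManager()`` lookups in Repo and rsync_thread) returns that same object.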
class Repo(object):
def __init__(self, name, config):
"""A repo object which stores info about a single repo.
:param str name: Name of repo
:param config: running config options
:type config: ConfigParser.ConfigParser
"""
# ConfigParser object; this repo's options are stored under the section named after the repo
self.config = config
# Name of the Repo
self.name = name
# inactive repos will not run
self.deactive = False
# Status of Repo Queue
self.queued = False
# Singleton of RepoManager
self.repo_manager = RepoManager()
# Contains rsync_thread
self.__sync = None
# Config Validation Section
if not self.config.has_option(self.name, 'source'):
raise RepoConfigError("No Source Defined".format(self.name), self.name)
if not self.config.has_option(self.name, 'destination'):
self.config.set(self.name, 'destination', './distro/')
directory = os.path.dirname(self.config.get(self.name, 'destination'))
if not os.path.exists(directory):
logging.info("Creating {0}".format(directory))
os.makedirs(directory)
if not self.config.has_option(self.name, 'rsync_args'):
raise RepoConfigError("No rsync_args Defined".format(self.name), self.name)
if not self.config.has_option(self.name, 'weight'):
self.config.set(self.name, 'weight', '0')
if self.config.has_option(self.name, 'deactive'):
self.deactive = self.config.getboolean(self.name, 'deactive')
else:
self.config.set(self.name, 'deactive', 'False')
if self.config.has_option(self.name, 'async_sleep') and self.config.has_option(self.name, 'hourly_sync'):
raise RepoConfigError("Both async_sleep and hourly_sync cannot be defined".format(self.name), self.name)
elif not self.config.has_option(self.name, 'async_sleep') and not self.config.has_option(self.name, 'hourly_sync'):
raise RepoConfigError("Either async_sleep or hourly_sync must be defined".format(self.name), self.name)
if not self.config.has_option(self.name, 'pre_command'):
self.config.set(self.name, 'pre_command', '')
if not self.config.has_option(self.name, 'post_command'):
self.config.set(self.name, 'post_command', '')
if not self.config.has_option(self.name, 'log_file'):
self.config.set(self.name, 'log_file', './log/{0}.log'.format(self.name))
logging.info("No log_file declared in {0}, defaulting to '{0}.log'".format(self.name))
# end config validation section
log_file = self.config.get(self.name, "log_file")
directory = os.path.dirname(log_file)
if not os.path.exists(directory):
logging.info("Creating {0}".format(directory))
try:
os.makedirs(directory)
except IOError:
logging.error("Failed to create {0}".format(directory))
try:
open(log_file, 'a').close()
logging.debug("{0} log file good for writing".format(self.name))
except IOError:
logging.error("Error opening {0} for writing".format(self.name))
if(self.deactive):
logging.info("{0} loaded successfully, but disabled".format(self.name))
else:
logging.info("{0} loaded successfully".format(self.name))
def is_alive(self):
"""Bool of syncing status."""
if self.__sync:
return bool(self.__sync.p)
return False
def running_time(self):
"""Total running time of active sync.
:rtype: int
:returns: An int of total syncing time elapsed
:rtype: None
:returns: None if not syncing
"""
if self.__sync:
if self.is_alive():
delta = datetime.now() - self.__sync.start_time
return delta - timedelta(microseconds=delta.microseconds)
def sleep_time(self):
"""Sleep duration of sleeping sync.
:rtype: int
:returns: A int of time elapsed since sleeping
:rtype: None
:returns: None if not in sleeping state
"""
if self.__sync:
if self.__sync.sleep_start:
delta = datetime.now() - self.__sync.sleep_start
return delta - timedelta(microseconds=delta.microseconds)
def time_remaining(self):
"""Return time left until sleep is over.
:rtype: int
:returns: A int of time remaining in sleep state
:rtype: None
:returns: None if not in sleeping state
"""
if self.__sync:
if self.__sync.sleep_start:
delta = timedelta(seconds=t2s(self.config.get(self.name, "async_sleep"))) - self.sleep_time()
return delta - timedelta(microseconds=delta.microseconds)
def terminate(self):
"""Send SIGTERM To the rsync process."""
if self.is_alive():
logging.info("Terminating {0}".format(self.name))
self.__sync.p.terminate()
def kill(self):
"""Send SIGKILL To the rsync process."""
if self.is_alive():
logging.info("KIlling {0}".format(self.name))
self.__sync.p.kill()
def __rebuild(self):
"""Destroy and recreate the rsync object and settings.
This will wipe all currently running rsync timers
"""
self.__sync = self.rsync_thread(self.name, self.config)
def start_sync(self):
"""Run an rsync against the repo source."""
self.__rebuild()
self.__sync.start()
class rsync_thread(threading.Thread):
"""Extended threading.Thread class to control rsync via subprocess.
:param str name: Name of repo
:param config: Running config options
:type config: Configparser.Configparser
"""
def __init__(self, name, config):
threading.Thread.__init__(self)
self.config = config
self.p = None
self.name = name
# Singleton of RepoManager
self.repo_manager = RepoManager()
self.start_time = None
self.finish_time = None
self.sleep_start = None
self.thread_timer = None
self.daemon = True
def run(self):
logging.debug("Opening {0} for writing".format(self.config.get(self.name, 'log_file')))
output_file = open(self.config.get(self.name, 'log_file'), 'a')
logging.debug("Running rsync with {0} {1} {2}".format(
self.config.get(self.name, "rsync_args"),
self.config.get(self.name, "source"),
self.config.get(self.name, "destination")))
self.start_time = datetime.now()
logging.info("Starting sync {0} at {1}".format(self.name, self.start_time))
self.p = subprocess.Popen("rsync {0} {1} {2}".format(
self.config.get(self.name, "rsync_args"),
self.config.get(self.name, "source"),
self.config.get(self.name, "destination")).split(),
shell=False,
stdout=output_file,
stderr=subprocess.STDOUT)
# block until the subprocess is done
self.p.wait()
if self.config.get(self.name, "post_command"):
logging.debug("running post_cmd {0}".format(self.config.get(self.name, "post_command")))
self.post_cmd = subprocess.Popen("{0}".format(
self.config.get(self.name, "post_command")),
shell=True,
stdout=output_file,
stderr=subprocess.STDOUT)
self.post_cmd.wait()
logging.info("Done running post_command for {0}".format(self.name))
t = t2s(self.config.get(self.name, "async_sleep"))
self.thread_timer = threading.Timer(t, self.repo_manager.enqueue, [self.name])
self.thread_timer.start()
# Time that thread starts sleeping
self.sleep_start = datetime.now()
# clear out the current process when it finishes
self.p = None
# Remove state from running_syncs
self.repo_manager.running_syncs -= 1
self.finish_time = datetime.now()
logging.info("finished {0} at {1}, sleeping for {2}".format(self.name, self.finish_time, self.config.get(self.name, "async_sleep")))
logging.debug("closing {0}".format(self.config.get(self.name, 'log_file')))
output_file.close()
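# Illustrative INI config consumed by RepoManager/Repo (the section name and values
# are made-up examples; the async_sleep format is whatever mirrors.libmirrors.t2s
# accepts):
#
#   [GLOBAL]
#   async_processes = 2
#   check_sleep = 30
#
#   [fedora]
#   source = rsync://mirror.example.org/fedora/
#   destination = ./distro/fedora/
#   rsync_args = -avH --delete
#   weight = 10
#   async_sleep = 2h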
class RepoManager(object):
__metaclass__ = Singleton
def __init__(self, config):
"""Singleton manager of the repositories and threading.
:param config: Running config options
:type config: Configparser.Configparser
"""
# ConfigParser object; the RepoManager's own options are stored under the GLOBAL section
self.config = config
# priority queue for async processing
self.repo_queue = Queue.PriorityQueue(0)
# dict of Repo objects keyed by repo name
self._repo_dict = dict()
if not self.config.has_section("GLOBAL"):
raise GlobalError("Config requires GLOBAL Section")
if not self.config.has_option('GLOBAL', 'async_processes'):
raise GlobalError("No async_processes value defined in GLOBAL")
if not self.config.has_option('GLOBAL', 'check_sleep'):
config.set("GLOBAL", 'check_sleep', '30')
# current running syncs: compared against max set in config
self.running_syncs = 0
self.async_thread = threading.Thread(name="async_control", target=self.__check_queue)
self.async_thread.daemon = True
self.async_thread.start()
def __check_queue(self):
"""Queue loop checker for async_control."""
while(True):
# Check for inactive repos
found = None
while not found:
repo = self.repo_queue.get()[1]
if not repo.deactive:
found = True
else:
# If inactive, toss it aside and keep looking
continue
if self.running_syncs <= self.config.getint("GLOBAL", "async_processes"):
logging.debug("Acquired {0}".format(repo.name))
repo.queued = False
self.running_syncs += 1
logging.debug("Running Sync {0}, {1} slots available".format(repo.name, self.config.getint("GLOBAL", "async_processes")-self.running_syncs))
repo.start_sync()
else:
logging.debug("Requeuing {0}, no open threads".format(repo.name))
self.repo_queue.put([-11, repo])
time.sleep(self.config.getint("GLOBAL", "check_sleep"))
def get_repo(self, name):
"""Return repo object if exists.
:param str name: name of repo
:rtype: Repo
:returns: Repo Object
:rtype: None
:returns: None if no repo exists by passed in name
"""
if name in self._repo_dict:
return self._repo_dict[name]
def gen_repo(self):
"""Generator for repo_dict.
:rtype: Repo
:returns: Repo Object
"""
for name in self._repo_dict:
yield self._repo_dict[name]
def add_repo(self, name):
"""Create a repo for a section in the running config.
:param str name: Name of repo
:raises Repo.RepoConfigError: if no config exists for given repo name
"""
if self.get_repo(name):
raise RepoConfigError("Cannon create repo {0}, already created".format(name), name)
if self.config.has_section(name):
repo = Repo(name, self.config)
self._repo_dict[name] = repo
else:
raise RepoConfigError("Cannot create repo, section {0} does not exist".format(name), name)
def deactivate(self, name):
"""Deactivate repo from syncing.
:param str name: Name of repo
:raises Repo.RepoError: if no repo exists by given name
"""
if self.get_repo(name):
if self.get_repo(name).deactive:
# nothing to do, already deactive
return
self.get_repo(name).deactive = True
logging.info("Deactivating {0}".format(name))
else:
raise RepoError("No Repo Named {0}".format(name), name)
def activate(self, name):
"""Activate repo for syncing.
:param str name: Name of Repo
:raises Repo.RepoError: if no repo exists by given name
"""
if self.get_repo(name):
if not self.get_repo(name).deactive:
# nothing to do, already active
return
self.get_repo(name).deactive = False
self.enqueue(name)
logging.info("Activating {0}".format(name))
else:
raise RepoError("No Repo Named {0}".format(name), name)
def status(self, name):
"""Return status of Repo.
:param str name: Name of Repo
:rtype: str
:returns: str status of Repo
"""
if not self.get_repo(name):
raise RepoError("Repo {0} doesn't exist".format(name), name)
if self.get_repo(name).deactive:
return "{0} is deactive".format(name)
elif self.get_repo(name).queued:
return "{0} is queued".format(name)
elif self.get_repo(name).is_alive():
return "{0} is syncing, active for {1}".format(name, self.get_repo(name).running_time())
else:
return "{0} is sleeping, sync in {1}".format(name, self.get_repo(name).time_remaining())
def del_repo(self, name):
"""Delete repo object from dict.
:param str name: Name of repo
:raises Repo.RepoError: if no repo exists by passed in name.
"""
if self.get_repo(name):
del self._repo_dict[name]
else:
raise RepoError("Cannot delete repo, repo {0} does not exist".format(name))
def enqueue(self, name):
"""Add repo to the queue.
:param str name: Name of repo
:raises Repo.RepoError: if repo is already queued or doesn't exist
"""
if not self.get_repo(name):
raise RepoError("Repo {0} doesn't exist".format(name), name)
if self.get_repo(name).deactive:
raise RepoError("Failed to queue repo, {0} is deactive.".format(name), name)
if self.get_repo(name).queued:
raise RepoError("Failed to queue repo, {0} already queued.".format(name), name)
if self.get_repo(name).is_alive():
raise RepoError("Failed to queue Repo, {0} is syncing.".format(name), name)
self.repo_queue.put([self.config.get(name, "weight"), self.get_repo(name)])
self.get_repo(name).queued = True
class GlobalError(Exception):
def __init__(self, message):
"""Fatal GLOBAL Section Config Error."""
Exception.__init__(self, message)
self.message = message
class RepoConfigError(Exception):
def __init__(self, message, name):
"""Non-Fatal Repo config Error."""
Exception.__init__(self, message)
self.message = message
self.name = name
class RepoError(Exception):
def __init__(self, message, name):
"""Non-Fatal Repo Error."""
Exception.__init__(self, message)
self.message = message
self.name = name
|
ehouse/mirrors
|
mirrors/repo.py
|
Python
|
bsd-2-clause
| 16,133
|
from ..graph import Greengraph
from itertools import combinations_with_replacement
from nose.tools import assert_almost_equal
from nose.tools import assert_raises
from mock import patch
graph = Greengraph('London', 'Chicago')
def test_geolocate():
start, end = (graph.geolocate(graph.start), graph.geolocate(graph.end))
true_start = (51.507351, -0.127759)
true_end = (41.878103, -87.629798)
points = zip(start,true_start) + zip(end,true_end)
for point, true_point in points:
assert_almost_equal(point, true_point, 3)
def test_geocoder():
with assert_raises(ValueError):
graph.geolocate('hjkbjhild') == None
def test_coordinates():
with assert_raises(ValueError):
graph.location_sequence((100.,0.),(45.,45.),20)
with assert_raises(ValueError):
graph.location_sequence((-100.,0.),(45.,45.),20)
with assert_raises(ValueError):
graph.location_sequence((90.,181.),(45.,45.),20)
with assert_raises(ValueError):
graph.location_sequence((0.,-181.),(45.,45.),20)
def test_location_sequence():
points = Greengraph.location_sequence(Greengraph('London','Texas'),
(10.,10.), (20.,20.), 10)
diffs = [points[i][0] - points[i-1][0] for i in range(1,10)]
for diff1, diff2 in combinations_with_replacement(diffs, 2):
assert_almost_equal(diff1, diff2)
for diff in diffs:
assert_almost_equal(diff, ((20.-10.)/9))
@patch('greengraph.google_map.Map.count_green')
@patch('greengraph.Greengraph.geolocate',return_value=(10.,10.))
def test_green_between(mock_geolocate,mock_map):
Greengraph('10.,10.','20.,20.').green_between(2)
assert mock_map.call_count == 2
def test_limits():
with assert_raises(RuntimeError):
while True:
graph.geolocate('Lapland')
|
padraic-padraic/MPHYSG001_CW1
|
greengraph/test/test_graph.py
|
Python
|
gpl-2.0
| 1,830
|
#!/usr/bin/env python
#
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This is a utility for converting JavaScript source code into C-style
# char arrays. It is used for embedded JavaScript code in the V8
# library.
import os, re, sys, string
import optparse
import jsmin
import bz2
import textwrap
class Error(Exception):
def __init__(self, msg):
Exception.__init__(self, msg)
def ToCArray(byte_sequence):
result = []
for chr in byte_sequence:
result.append(str(ord(chr)))
joined = ", ".join(result)
return textwrap.fill(joined, 80)
def RemoveCommentsAndTrailingWhitespace(lines):
lines = re.sub(r'//.*\n', '\n', lines) # end-of-line comments
lines = re.sub(re.compile(r'/\*.*?\*/', re.DOTALL), '', lines) # comments.
lines = re.sub(r'\s+\n+', '\n', lines) # trailing whitespace
return lines
def ReadFile(filename):
file = open(filename, "rt")
try:
lines = file.read()
finally:
file.close()
return lines
EVAL_PATTERN = re.compile(r'\beval\s*\(')
WITH_PATTERN = re.compile(r'\bwith\s*\(')
def Validate(lines):
# Because of simplified context setup, eval and with is not
# allowed in the natives files.
if EVAL_PATTERN.search(lines):
raise Error("Eval disallowed in natives.")
if WITH_PATTERN.search(lines):
raise Error("With statements disallowed in natives.")
# Pass lines through unchanged.
return lines
def ExpandConstants(lines, constants):
for key, value in constants:
lines = key.sub(str(value), lines)
return lines
def ExpandMacroDefinition(lines, pos, name_pattern, macro, expander):
pattern_match = name_pattern.search(lines, pos)
while pattern_match is not None:
# Scan over the arguments
height = 1
start = pattern_match.start()
end = pattern_match.end()
assert lines[end - 1] == '('
last_match = end
arg_index = [0] # Wrap state into array, to work around Python "scoping"
mapping = { }
def add_arg(str):
# Remember to expand recursively in the arguments
replacement = expander(str.strip())
mapping[macro.args[arg_index[0]]] = replacement
arg_index[0] += 1
while end < len(lines) and height > 0:
# We don't count commas at higher nesting levels.
if lines[end] == ',' and height == 1:
add_arg(lines[last_match:end])
last_match = end + 1
elif lines[end] in ['(', '{', '[']:
height = height + 1
elif lines[end] in [')', '}', ']']:
height = height - 1
end = end + 1
# Remember to add the last match.
add_arg(lines[last_match:end-1])
result = macro.expand(mapping)
# Replace the occurrence of the macro with the expansion
lines = lines[:start] + result + lines[end:]
pattern_match = name_pattern.search(lines, start + len(result))
return lines
def ExpandMacros(lines, macros):
# We allow macros to depend on the previously declared macros, but
# we don't allow self-dependencies or recursion.
for name_pattern, macro in reversed(macros):
def expander(s):
return ExpandMacros(s, macros)
lines = ExpandMacroDefinition(lines, 0, name_pattern, macro, expander)
return lines
class TextMacro:
def __init__(self, args, body):
self.args = args
self.body = body
def expand(self, mapping):
result = self.body
for key, value in mapping.items():
result = result.replace(key, value)
return result
class PythonMacro:
def __init__(self, args, fun):
self.args = args
self.fun = fun
def expand(self, mapping):
args = []
for arg in self.args:
args.append(mapping[arg])
return str(self.fun(*args))
CONST_PATTERN = re.compile(r'^const\s+([a-zA-Z0-9_]+)\s*=\s*([^;]*);$')
MACRO_PATTERN = re.compile(r'^macro\s+([a-zA-Z0-9_]+)\s*\(([^)]*)\)\s*=\s*([^;]*);$')
PYTHON_MACRO_PATTERN = re.compile(r'^python\s+macro\s+([a-zA-Z0-9_]+)\s*\(([^)]*)\)\s*=\s*([^;]*);$')
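# Examples of lines each pattern above accepts (illustrative):
#   const kMaxLength = 16;
#   macro IS_NULL(arg) = (arg === null);
#   python macro CHAR_CODE(str) = ord(str[1]);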
def ReadMacros(lines):
constants = []
macros = []
for line in lines.split('\n'):
hash = line.find('#')
if hash != -1: line = line[:hash]
line = line.strip()
if len(line) == 0: continue
const_match = CONST_PATTERN.match(line)
if const_match:
name = const_match.group(1)
value = const_match.group(2).strip()
constants.append((re.compile("\\b%s\\b" % name), value))
else:
macro_match = MACRO_PATTERN.match(line)
if macro_match:
name = macro_match.group(1)
args = [match.strip() for match in macro_match.group(2).split(',')]
body = macro_match.group(3).strip()
macros.append((re.compile("\\b%s\\(" % name), TextMacro(args, body)))
else:
python_match = PYTHON_MACRO_PATTERN.match(line)
if python_match:
name = python_match.group(1)
args = [match.strip() for match in python_match.group(2).split(',')]
body = python_match.group(3).strip()
fun = eval("lambda " + ",".join(args) + ': ' + body)
macros.append((re.compile("\\b%s\\(" % name), PythonMacro(args, fun)))
else:
raise Error("Illegal line: " + line)
return (constants, macros)
INLINE_MACRO_PATTERN = re.compile(r'macro\s+([a-zA-Z0-9_]+)\s*\(([^)]*)\)\s*\n')
INLINE_MACRO_END_PATTERN = re.compile(r'endmacro\s*\n')
def ExpandInlineMacros(lines):
pos = 0
while True:
macro_match = INLINE_MACRO_PATTERN.search(lines, pos)
if macro_match is None:
# no more macros
return lines
name = macro_match.group(1)
args = [match.strip() for match in macro_match.group(2).split(',')]
end_macro_match = INLINE_MACRO_END_PATTERN.search(lines, macro_match.end());
if end_macro_match is None:
raise Error("Macro %s unclosed" % name)
body = lines[macro_match.end():end_macro_match.start()]
# remove macro definition
lines = lines[:macro_match.start()] + lines[end_macro_match.end():]
name_pattern = re.compile("\\b%s\\(" % name)
macro = TextMacro(args, body)
    # advance position to where the macro definition was
pos = macro_match.start()
def non_expander(s):
return s
lines = ExpandMacroDefinition(lines, pos, name_pattern, macro, non_expander)
HEADER_TEMPLATE = """\
// Copyright 2011 Google Inc. All Rights Reserved.
// This file was generated from .js source files by GYP. If you
// want to make changes to this file you should either change the
// javascript source files or the GYP script.
#include "src/v8.h"
#include "src/natives.h"
#include "src/utils.h"
namespace v8 {
namespace internal {
%(sources_declaration)s\
%(raw_sources_declaration)s\
template <>
int NativesCollection<%(type)s>::GetBuiltinsCount() {
return %(builtin_count)i;
}
template <>
int NativesCollection<%(type)s>::GetDebuggerCount() {
return %(debugger_count)i;
}
template <>
int NativesCollection<%(type)s>::GetIndex(const char* name) {
%(get_index_cases)s\
return -1;
}
template <>
int NativesCollection<%(type)s>::GetRawScriptsSize() {
return %(raw_total_length)i;
}
template <>
Vector<const char> NativesCollection<%(type)s>::GetRawScriptSource(int index) {
%(get_raw_script_source_cases)s\
return Vector<const char>("", 0);
}
template <>
Vector<const char> NativesCollection<%(type)s>::GetScriptName(int index) {
%(get_script_name_cases)s\
return Vector<const char>("", 0);
}
template <>
Vector<const byte> NativesCollection<%(type)s>::GetScriptsSource() {
return Vector<const byte>(sources, %(total_length)i);
}
template <>
void NativesCollection<%(type)s>::SetRawScriptsSource(Vector<const char> raw_source) {
DCHECK(%(raw_total_length)i == raw_source.length());
raw_sources = raw_source.start();
}
} // internal
} // v8
"""
SOURCES_DECLARATION = """\
static const byte sources[] = { %s };
"""
RAW_SOURCES_COMPRESSION_DECLARATION = """\
static const char* raw_sources = NULL;
"""
RAW_SOURCES_DECLARATION = """\
static const char* raw_sources = reinterpret_cast<const char*>(sources);
"""
GET_INDEX_CASE = """\
if (strcmp(name, "%(id)s") == 0) return %(i)i;
"""
GET_RAW_SCRIPT_SOURCE_CASE = """\
if (index == %(i)i) return Vector<const char>(raw_sources + %(offset)i, %(raw_length)i);
"""
GET_SCRIPT_NAME_CASE = """\
if (index == %(i)i) return Vector<const char>("%(name)s", %(length)i);
"""
def BuildFilterChain(macro_filename):
"""Build the chain of filter functions to be applied to the sources.
Args:
macro_filename: Name of the macro file, if any.
Returns:
A function (string -> string) that reads a source file and processes it.
"""
filter_chain = [ReadFile]
if macro_filename:
(consts, macros) = ReadMacros(ReadFile(macro_filename))
filter_chain.append(lambda l: ExpandConstants(l, consts))
filter_chain.append(lambda l: ExpandMacros(l, macros))
filter_chain.extend([
RemoveCommentsAndTrailingWhitespace,
ExpandInlineMacros,
Validate,
jsmin.JavaScriptMinifier().JSMinify
])
def chain(f1, f2):
return lambda x: f2(f1(x))
return reduce(chain, filter_chain)
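# Minimal usage sketch (hypothetical file names): the returned callable maps a
# source path through ReadFile, constant/macro expansion, comment stripping,
# inline macro expansion, validation and minification.
#   process = BuildFilterChain("macros.py")
#   minified_js = process("runtime.js")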
class Sources:
def __init__(self):
self.names = []
self.modules = []
self.is_debugger_id = []
def IsDebuggerFile(filename):
return filename.endswith("-debugger.js")
def IsMacroFile(filename):
return filename.endswith("macros.py")
def PrepareSources(source_files):
"""Read, prepare and assemble the list of source files.
Args:
    source_files: List of JavaScript-ish source files. A file named macros.py
will be treated as a list of macros.
Returns:
An instance of Sources.
"""
macro_file = None
macro_files = filter(IsMacroFile, source_files)
assert len(macro_files) in [0, 1]
if macro_files:
source_files.remove(macro_files[0])
macro_file = macro_files[0]
filters = BuildFilterChain(macro_file)
# Sort 'debugger' sources first.
source_files = sorted(source_files,
lambda l,r: IsDebuggerFile(r) - IsDebuggerFile(l))
result = Sources()
for source in source_files:
try:
lines = filters(source)
except Error as e:
raise Error("In file %s:\n%s" % (source, str(e)))
result.modules.append(lines);
is_debugger = IsDebuggerFile(source)
result.is_debugger_id.append(is_debugger);
name = os.path.basename(source)[:-3]
result.names.append(name if not is_debugger else name[:-9]);
return result
def BuildMetadata(sources, source_bytes, native_type):
"""Build the meta data required to generate a libaries file.
Args:
sources: A Sources instance with the prepared sources.
source_bytes: A list of source bytes.
(The concatenation of all sources; might be compressed.)
native_type: The parameter for the NativesCollection template.
Returns:
A dictionary for use with HEADER_TEMPLATE.
"""
total_length = len(source_bytes)
raw_sources = "".join(sources.modules)
# The sources are expected to be ASCII-only.
assert not filter(lambda value: ord(value) >= 128, raw_sources)
# Loop over modules and build up indices into the source blob:
get_index_cases = []
get_script_name_cases = []
get_raw_script_source_cases = []
offset = 0
for i in xrange(len(sources.modules)):
native_name = "native %s.js" % sources.names[i]
d = {
"i": i,
"id": sources.names[i],
"name": native_name,
"length": len(native_name),
"offset": offset,
"raw_length": len(sources.modules[i]),
}
get_index_cases.append(GET_INDEX_CASE % d)
get_script_name_cases.append(GET_SCRIPT_NAME_CASE % d)
get_raw_script_source_cases.append(GET_RAW_SCRIPT_SOURCE_CASE % d)
offset += len(sources.modules[i])
assert offset == len(raw_sources)
# If we have the raw sources we can declare them accordingly.
have_raw_sources = source_bytes == raw_sources
raw_sources_declaration = (RAW_SOURCES_DECLARATION
if have_raw_sources else RAW_SOURCES_COMPRESSION_DECLARATION)
metadata = {
"builtin_count": len(sources.modules),
"debugger_count": sum(sources.is_debugger_id),
"sources_declaration": SOURCES_DECLARATION % ToCArray(source_bytes),
"raw_sources_declaration": raw_sources_declaration,
"raw_total_length": sum(map(len, sources.modules)),
"total_length": total_length,
"get_index_cases": "".join(get_index_cases),
"get_raw_script_source_cases": "".join(get_raw_script_source_cases),
"get_script_name_cases": "".join(get_script_name_cases),
"type": native_type,
}
return metadata
def CompressMaybe(sources, compression_type):
"""Take the prepared sources and generate a sequence of bytes.
Args:
    sources: A Sources instance with the prepared sources.
compression_type: string, describing the desired compression.
Returns:
A sequence of bytes.
"""
sources_bytes = "".join(sources.modules)
if compression_type == "off":
return sources_bytes
elif compression_type == "bz2":
return bz2.compress(sources_bytes)
else:
raise Error("Unknown compression type %s." % compression_type)
def PutInt(blob_file, value):
assert(value >= 0 and value < (1 << 20))
size = 1 if (value < 1 << 6) else (2 if (value < 1 << 14) else 3)
value_with_length = (value << 2) | size
byte_sequence = bytearray()
for i in xrange(size):
byte_sequence.append(value_with_length & 255)
value_with_length >>= 8;
blob_file.write(byte_sequence)
def PutStr(blob_file, value):
PutInt(blob_file, len(value));
blob_file.write(value);
def WriteStartupBlob(sources, startup_blob):
"""Write a startup blob, as expected by V8 Initialize ...
TODO(vogelheim): Add proper method name.
Args:
sources: A Sources instance with the prepared sources.
    startup_blob: Name of the file to write the blob to.
"""
output = open(startup_blob, "wb")
debug_sources = sum(sources.is_debugger_id);
PutInt(output, debug_sources)
for i in xrange(debug_sources):
PutStr(output, sources.names[i]);
PutStr(output, sources.modules[i]);
PutInt(output, len(sources.names) - debug_sources)
for i in xrange(debug_sources, len(sources.names)):
PutStr(output, sources.names[i]);
PutStr(output, sources.modules[i]);
output.close()
def JS2C(source, target, native_type, compression_type, raw_file, startup_blob):
sources = PrepareSources(source)
sources_bytes = CompressMaybe(sources, compression_type)
metadata = BuildMetadata(sources, sources_bytes, native_type)
# Optionally emit raw file.
if raw_file:
output = open(raw_file, "w")
output.write(sources_bytes)
output.close()
if startup_blob:
WriteStartupBlob(sources, startup_blob);
# Emit resulting source file.
output = open(target, "w")
output.write(HEADER_TEMPLATE % metadata)
output.close()
def main():
parser = optparse.OptionParser()
parser.add_option("--raw", action="store",
help="file to write the processed sources array to.")
parser.add_option("--startup_blob", action="store",
help="file to write the startup blob to.")
parser.set_usage("""js2c out.cc type compression sources.js ...
out.cc: C code to be generated.
type: type parameter for NativesCollection template.
compression: type of compression used. [off|bz2]
sources.js: JS internal sources or macros.py.""")
(options, args) = parser.parse_args()
JS2C(args[3:], args[0], args[1], args[2], options.raw, options.startup_blob)
if __name__ == "__main__":
main()
|
aspectron/jsx
|
extern/v8/tools/js2c.py
|
Python
|
mit
| 16,982
|
#! -*- coding: utf-8 -*-
"""
@author: David Siroky (siroky@dasir.cz)
@license: MIT License (see LICENSE.txt or
U{http://www.opensource.org/licenses/mit-license.php})
"""
import unittest
import net_flow_vizu_dia
#############################################################################
#############################################################################
class Test(unittest.TestCase):
def test(self):
f = open("../data_example.yaml", "rb")
data = f.read()
f.close()
net_flow_vizu_dia.DiaConvertor(data).convert()
#def test_diacritics(self):
# f = open("data_diacritics.yaml", "rb")
# data = f.read()
# f.close()
# net_flow_vizu_dia.DiaConvertor(data).convert()
|
dsiroky/NetFlowVizu
|
tests/tests.py
|
Python
|
mit
| 758
|
import asyncio
import itertools
import random
import sys
import weakref
from operator import mul
from time import sleep
import dask
import pytest
from distributed import Nanny, Worker, wait, worker_client
from distributed.config import config
from distributed.metrics import time
from distributed.scheduler import key_split
from distributed.system import MEMORY_LIMIT
from distributed.utils_test import (
captured_logger,
gen_cluster,
inc,
nodebug_setup_module,
nodebug_teardown_module,
slowadd,
slowidentity,
slowinc,
)
from tlz import concat, sliding_window
# Most tests here are timing-dependent
setup_module = nodebug_setup_module
teardown_module = nodebug_teardown_module
@pytest.mark.skipif(
not sys.platform.startswith("linux"), reason="Need 127.0.0.2 to mean localhost"
)
@gen_cluster(client=True, nthreads=[("127.0.0.1", 2), ("127.0.0.2", 2)], timeout=20)
async def test_work_stealing(c, s, a, b):
[x] = await c._scatter([1], workers=a.address)
futures = c.map(slowadd, range(50), [x] * 50)
await wait(futures)
assert len(a.data) > 10
assert len(b.data) > 10
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 2)
async def test_dont_steal_expensive_data_fast_computation(c, s, a, b):
np = pytest.importorskip("numpy")
x = c.submit(np.arange, 1000000, workers=a.address)
await wait([x])
future = c.submit(np.sum, [1], workers=a.address) # learn that sum is fast
await wait([future])
cheap = [
c.submit(np.sum, x, pure=False, workers=a.address, allow_other_workers=True)
for i in range(10)
]
await wait(cheap)
assert len(s.who_has[x.key]) == 1
assert len(b.data) == 0
assert len(a.data) == 12
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 2)
async def test_steal_cheap_data_slow_computation(c, s, a, b):
x = c.submit(slowinc, 100, delay=0.1) # learn that slowinc is slow
await wait(x)
futures = c.map(
slowinc, range(10), delay=0.1, workers=a.address, allow_other_workers=True
)
await wait(futures)
assert abs(len(a.data) - len(b.data)) <= 5
@pytest.mark.avoid_travis
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 2)
async def test_steal_expensive_data_slow_computation(c, s, a, b):
np = pytest.importorskip("numpy")
x = c.submit(slowinc, 100, delay=0.2, workers=a.address)
await wait(x) # learn that slowinc is slow
x = c.submit(np.arange, 1000000, workers=a.address) # put expensive data
await wait(x)
slow = [c.submit(slowinc, x, delay=0.1, pure=False) for i in range(20)]
await wait(slow)
assert len(s.who_has[x.key]) > 1
assert b.data # not empty
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 10)
async def test_worksteal_many_thieves(c, s, *workers):
x = c.submit(slowinc, -1, delay=0.1)
await x
xs = c.map(slowinc, [x] * 100, pure=False, delay=0.1)
await wait(xs)
for w, keys in s.has_what.items():
assert 2 < len(keys) < 30
assert len(s.who_has[x.key]) > 1
assert sum(map(len, s.has_what.values())) < 150
@pytest.mark.xfail(reason="GH#3574")
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 2)
async def test_dont_steal_unknown_functions(c, s, a, b):
futures = c.map(inc, range(100), workers=a.address, allow_other_workers=True)
await wait(futures)
assert len(a.data) >= 95, [len(a.data), len(b.data)]
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 2)
async def test_eventually_steal_unknown_functions(c, s, a, b):
futures = c.map(
slowinc, range(10), delay=0.1, workers=a.address, allow_other_workers=True
)
await wait(futures)
assert len(a.data) >= 3, [len(a.data), len(b.data)]
assert len(b.data) >= 3, [len(a.data), len(b.data)]
@pytest.mark.skip(reason="")
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 3)
async def test_steal_related_tasks(e, s, a, b, c):
futures = e.map(
slowinc, range(20), delay=0.05, workers=a.address, allow_other_workers=True
)
await wait(futures)
nearby = 0
for f1, f2 in sliding_window(2, futures):
if s.who_has[f1.key] == s.who_has[f2.key]:
nearby += 1
assert nearby > 10
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 10, timeout=1000)
async def test_dont_steal_fast_tasks_compute_time(c, s, *workers):
np = pytest.importorskip("numpy")
x = c.submit(np.random.random, 10000000, workers=workers[0].address)
def do_nothing(x, y=None):
pass
    # execute and measure runtime once
await wait(c.submit(do_nothing, 1))
futures = c.map(do_nothing, range(1000), y=x)
await wait(futures)
assert len(s.who_has[x.key]) == 1
assert len(s.has_what[workers[0].address]) == 1001
@gen_cluster(client=True)
async def test_dont_steal_fast_tasks_blacklist(c, s, a, b):
# create a dependency
x = c.submit(slowinc, 1, workers=[b.address])
# If the blacklist of fast tasks is tracked somewhere else, this needs to be
    # changed. This test requires *any* key which is blacklisted.
from distributed.stealing import fast_tasks
blacklisted_key = next(iter(fast_tasks))
def fast_blacklisted(x, y=None):
# The task should observe a certain computation time such that we can
# ensure that it is not stolen due to the blacklisting. If it is too
        # fast, the standard mechanism shouldn't allow stealing
import time
time.sleep(0.01)
futures = c.map(
fast_blacklisted,
range(100),
y=x,
# Submit the task to one worker but allow it to be distributed else,
# i.e. this is not a task restriction
workers=[a.address],
allow_other_workers=True,
key=blacklisted_key,
)
await wait(futures)
# The +1 is the dependency we initially submitted to worker B
assert len(s.has_what[a.address]) == 101
assert len(s.has_what[b.address]) == 1
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)], timeout=20)
async def test_new_worker_steals(c, s, a):
await wait(c.submit(slowinc, 1, delay=0.01))
futures = c.map(slowinc, range(100), delay=0.05)
total = c.submit(sum, futures)
while len(a.task_state) < 10:
await asyncio.sleep(0.01)
b = await Worker(s.address, loop=s.loop, nthreads=1, memory_limit=MEMORY_LIMIT)
result = await total
assert result == sum(map(inc, range(100)))
for w in [a, b]:
assert all(isinstance(v, int) for v in w.data.values())
assert b.data
await b.close()
@gen_cluster(client=True, timeout=20)
async def test_work_steal_no_kwargs(c, s, a, b):
await wait(c.submit(slowinc, 1, delay=0.05))
futures = c.map(
slowinc, range(100), workers=a.address, allow_other_workers=True, delay=0.05
)
await wait(futures)
assert 20 < len(a.data) < 80
assert 20 < len(b.data) < 80
total = c.submit(sum, futures)
result = await total
assert result == sum(map(inc, range(100)))
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1), ("127.0.0.1", 2)])
async def test_dont_steal_worker_restrictions(c, s, a, b):
future = c.submit(slowinc, 1, delay=0.10, workers=a.address)
await future
futures = c.map(slowinc, range(100), delay=0.1, workers=a.address)
while len(a.task_state) + len(b.task_state) < 100:
await asyncio.sleep(0.01)
assert len(a.task_state) == 100
assert len(b.task_state) == 0
result = s.extensions["stealing"].balance()
await asyncio.sleep(0.1)
assert len(a.task_state) == 100
assert len(b.task_state) == 0
@gen_cluster(
client=True, nthreads=[("127.0.0.1", 1), ("127.0.0.1", 2), ("127.0.0.1", 2)]
)
async def test_steal_worker_restrictions(c, s, wa, wb, wc):
future = c.submit(slowinc, 1, delay=0.1, workers={wa.address, wb.address})
await future
ntasks = 100
futures = c.map(slowinc, range(ntasks), delay=0.1, workers={wa.address, wb.address})
while sum(len(w.task_state) for w in [wa, wb, wc]) < ntasks:
await asyncio.sleep(0.01)
assert 0 < len(wa.task_state) < ntasks
assert 0 < len(wb.task_state) < ntasks
assert len(wc.task_state) == 0
s.extensions["stealing"].balance()
await asyncio.sleep(0.1)
assert 0 < len(wa.task_state) < ntasks
assert 0 < len(wb.task_state) < ntasks
assert len(wc.task_state) == 0
@pytest.mark.skipif(
not sys.platform.startswith("linux"), reason="Need 127.0.0.2 to mean localhost"
)
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1), ("127.0.0.2", 1)])
async def test_dont_steal_host_restrictions(c, s, a, b):
future = c.submit(slowinc, 1, delay=0.10, workers=a.address)
await future
futures = c.map(slowinc, range(100), delay=0.1, workers="127.0.0.1")
while len(a.task_state) + len(b.task_state) < 100:
await asyncio.sleep(0.01)
assert len(a.task_state) == 100
assert len(b.task_state) == 0
result = s.extensions["stealing"].balance()
await asyncio.sleep(0.1)
assert len(a.task_state) == 100
assert len(b.task_state) == 0
@pytest.mark.skipif(
not sys.platform.startswith("linux"), reason="Need 127.0.0.2 to mean localhost"
)
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1), ("127.0.0.2", 2)])
async def test_steal_host_restrictions(c, s, wa, wb):
future = c.submit(slowinc, 1, delay=0.10, workers=wa.address)
await future
ntasks = 100
futures = c.map(slowinc, range(ntasks), delay=0.1, workers="127.0.0.1")
while len(wa.task_state) < ntasks:
await asyncio.sleep(0.01)
assert len(wa.task_state) == ntasks
assert len(wb.task_state) == 0
wc = await Worker(s.address, nthreads=1)
start = time()
while not wc.task_state or len(wa.task_state) == ntasks:
await asyncio.sleep(0.01)
assert time() < start + 3
await asyncio.sleep(0.1)
assert 0 < len(wa.task_state) < ntasks
assert len(wb.task_state) == 0
assert 0 < len(wc.task_state) < ntasks
@gen_cluster(
client=True, nthreads=[("127.0.0.1", 1, {"resources": {"A": 2}}), ("127.0.0.1", 1)]
)
async def test_dont_steal_resource_restrictions(c, s, a, b):
future = c.submit(slowinc, 1, delay=0.10, workers=a.address)
await future
futures = c.map(slowinc, range(100), delay=0.1, resources={"A": 1})
while len(a.task_state) + len(b.task_state) < 100:
await asyncio.sleep(0.01)
assert len(a.task_state) == 100
assert len(b.task_state) == 0
result = s.extensions["stealing"].balance()
await asyncio.sleep(0.1)
assert len(a.task_state) == 100
assert len(b.task_state) == 0
@gen_cluster(
client=True, nthreads=[("127.0.0.1", 1, {"resources": {"A": 2}})], timeout=3
)
async def test_steal_resource_restrictions(c, s, a):
future = c.submit(slowinc, 1, delay=0.10, workers=a.address)
await future
futures = c.map(slowinc, range(100), delay=0.2, resources={"A": 1})
while len(a.task_state) < 101:
await asyncio.sleep(0.01)
assert len(a.task_state) == 101
b = await Worker(s.address, loop=s.loop, nthreads=1, resources={"A": 4})
start = time()
while not b.task_state or len(a.task_state) == 101:
await asyncio.sleep(0.01)
assert time() < start + 3
assert len(b.task_state) > 0
assert len(a.task_state) < 101
await b.close()
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 5, timeout=20)
async def test_balance_without_dependencies(c, s, *workers):
s.extensions["stealing"]._pc.callback_time = 20
def slow(x):
y = random.random() * 0.1
sleep(y)
return y
futures = c.map(slow, range(100))
await wait(futures)
durations = [sum(w.data.values()) for w in workers]
assert max(durations) / min(durations) < 3
@gen_cluster(client=True, nthreads=[("127.0.0.1", 4)] * 2)
async def test_dont_steal_executing_tasks(c, s, a, b):
futures = c.map(
slowinc, range(4), delay=0.1, workers=a.address, allow_other_workers=True
)
await wait(futures)
assert len(a.data) == 4
assert len(b.data) == 0
@gen_cluster(
client=True,
nthreads=[("127.0.0.1", 1)] * 10,
config={"distributed.scheduler.default-task-durations": {"slowidentity": 0.2}},
)
async def test_dont_steal_few_saturated_tasks_many_workers(c, s, a, *rest):
s.extensions["stealing"]._pc.callback_time = 20
x = c.submit(mul, b"0", 100000000, workers=a.address) # 100 MB
await wait(x)
futures = [c.submit(slowidentity, x, pure=False, delay=0.2) for i in range(2)]
await wait(futures)
assert len(a.data) == 3
assert not any(w.task_state for w in rest)
@gen_cluster(
client=True,
nthreads=[("127.0.0.1", 1)] * 10,
worker_kwargs={"memory_limit": MEMORY_LIMIT},
config={"distributed.scheduler.default-task-durations": {"slowidentity": 0.2}},
)
async def test_steal_when_more_tasks(c, s, a, *rest):
s.extensions["stealing"]._pc.callback_time = 20
x = c.submit(mul, b"0", 50000000, workers=a.address) # 50 MB
await wait(x)
futures = [c.submit(slowidentity, x, pure=False, delay=0.2) for i in range(20)]
start = time()
while not any(w.task_state for w in rest):
await asyncio.sleep(0.01)
assert time() < start + 1
@gen_cluster(
client=True,
nthreads=[("127.0.0.1", 1)] * 10,
config={
"distributed.scheduler.default-task-durations": {
"slowidentity": 0.2,
"slow2": 1,
}
},
)
async def test_steal_more_attractive_tasks(c, s, a, *rest):
def slow2(x):
sleep(1)
return x
s.extensions["stealing"]._pc.callback_time = 20
x = c.submit(mul, b"0", 100000000, workers=a.address) # 100 MB
await wait(x)
futures = [c.submit(slowidentity, x, pure=False, delay=0.2) for i in range(10)]
future = c.submit(slow2, x, priority=-1)
while not any(w.task_state for w in rest):
await asyncio.sleep(0.01)
# good future moves first
assert any(future.key in w.task_state for w in rest)
def func(x):
sleep(1)
async def assert_balanced(inp, expected, c, s, *workers):
steal = s.extensions["stealing"]
steal._pc.stop()
counter = itertools.count()
tasks = list(concat(inp))
data_seq = itertools.count()
futures = []
for w, ts in zip(workers, inp):
for t in sorted(ts, reverse=True):
if t:
[dat] = await c.scatter([next(data_seq)], workers=w.address)
ts = s.tasks[dat.key]
# Ensure scheduler state stays consistent
old_nbytes = ts.nbytes
ts.nbytes = s.bandwidth * t
for ws in ts.who_has:
ws.nbytes += ts.nbytes - old_nbytes
else:
dat = 123
i = next(counter)
f = c.submit(
func,
dat,
key="%d-%d" % (int(t), i),
workers=w.address,
allow_other_workers=True,
pure=False,
priority=-i,
)
futures.append(f)
while len(s.rprocessing) < len(futures):
await asyncio.sleep(0.001)
for i in range(10):
steal.balance()
while steal.in_flight:
await asyncio.sleep(0.001)
result = [
sorted([int(key_split(k)) for k in s.processing[w.address]], reverse=True)
for w in workers
]
result2 = sorted(result, reverse=True)
expected2 = sorted(expected, reverse=True)
if config.get("pdb-on-err"):
if result2 != expected2:
import pdb
pdb.set_trace()
if result2 == expected2:
return
raise Exception("Expected: {}; got: {}".format(str(expected2), str(result2)))
@pytest.mark.parametrize(
"inp,expected",
[
([[1], []], [[1], []]), # don't move unnecessarily
([[0, 0], []], [[0], [0]]), # balance
([[0.1, 0.1], []], [[0], [0]]), # balance even if results in even
([[0, 0, 0], []], [[0, 0], [0]]), # don't over balance
([[0, 0], [0, 0, 0], []], [[0, 0], [0, 0], [0]]), # move from larger
([[0, 0, 0], [0], []], [[0, 0], [0], [0]]), # move to smaller
([[0, 1], []], [[1], [0]]), # choose easier first
([[0, 0, 0, 0], [], []], [[0, 0], [0], [0]]), # spread evenly
([[1, 0, 2, 0], [], []], [[2, 1], [0], [0]]), # move easier
([[1, 1, 1], []], [[1, 1], [1]]), # be willing to move costly items
([[1, 1, 1, 1], []], [[1, 1, 1], [1]]), # but don't move too many
(
[[0, 0], [0, 0], [0, 0], []], # no one clearly saturated
[[0, 0], [0, 0], [0], [0]],
),
(
[[4, 2, 2, 2, 2, 1, 1], [4, 2, 1, 1], [], [], []],
[[4, 2, 2, 2, 2], [4, 2, 1], [1], [1], [1]],
),
pytest.param(
[[1, 1, 1, 1, 1, 1, 1], [1, 1], [1, 1], [1, 1], []],
[[1, 1, 1, 1, 1], [1, 1], [1, 1], [1, 1], [1, 1]],
marks=pytest.mark.xfail(
reason="Some uncertainty based on executing stolen task"
),
),
],
)
def test_balance(inp, expected):
async def test(*args, **kwargs):
await assert_balanced(inp, expected, *args, **kwargs)
test = gen_cluster(
client=True,
nthreads=[("127.0.0.1", 1)] * len(inp),
config={
"distributed.scheduler.default-task-durations": {
str(i): 1 for i in range(10)
}
},
)(test)
test()
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 2, Worker=Nanny, timeout=20)
async def test_restart(c, s, a, b):
futures = c.map(
slowinc, range(100), delay=0.1, workers=a.address, allow_other_workers=True
)
while not s.processing[b.worker_address]:
await asyncio.sleep(0.01)
steal = s.extensions["stealing"]
assert any(st for st in steal.stealable_all)
assert any(x for L in steal.stealable.values() for x in L)
await c.restart(timeout=10)
assert not any(x for x in steal.stealable_all)
assert not any(x for L in steal.stealable.values() for x in L)
@gen_cluster(
client=True,
config={"distributed.scheduler.default-task-durations": {"slowadd": 0.001}},
)
async def test_steal_communication_heavy_tasks(c, s, a, b):
steal = s.extensions["stealing"]
x = c.submit(mul, b"0", int(s.bandwidth), workers=a.address)
y = c.submit(mul, b"1", int(s.bandwidth), workers=b.address)
futures = [
c.submit(
slowadd,
x,
y,
delay=1,
pure=False,
workers=a.address,
allow_other_workers=True,
)
for i in range(10)
]
while not any(f.key in s.rprocessing for f in futures):
await asyncio.sleep(0.01)
steal.balance()
while steal.in_flight:
await asyncio.sleep(0.001)
assert s.processing[b.address]
@gen_cluster(client=True)
async def test_steal_twice(c, s, a, b):
x = c.submit(inc, 1, workers=a.address)
await wait(x)
futures = [c.submit(slowadd, x, i, delay=0.2) for i in range(100)]
while len(s.tasks) < 100: # tasks are all allocated
await asyncio.sleep(0.01)
# Army of new workers arrives to help
workers = await asyncio.gather(*[Worker(s.address, loop=s.loop) for _ in range(20)])
await wait(futures)
has_what = dict(s.has_what) # take snapshot
empty_workers = [w for w, keys in has_what.items() if not len(keys)]
if len(empty_workers) > 2:
pytest.fail(
"Too many workers without keys (%d out of %d)"
% (len(empty_workers), len(has_what))
)
assert max(map(len, has_what.values())) < 30
await c._close()
await asyncio.gather(*[w.close() for w in workers])
@gen_cluster(client=True)
async def test_dont_steal_executing_tasks(c, s, a, b):
steal = s.extensions["stealing"]
future = c.submit(slowinc, 1, delay=0.5, workers=a.address)
while not a.executing:
await asyncio.sleep(0.01)
steal.move_task_request(
s.tasks[future.key], s.workers[a.address], s.workers[b.address]
)
await asyncio.sleep(0.1)
assert future.key in a.executing
assert not b.executing
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 2)
async def test_dont_steal_long_running_tasks(c, s, a, b):
def long(delay):
with worker_client() as c:
sleep(delay)
await c.submit(long, 0.1) # learn duration
await c.submit(inc, 1) # learn duration
long_tasks = c.map(long, [0.5, 0.6], workers=a.address, allow_other_workers=True)
while sum(map(len, s.processing.values())) < 2: # let them start
await asyncio.sleep(0.01)
start = time()
while any(t.key in s.extensions["stealing"].key_stealable for t in long_tasks):
await asyncio.sleep(0.01)
assert time() < start + 1
na = len(a.executing)
nb = len(b.executing)
incs = c.map(inc, range(100), workers=a.address, allow_other_workers=True)
await asyncio.sleep(0.2)
await wait(long_tasks)
for t in long_tasks:
assert (
sum(log[1] == "executing" for log in a.story(t))
+ sum(log[1] == "executing" for log in b.story(t))
) <= 1
@pytest.mark.xfail(
sys.version_info[:2] == (3, 8),
reason="Sporadic failure on Python 3.8",
strict=False,
)
@gen_cluster(client=True, nthreads=[("127.0.0.1", 5)] * 2)
async def test_cleanup_repeated_tasks(c, s, a, b):
class Foo:
pass
s.extensions["stealing"]._pc.callback_time = 20
await c.submit(slowidentity, -1, delay=0.1)
objects = [c.submit(Foo, pure=False, workers=a.address) for _ in range(50)]
x = c.map(
slowidentity, objects, workers=a.address, allow_other_workers=True, delay=0.05
)
del objects
await wait(x)
assert a.data and b.data
assert len(a.data) + len(b.data) > 10
ws = weakref.WeakSet()
ws.update(a.data.values())
ws.update(b.data.values())
del x
start = time()
while a.data or b.data:
await asyncio.sleep(0.01)
assert time() < start + 1
assert not s.who_has
assert not any(s.has_what.values())
assert not list(ws)
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 2)
async def test_lose_task(c, s, a, b):
with captured_logger("distributed.stealing") as log:
s.periodic_callbacks["stealing"].interval = 1
for i in range(100):
futures = c.map(
slowinc,
range(10),
delay=0.01,
pure=False,
workers=a.address,
allow_other_workers=True,
)
await asyncio.sleep(0.01)
del futures
out = log.getvalue()
assert "Error" not in out
@gen_cluster(client=True)
async def test_worker_stealing_interval(c, s, a, b):
from distributed.scheduler import WorkStealing
ws = WorkStealing(s)
assert ws._pc.callback_time == 100
with dask.config.set({"distributed.scheduler.work-stealing-interval": "500ms"}):
ws = WorkStealing(s)
assert ws._pc.callback_time == 500
# Default unit is `ms`
with dask.config.set({"distributed.scheduler.work-stealing-interval": 2}):
ws = WorkStealing(s)
assert ws._pc.callback_time == 2
|
blaze/distributed
|
distributed/tests/test_steal.py
|
Python
|
bsd-3-clause
| 23,436
|
#!/usr/bin/env python
# encoding: utf-8
from fabric.api import run, env
from cfg import aliyun3_cfg
from helper import update_sys
env.hosts = ['root@{host}'.format(host=aliyun3_cfg['host'])]
env.password = aliyun3_cfg['root_pass']
def update_maplet():
run('cp /opt/mapdisk/mapws/wcs_maplet/mapproxy.yaml /home/bk/wcs_maplet/')
def update():
run('supervisorctl restart all')
|
bukun/bkcase
|
DevOps/aliyun3_su.py
|
Python
|
mit
| 389
|
# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''commands to sign and verify changesets'''
import os, tempfile, binascii
from mercurial import util, commands, match, cmdutil
from mercurial import node as hgnode
from mercurial.i18n import _
cmdtable = {}
command = cmdutil.command(cmdtable)
testedwith = 'internal'
class gpg(object):
def __init__(self, path, key=None):
self.path = path
self.key = (key and " --local-user \"%s\"" % key) or ""
def sign(self, data):
gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
return util.filter(data, gpgcmd)
def verify(self, data, sig):
""" returns of the good and bad signatures"""
sigfile = datafile = None
try:
# create temporary files
fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
fp = os.fdopen(fd, 'wb')
fp.write(sig)
fp.close()
fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
fp = os.fdopen(fd, 'wb')
fp.write(data)
fp.close()
gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
"\"%s\" \"%s\"" % (self.path, sigfile, datafile))
ret = util.filter("", gpgcmd)
finally:
for f in (sigfile, datafile):
try:
if f:
os.unlink(f)
except OSError:
pass
keys = []
key, fingerprint = None, None
for l in ret.splitlines():
# see DETAILS in the gnupg documentation
# filter the logger output
if not l.startswith("[GNUPG:]"):
continue
l = l[9:]
if l.startswith("VALIDSIG"):
# fingerprint of the primary key
fingerprint = l.split()[10]
elif l.startswith("ERRSIG"):
key = l.split(" ", 3)[:2]
key.append("")
fingerprint = None
elif (l.startswith("GOODSIG") or
l.startswith("EXPSIG") or
l.startswith("EXPKEYSIG") or
l.startswith("BADSIG")):
if key is not None:
keys.append(key + [fingerprint])
key = l.split(" ", 2)
fingerprint = None
if key is not None:
keys.append(key + [fingerprint])
return keys
def newgpg(ui, **opts):
"""create a new gpg instance"""
gpgpath = ui.config("gpg", "cmd", "gpg")
gpgkey = opts.get('key')
if not gpgkey:
gpgkey = ui.config("gpg", "key", None)
return gpg(gpgpath, gpgkey)
def sigwalk(repo):
"""
    walk over all signatures, yielding tuples of the form
((node, version, sig), (filename, linenumber))
"""
def parsefile(fileiter, context):
ln = 1
for l in fileiter:
if not l:
continue
yield (l.split(" ", 2), (context, ln))
ln += 1
# read the heads
fl = repo.file(".hgsigs")
for r in reversed(fl.heads()):
fn = ".hgsigs|%s" % hgnode.short(r)
for item in parsefile(fl.read(r).splitlines(), fn):
yield item
try:
# read local signatures
fn = "localsigs"
for item in parsefile(repo.vfs(fn), fn):
yield item
except IOError:
pass
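# Each .hgsigs / localsigs line parsed above has the form
# "<hex changeset node> <version> <base64 signature>"; version is currently "0".
# Illustrative (made-up) entry: "9f3c... 0 iQEcBAABAgAGBQJ...".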
def getkeys(ui, repo, mygpg, sigdata, context):
"""get the keys who signed a data"""
fn, ln = context
node, version, sig = sigdata
prefix = "%s:%d" % (fn, ln)
node = hgnode.bin(node)
data = node2txt(repo, node, version)
sig = binascii.a2b_base64(sig)
keys = mygpg.verify(data, sig)
validkeys = []
# warn for expired key and/or sigs
for key in keys:
if key[0] == "ERRSIG":
ui.write(_("%s Unknown key ID \"%s\"\n")
% (prefix, shortkey(ui, key[1][:15])))
continue
if key[0] == "BADSIG":
ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
continue
if key[0] == "EXPSIG":
ui.write(_("%s Note: Signature has expired"
" (signed by: \"%s\")\n") % (prefix, key[2]))
elif key[0] == "EXPKEYSIG":
ui.write(_("%s Note: This key has expired"
" (signed by: \"%s\")\n") % (prefix, key[2]))
validkeys.append((key[1], key[2], key[3]))
return validkeys
@command("sigs", [], _('hg sigs'))
def sigs(ui, repo):
"""list signed changesets"""
mygpg = newgpg(ui)
revs = {}
for data, context in sigwalk(repo):
node, version, sig = data
fn, ln = context
try:
n = repo.lookup(node)
except KeyError:
ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
continue
r = repo.changelog.rev(n)
keys = getkeys(ui, repo, mygpg, data, context)
if not keys:
continue
revs.setdefault(r, [])
revs[r].extend(keys)
for rev in sorted(revs, reverse=True):
for k in revs[rev]:
r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
ui.write("%-30s %s\n" % (keystr(ui, k), r))
@command("sigcheck", [], _('hg sigcheck REV'))
def check(ui, repo, rev):
"""verify all the signatures there may be for a particular revision"""
mygpg = newgpg(ui)
rev = repo.lookup(rev)
hexrev = hgnode.hex(rev)
keys = []
for data, context in sigwalk(repo):
node, version, sig = data
if node == hexrev:
k = getkeys(ui, repo, mygpg, data, context)
if k:
keys.extend(k)
if not keys:
ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
return
# print summary
ui.write("%s is signed by:\n" % hgnode.short(rev))
for key in keys:
ui.write(" %s\n" % keystr(ui, key))
def keystr(ui, key):
"""associate a string to a key (username, comment)"""
keyid, user, fingerprint = key
comment = ui.config("gpg", fingerprint, None)
if comment:
return "%s (%s)" % (user, comment)
else:
return user
@command("sign",
[('l', 'local', None, _('make the signature local')),
('f', 'force', None, _('sign even if the sigfile is modified')),
('', 'no-commit', None, _('do not commit the sigfile after signing')),
('k', 'key', '',
_('the key id to sign with'), _('ID')),
('m', 'message', '',
_('use text as commit message'), _('TEXT')),
('e', 'edit', False, _('invoke editor on commit messages')),
] + commands.commitopts2,
_('hg sign [OPTION]... [REV]...'))
def sign(ui, repo, *revs, **opts):
"""add a signature for the current or given revision
If no revision is given, the parent of the working directory is used,
or tip if no revision is checked out.
See :hg:`help dates` for a list of formats valid for -d/--date.
"""
mygpg = newgpg(ui, **opts)
sigver = "0"
sigmessage = ""
date = opts.get('date')
if date:
opts['date'] = util.parsedate(date)
if revs:
nodes = [repo.lookup(n) for n in revs]
else:
nodes = [node for node in repo.dirstate.parents()
if node != hgnode.nullid]
if len(nodes) > 1:
raise util.Abort(_('uncommitted merge - please provide a '
'specific revision'))
if not nodes:
nodes = [repo.changelog.tip()]
for n in nodes:
hexnode = hgnode.hex(n)
ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
hgnode.short(n)))
# build data
data = node2txt(repo, n, sigver)
sig = mygpg.sign(data)
if not sig:
raise util.Abort(_("error while signing"))
sig = binascii.b2a_base64(sig)
sig = sig.replace("\n", "")
sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
# write it
if opts['local']:
repo.vfs.append("localsigs", sigmessage)
return
if not opts["force"]:
msigs = match.exact(repo.root, '', ['.hgsigs'])
if util.any(repo.status(match=msigs, unknown=True, ignored=True)):
raise util.Abort(_("working copy of .hgsigs is changed "),
hint=_("please commit .hgsigs manually"))
sigsfile = repo.wfile(".hgsigs", "ab")
sigsfile.write(sigmessage)
sigsfile.close()
if '.hgsigs' not in repo.dirstate:
repo[None].add([".hgsigs"])
if opts["no_commit"]:
return
message = opts['message']
if not message:
# we don't translate commit messages
message = "\n".join(["Added signature for changeset %s"
% hgnode.short(n)
for n in nodes])
try:
editor = cmdutil.getcommiteditor(editform='gpg.sign', **opts)
repo.commit(message, opts['user'], opts['date'], match=msigs,
editor=editor)
except ValueError, inst:
raise util.Abort(str(inst))
def shortkey(ui, key):
if len(key) != 16:
ui.debug("key ID \"%s\" format error\n" % key)
return key
return key[-8:]
def node2txt(repo, node, ver):
"""map a manifest into some text"""
if ver == "0":
return "%s\n" % hgnode.hex(node)
else:
raise util.Abort(_("unknown signature version"))
|
hekra01/mercurial
|
hgext/gpg.py
|
Python
|
gpl-2.0
| 9,698
|
#!/usr/bin/env python
'''
The MIT License (MIT)
Copyright (c) <2014> <Mathias Lesche>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
contact: mat.lesche(at)gmail.com
'''
''' python modules '''
import logging
from argparse import ArgumentParser as ArgumentParser
from argparse import RawDescriptionHelpFormatter
from os.path import sep
from random import randint
from subprocess import Popen
from time import sleep as sleep
from types import NoneType
''' own modules '''
from database.database import Database
from sge.sge import SGE
from support.auxiliary_module import get_emailadress
from support.fileclass import Fileclass
from support.information import Information
from support.io_module import add_FilestoList_recursive, write_list
from support.io_module import get_fileobject
from support.io_module import check_Directorylist
from support.io_module import check_Fileslist
from support.io_module import create_Directory
from support.io_module import write_string
from support.main_logger import MainLogger
class MergeFastqParser(object):
def __init__(self):
self.__parser = ArgumentParser(description="""
Script merges fastq files which come from the same library.
It identifies the libraries, sample ids (via db access),
tracks and read direction.
List of files can be supplied via options -b, -d, -f or -i
        It's possible to remove track information from the output files.
        Track information won't be removed if paired-end data is used and the
        script discovers single tracks for one of the read directions.
        The file 'library_records.csv' is created and shows the relation
        between library, sample, tracks, read direction and readcount.
""", formatter_class=RawDescriptionHelpFormatter)
self.initialiseParser()
self.__files = []
self.__bfx = ''
self.__output = ''
self.__zip = False
self.__tracks = False
self.__array = True
self.__email = False
self.__logger = logging.getLogger('dsp.support.merge_fastq')
self.parse()
def initialiseParser(self):
self.__parser.add_argument('-d', '--directory', type=str, metavar='DIRECTORY', dest='directory', nargs= '+', help= "directories (' ' separated)")
self.__parser.add_argument('-f', '--files', type=str, metavar='FILE', dest='file', nargs= '+', help="list of fastq files(' ' separated)")
self.__parser.add_argument('-b', '--bfx', type=str, metavar='BFX', dest='bfx', help="bfx project from the database (e.g. bfx588)")
self.__parser.add_argument('-i', '--input', type=str, metavar='FILE', dest='input', help="file having paths or directories")
self.__parser.add_argument('-o', '--output', type=str, metavar='DIRECTORY', dest='output', required=True, help='output directory for the merged fastq files')
self.__parser.add_argument("-s", "--single", dest = 'array', action= 'store_false', help= 'submit single jobs instead of array')
self.__parser.add_argument("-t", "--tracks", dest = 'tracks', action= 'store_true', help= 'keep tracks in the fastq filename')
self.__parser.add_argument('-z', '--zipped', dest='zipped', action='store_true', help='zip merged fastq files')
self.__parser.add_argument("-e", "--email", dest = 'email', action= 'store_true', help= 'activates email notification')
def parse(self, inputstring = None):
if isinstance(inputstring, NoneType):
self.__options = self.__parser.parse_args()
else:
self.__options = self.__parser.parse_args(inputstring)
def show_log(self, level, message):
if level == 'debug':
self.__logger.debug(message)
elif level == 'info':
self.__logger.info(message)
elif level == 'warning':
self.__logger.warning(message)
elif level == 'error':
self.__logger.error(message)
elif level == 'critical':
self.__logger.critical(message)
'''
    method checks the input list of directories and returns the valid ones
@param dirlist: list of string
@return: list
'''
def check_directory(self, dirlist):
if isinstance(dirlist, NoneType):
return []
good, bad = check_Directorylist(dirlist)
for i in good:
self.show_log('info', "valid directory: {0}".format(i))
for i in bad:
self.show_log('warning', "check directory: {0}".format(i))
if len(good) == 0:
self.show_log('warning', "check directory input")
return good
'''
    method checks the input list of files, filters for fastq files and returns the valid ones
@param filelist: list of string
@return: list
'''
def check_files(self, filelist):
if isinstance(filelist, NoneType):
return []
good, bad = check_Fileslist(filelist)
goodfiles = [i for i in good if i.endswith(('fastq.gz', 'fastq', 'fq.gz', 'fq'))]
badfiles = [i for i in good if not i.endswith(('fastq.gz', 'fastq', 'fq.gz', 'fq'))]
badfiles.extend(bad)
for i in badfiles:
self.show_log('warning', "check file: {0}".format(i))
return goodfiles
'''
method queries the database and returns the fastq file paths of valid fastq files
@param dbinst: db instance
@return: list
'''
def check_bfx(self, dbinst):
filelist = []
if isinstance(self.__options.bfx, NoneType):
return filelist
self.__bfxid = self.__options.bfx[3:]
# select all libs which belong to the project
libs = dbinst.query_BFXLib_with_bfxid(self.__bfxid)
for lib in libs:
tracks = dbinst.query_Tracks_with_libid(lib['LIBRARY_ID'])
for track in tracks:
if track['TRACKSSTATUS_ID'] != 3:
continue
files = dbinst.query_FQstorage_with_trackid(track['ID'])
for fileinput in files:
filelist.extend(self.check_files((fileinput['DIRECTORY'],)))
return [i for i in filelist if i.endswith(('fastq.gz', 'fastq', 'fq.gz', 'fq'))]
'''
method checks an input file and returns list of files and directories
    @return: two lists
'''
def check_input(self):
filelist, dirlist = [], []
if isinstance(self.__options.input, NoneType):
return [], []
good, bad = check_Fileslist([self.__options.input])
for i in bad:
self.show_log('warning', "check input file: {0}".format(i))
if len(good) == 0:
return [], []
with get_fileobject(self.__options.input, 'r') as filein:
for line in filein:
if line == '\n': continue
dirlist.extend(self.check_directory((line.strip('\n'),)))
filelist.extend(self.check_files((line.strip('\n'),)))
return filelist, dirlist
'''
    method checks if the output directory exists
'''
def check_output(self):
output = self.__options.output
good = check_Directorylist((output, ))[0]
if len(good) != 1:
self.show_log('error', "check output directory: {0}".format(output))
exit(2)
else:
self.__output = good[0]
def main(self, dbinst):
self.__email = self.__options.email
self.__zip = self.__options.zipped
self.__tracks = self.__options.tracks
self.__array = self.__options.array
self.check_output()
filelist, dirlist = [], [] #@UnusedVariable
self.__files.extend(self.check_files(self.__options.file))
self.__files.extend(self.check_bfx(dbinst))
        # files are already filtered for fastq; directories still have to be searched
filelist, dirlist = self.check_input()
self.__files.extend(filelist)
dirlist.extend(self.check_directory(self.__options.directory))
for dirname in dirlist:
if dirname != '': self.__files.extend(add_FilestoList_recursive(dirname, [], ('fastq.gz', 'fastq', 'fq.gz', 'fq')))
# remove empty lines
self.__files = sorted(set(self.__files))
self.__files = [i for i in self.__files if i != '']
for filename in self.__files:
self.show_log('info', "valid file: {0}".format(filename))
if len(self.__files) == 0:
self.show_log('error', "neither directory (-d), files (-f), input (-i) or bfx (-b) provided files")
exit(2)
else:
self.show_log('info', "testing {0} files for merging".format(len(self.__files)))
sleep(2)
def get_files(self):
return self.__files
def get_output(self):
return self.__output
def get_tracks(self):
return self.__tracks
def get_zip(self):
return self.__zip
def get_email(self):
return self.__email
def get_array(self):
return self.__array
files = property(get_files)
output = property(get_output)
zip = property(get_zip)
email = property(get_email)
tracks = property(get_tracks)
array = property(get_array)
class MergeFastq(object):
def __init__(self, inputfiles, outputdir, zipped, array, tracks, emailuse):
self.__inputfiles = inputfiles
self.__zipped = zipped
self.__array = array
self.__tracks = tracks
self.__emailuse = emailuse
self.__main = outputdir
if self.__array:
# self.__log = '{0}tmp{0}'.format(sep)
self.__log = '{0}dev{0}null'.format(sep)
else:
self.__log = '{0}log{1}'.format(self.__main, sep)
self.__qsub = '{0}qsub{1}'.format(self.__main, sep)
self.__tmp = '{0}tmp{1}'.format(self.__main, sep)
self.__email = get_emailadress()
self.create_directories()
self.__libdict = Fileclass.sort_files(self.__inputfiles)
self.__sgelist = []
self.__mergecommands = []
self.__mergefile = ''
self.__recordlist = ['Lib\tSample\tRead\tTrack\tReadcount\n']
self.__recordfile = '{0}library_records.{1}.csv'.format(self.__main, randint(1000, 9999))
self.__logger = logging.getLogger('dsp.support.merge_fastq')
def show_log(self, level, message):
if level == 'debug':
self.__logger.debug(message)
elif level == 'info':
self.__logger.info(message)
elif level == 'warning':
self.__logger.warning(message)
elif level == 'error':
self.__logger.error(message)
elif level == 'critical':
self.__logger.critical(message)
'''
    method creates the qsub and log directories and exits if that is not possible.
'''
def create_directories(self):
if not create_Directory(self.__qsub):
self.show_log('error', 'Not possible to create qsub directory in {0}'.format(self.__main))
exit(2)
if not self.__array:
# is not needed anymore, no logfile is written to the filesystem
if not create_Directory(self.__log):
self.show_log('error', 'Not possible to create log directory in {0}'.format(self.__main))
exit(2)
'''
    method identifies files which are only in one of the read directions and don't have
    a partner. They will be processed separately
'''
def getFiles_withoutPartner(self):
for libid in sorted(self.__libdict):
if len(self.__libdict[libid]) == 1: continue # don't need to do this for libs which have only a single direction
alldirection = self.__libdict[libid].keys()
# tracks common to all read direction
intertracks = set.intersection(*[set(self.__libdict[libid][i]) for i in self.__libdict[libid].keys()])
for read in alldirection:
readonlytracks = set(self.__libdict[libid][read].keys()) - intertracks
if len(readonlytracks) != 0:
self.__tracks = True
temp = []
[temp.extend(self.__libdict[libid][read][i]) for i in readonlytracks]
self.__libdict[libid]['{0}-only'.format(read)]['tracks'] = temp
for i in readonlytracks:
del self.__libdict[libid][read][i]
if len(self.__libdict[libid][read]) == 0:
del self.__libdict[libid][read]
self.show_log('info', 'The following Tracks are only in {0} and will be processed separately'.format(read))
for name in [i.filename for i in temp]:
self.show_log('info', '{0}'.format(name))
sleep(2)
'''
method builds the name for the new file by combining libid, sample, direction and track
from the fileclass instances
@param filelist: list of fileclass instances
@return: file instance, string, string
'''
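    # Illustrative result (hypothetical values): libid "L1234", sample "liver_A",
    # direction "R1", track "7" yield "L1234_liver_A_Track-7_R1.fastq.gz" when track
    # names are kept and gzip output is requested, otherwise "L1234_liver_A_R1.fastq".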
def build_outputfile(self, filelist):
libid = '-'.join(sorted(set(filelist[0].libid)))
sample = '-'.join(sorted(set(filelist[0].sample)))
sample = sample.replace(' ', '_')
direction = '-'.join(sorted(set(filelist[0].read)))
track = '-'.join(['-'.join(i.track) for i in filelist])
recordstring = '{0}\t{1}\t{2}\t{3}'.format(libid, sample, direction, track)
if self.__tracks:
filename = '{0}_{1}_Track-{2}_{3}'.format(libid, sample, track, direction)
else:
filename = '{0}_{1}_{2}'.format(libid, sample, direction)
shortname = filename
if self.__zipped:
filename = '{0}{1}.fastq.gz'.format(self.__main, filename)
else:
filename = '{0}{1}.fastq'.format(self.__main, filename)
return Fileclass(filename), shortname, recordstring
'''
    method builds the command line if only a single input file is given
@param fileinput: fileclass instance
@param fileout: fileclass instance
@return: string
'''
def call_singlefile(self, fileinput, fileout):
if fileinput.filename.endswith('gz') and self.__zipped:
return 'cat {0} > {1}'.format(fileinput.completepath, fileout.completepath)
elif fileinput.filename.endswith('fastq') and self.__zipped:
return 'gzip -c {0} > {1}'.format(fileinput.completepath, fileout.completepath)
elif fileinput.filename.endswith('gz') and not self.__zipped:
return 'gunzip -c {0} > {1}'.format(fileinput.completepath, fileout.completepath)
elif fileinput.filename.endswith('fastq') and not self.__zipped:
return 'cat {0} > {1}'.format(fileinput.completepath, fileout.completepath)
'''
method builds the command line for the transfer operation
@param filelist: list of fileclass instances
@param fileoutclass: instance of fileclass
@return: string
'''
def build_commandline(self, filelist, fileoutclass):
fileoutname = fileoutclass.completepath
length = len(filelist)
gz = [1 for i in filelist if i.completepath.endswith('gz')]
if sum(gz) == length and self.__zipped:
command = ['cat','{0}'.format(' '.join([i.completepath for i in filelist])),'>', fileoutname]
return ' '.join(command)
elif sum(gz) == length and not self.__zipped:
command = ['zcat','{0}'.format(' '.join([i.completepath for i in filelist])),'>',fileoutname]
return ' '.join(command)
elif sum(gz) == 0 and self.__zipped:
command = ['gzip', '-c', ' '.join([i.completepath for i in filelist]), '>', fileoutname]
return ' '.join(command)
elif sum(gz) == 0 and not self.__zipped:
command = ['cat', ' '.join([i.completepath for i in filelist]), '>', fileoutname]
return ' '.join(command)
else:
commandlist = []
for i in filelist:
commandlist.append(self.call_singlefile(i, fileoutclass))
return ' && '.join(commandlist)
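    # Illustrative command lines this method can produce (hypothetical paths):
    #   all inputs gzipped, gzipped output:  cat a.fq.gz b.fq.gz > out.fastq.gz
    #   all inputs gzipped, plain output:    zcat a.fq.gz b.fq.gz > out.fastq
    #   mixed inputs: the per-file commands from call_singlefile joined by ' && '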
'''
    method selects a single file instance from the list, preferring one whose
    compression state matches the zipped argument
'''
def pick_file(self, filelist):
if self.__zipped:
retlist = [i for i in filelist if i.zipped]
            if len(retlist) >= 1:
return retlist[0]
else:
return filelist[0]
else:
retlist = [i for i in filelist if not i.zipped]
            if len(retlist) >= 1:
return retlist[0]
else:
return filelist[0]
'''
    method builds the sge objects for the merge jobs. jobs are submitted either as
    single jobs or as one array job, depending on the __array variable
'''
def build_sgejob(self):
for libid in sorted(self.__libdict):
for direction in self.__libdict[libid]:
filemergelist = []
for track, fileinstances in sorted(self.__libdict[libid][direction].viewitems()):
if len(fileinstances) > 1:
fileinst = self.pick_file(fileinstances)
filemergelist.append(fileinst)
self.show_log('warning', 'Lib {0}; Direction: {1}; Track {2}; status: More than one track; Using only one'.format(libid, direction, track))
self.show_log('info', 'File {0}'.format(fileinst.filename))
else:
filemergelist.extend(fileinstances)
self.show_log('info', 'File {0}'.format(fileinstances[0].filename))
if len(filemergelist) > 1:
self.show_log('info', 'Lib {0}; Direction: {1}; Files: {2}; status: merged and transferred'.format(libid, direction, len(filemergelist)))
else:
self.show_log('info', 'Lib {0}; Direction: {1}; Files: {2}; status: not merged but transferred'.format(libid, direction, len(filemergelist)))
outputfileinst, shortoutputname, recordstring = self.build_outputfile(filemergelist)
recordstring = '{0}\t{1}\n'.format(recordstring, sum([i.readcount for i in filemergelist]))
self.__recordlist.append(recordstring)
commandline = self.build_commandline(filemergelist, outputfileinst)
if self.__array:
self.__mergecommands.append(commandline + '\n')
else:
sgeinst = SGE(self.__main, self.__log, self.__qsub, '', True, self.__email, False, 'ngs.q', 1, 3, 4, self.__emailuse)
sgeinst.modulename = ''
sgeinst.set_logfilename_line(shortoutputname)
sgeinst.set_qstatname_line(shortoutputname)
sgeinst.set_qsubfile(shortoutputname)
sgeinst.commandline = commandline
sgeinst.set_qsubscript()
self.__sgelist.append(sgeinst)
sleep(2)
if self.__array:
sgeinst = SGE(self.__main, self.__log, self.__qsub, '', True, self.__email, False, 'ngs.q', 1, 3, 1, self.__emailuse)
self.__mergefile = '{0}array_merge.{1}.csv'.format(self.__main, sgeinst.randomnumber)
sgeinst.set_arrayvalue_line(1, len(self.__mergecommands))
sgeinst.set_arraytaskline(15)
sgeinst.modulename = ''
sgeinst.set_logfilename_line('logpath')
sgeinst.set_qstatname_line('array-merge', sgeinst.randomnumber)
sgeinst.set_qsubfile('array-merge', sgeinst.randomnumber)
# sgeinst.commandline = 'SEED=$(awk "NR==$SGE_TASK_ID" {0})\neval "$SEED" && rm {1}'.format(self.__mergefile, sgeinst.logfile)
            # direct the log file to /dev/null
sgeinst.commandline = 'SEED=$(awk "NR==$SGE_TASK_ID" {0})\neval "$SEED"'.format(self.__mergefile)
sgeinst.set_qsubscript([], False)
self.__sgelist.append(sgeinst)
def write_files(self):
if len(self.__recordlist) != 0:
self.show_log('info', 'Writing tracking file: {0}'.format(self.__recordfile))
write_list(self.__recordlist, self.__recordfile, 'w')
if len(self.__mergecommands) != 0:
self.show_log('info', 'Writing array merge file: {0}'.format(self.__mergefile))
write_list(self.__mergecommands, self.__mergefile, 'w')
self.show_log('info', 'Writing qsub files')
for sgeinst in self.__sgelist:
write_string(sgeinst.qsubscript, sgeinst.qsubfile, 'w')
def start_sge(self):
for sgeinst in self.__sgelist:
self.show_log('info', 'Starting merge - {0}'.format(sgeinst.qsubfile))
starter = Popen(['qsub', sgeinst.qsubfile])
starter.wait()
def get_sgelist(self):
return self.__sgelist
sgelist = property(get_sgelist)
if __name__ == '__main__':
mainlog = MainLogger()
db = Database(Information.DB_HOST, Information.DB_USER, Information.DB_PW, Information.DB)
db.setConnection()
parser = MergeFastqParser()
parser.main(db)
inputfiles = []
for index, filename in enumerate(sorted(parser.files)):
fileinst = Fileclass(filename, db)
inputfiles.append(fileinst)
inputfiles[index].set_information()
db.closeConnection()
mergefastq = MergeFastq(inputfiles, parser.output, parser.zip, parser.array, parser.tracks, parser.email)
mergefastq.getFiles_withoutPartner()
mergefastq.build_sgejob()
mergefastq.write_files()
mergefastq.start_sge()
mainlog.close()
logging.shutdown()
|
mlesche/dsp
|
dsp/src/support_scripts/merge_fastq.py
|
Python
|
mit
| 20,921
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
if PY3:
    import urllib.parse as urlparse  # very slow on PY2; native in PY3
else:
    import urlparse  # use the native PY2 module, which is faster
import re
from platformcode import config, logger
from core import scrapertools
from core.item import Item
from core import servertools
from core import httptools
host = 'https://pornvibe.org'
# HD versions do not play; broken page?
def mainlist(item):
logger.info()
itemlist = []
itemlist.append(item.clone(title="Nuevos" , action="lista", url=host + "/all-videos/"))
itemlist.append(item.clone(title="Canal" , action="categorias", url=host + "/categories/"))
itemlist.append(item.clone(title="Buscar", action="search"))
return itemlist
def search(item, texto):
logger.info()
texto = texto.replace(" ", "+")
item.url = "%s/?s=%s" % (host, texto)
try:
return lista(item)
except:
import sys
for line in sys.exc_info():
logger.error("%s" % line)
return []
def categorias(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>|<br/>", "", data)
patron = '<div class="item-cat.*?'
patron += '<img src="([^"]+)" alt="([^"]+)".*?'
patron += '<a href="([^"]+)".*?'
patron += '<p>([^<]+)Videos posted<'
matches = re.compile(patron,re.DOTALL).findall(data)
for thumbnail,scrapedtitle,scrapedurl,cantidad in matches:
title = "%s (%s)" %(scrapedtitle,cantidad)
url = urlparse.urljoin(host,scrapedurl)
itemlist.append(item.clone(action="lista", title=title, url=url,
fanart=thumbnail, thumbnail=thumbnail, plot="") )
return sorted(itemlist, key=lambda i: i.title)
def stitle(title, url):
logger.info()
    t = title.split()
    n_words = len(t) - 1
    url = scrapertools.find_single_match(url, '.org/([^/]+)')
    url2 = url.split('-')[n_words:]
t2=""
for elem in url2:
t2 += "%s " % elem.capitalize()
stitle = "%s %s" %(title, t2)
return stitle
def lista(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data= scrapertools.find_single_match(data, '<head>(.*?)>Next »')
data = re.sub(r"\n|\r|\t| |<br>|<br/>", "", data)
patron = '<div class="item large-\d+.*?'
patron += 'src="([^"]+)".*?'
patron += '<div class="video-stats clearfix">(.*?)<span class="sl-wrapper">(.*?)<div class="post-des">.*?'
patron += '<a href="([^"]+)">([^<]+)<'
matches = re.compile(patron,re.DOTALL).findall(data)
for scrapedthumbnail,quality,time,scrapedurl,scrapedtitle in matches:
if "..." in scrapedtitle:
scrapedtitle = scrapertools.find_single_match(scrapedtitle, '(.*?–)')
scrapedtitle = stitle(scrapedtitle,scrapedurl)
quality = scrapertools.find_single_match(quality, '<h6>([^<]+)</h6>')
quality = "" # Solo ofrece videolinks SD
time = scrapertools.find_single_match(time, '<span>([^<]+)</span>')
title = "[COLOR yellow]%s[/COLOR] [COLOR red]%s[/COLOR] %s" % (time,quality, scrapedtitle)
thumbnail = scrapedthumbnail
url = urlparse.urljoin(item.url,scrapedurl)
plot = ""
action = "play"
if logger.info() == False:
action = "findvideos"
itemlist.append(item.clone(action=action, title=title, url=url,
thumbnail=thumbnail, fanart=thumbnail, plot=plot, contentTitle=title))
next_page = scrapertools.find_single_match(data, '<a class="next page-numbers" href="([^"]+)"')
if next_page:
next_page = urlparse.urljoin(item.url,next_page)
itemlist.append(item.clone(action="lista", title="[COLOR blue]Página Siguiente >>[/COLOR]", url=next_page) )
return itemlist
def findvideos(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>|<br/>", "", data)
patron = 'src="([^"]+)" type="video/mp4"'
matches = re.compile(patron,re.DOTALL).findall(data)
for url in matches:
itemlist.append(item.clone(action="play", title= "%s", contentTitle= item.title, url=url))
itemlist = servertools.get_servers_itemlist(itemlist, lambda i: i.title % i.server.capitalize())
return itemlist
def play(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>|<br/>", "", data)
patron = 'src="([^"]+)" type="video/mp4"'
matches = re.compile(patron,re.DOTALL).findall(data)
for url in matches:
itemlist.append(item.clone(action="play", title= "%s", contentTitle= item.title, url=url))
itemlist = servertools.get_servers_itemlist(itemlist, lambda i: i.title % i.server.capitalize())
return itemlist
|
alfa-addon/addon
|
plugin.video.alfa/channels/pornvibe.py
|
Python
|
gpl-3.0
| 5,118
|
#! /usr/bin/env python
# *******************************************************************************
# This file is part of tools_piccante.
#
# tools_piccante is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# tools_piccante is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with tools_piccante. If not, see <http://www.gnu.org/licenses/>.
#
# *******************************************************************************
# -*- coding: utf-8 -*-
#
import os, os.path, glob, sys, shutil, time
from PyQt4.QtCore import pyqtSlot
from PyQt4 import QtGui
import struct
#from scipy import *
import numpy as np
class Window(QtGui.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setGeometry(100,100,600,500)
self.setWindowTitle("Read Fields")
self.addMyMenu()
self.comp = 0
self.textComp = 'x'
self.outpath = os.path.join(os.getcwd(),"output.dat")
self.inputpath = os.getcwd()
self.home()
def home(self):
#self.addMyButtons()
self.addMyToolbar()
self.Flag = False
self.addLabels()
self.addCompChoice()
self.show()
def addLabels(self):
xpos = 10
ypos = 200
vsize = 20
self.statusDisplay = QtGui.QLabel("Status: not ready!", self)
self.statusDisplay.move(xpos,ypos)
self.statusDisplay.resize(480,vsize)
ypos+=vsize
self.fileDisplay = QtGui.QLabel("No File Chosen", self)
self.fileDisplay.move(xpos,ypos)
self.fileDisplay.resize(480,vsize)
ypos+=vsize
self.fileInfo1 = QtGui.QLabel("File info:", self)
self.fileInfo1.move(xpos,ypos)
self.fileInfo1.resize(480,vsize)
ypos+=vsize
text = "Grid:"
self.fileInfo2 = QtGui.QLabel(text, self)
self.fileInfo2.move(xpos,ypos)
self.fileInfo2.resize(480,vsize)
ypos+=vsize
self.fileInfo3 = QtGui.QLabel("", self)
self.fileInfo3.move(xpos,ypos)
self.fileInfo3.resize(480,vsize)
self.progress = QtGui.QProgressBar(self)
self.progress.setGeometry(10,80,250,20)
def addMyToolbar(self):
openAction = QtGui.QAction('Open File', self)
openAction.triggered.connect(self.openFile)
saveAction = QtGui.QAction('Write File', self)
saveAction.triggered.connect(self.saveFile)
self.toolbar = self.addToolBar("openAction")
self.toolbar.addAction(openAction)
self.toolbar.addAction(saveAction)
def addMyMenu(self):
openAction = QtGui.QAction("&Open", self)
openAction.setShortcut("Ctrl+O")
        openAction.setStatusTip('Open a field file')
openAction.triggered.connect(self.openFile)
saveAction = QtGui.QAction("&Save", self)
saveAction.setShortcut("Ctrl+S")
saveAction.setStatusTip('output file')
saveAction.triggered.connect(self.saveFile)
quitAction = QtGui.QAction("&Quit", self)
quitAction.setShortcut("Ctrl+Q")
quitAction.setStatusTip('quit')
quitAction.triggered.connect(self.close_app)
mainMenu = self.menuBar()
#mainMenu.setNativeMenuBar(False)
fileMenu = mainMenu.addMenu('&File')
fileMenu.addAction(openAction)
fileMenu.addAction(saveAction)
fileMenu.addAction(quitAction)
self.statusBar()
def addCompChoice(self):
xpos = 10
ypos = 150
xsize = 150
self.comp = 0
self.componentLabel = QtGui.QLabel("Components = ?", self)
self.componentLabel.move(xpos,ypos)
self.componentLabel.resize(150,20)
self.compChoice = QtGui.QComboBox(self)
self.compChoice.move(xpos+xsize,ypos)
self.compChoice.activated[str].connect(self.comp_choice)
def addFileInfo(self):
text = "Total points: %d = [%d : %d : %d] [Nx : Ny : Nz]" %(self.Ntot,self.Nx, self.Ny, self.Nz)
self.fileInfo2.setText(text)
def changeCompChoice(self):
self.compChoice.clear()
if self.Nc >=1:
text = 'Components = %d' %self.Nc
self.componentLabel.setText(text)
self.compChoice.addItem("x")
if self.Nc >=2 :
self.compChoice.addItem("y")
if self.Nc >=3 :
self.compChoice.addItem("z")
def addMyButtons(self):
btn = QtGui.QPushButton("quit", self)
btn.clicked.connect(self.close_app)
btn.resize(70,20)
btn.move(0,100)
btn2 = QtGui.QPushButton("openFile", self)
btn2.clicked.connect(self.openFile)
btn2.resize(70,20)
btn2.move(70,100)
btn3 = QtGui.QPushButton("WRITE", self)
btn3.clicked.connect(self.saveFile)
btn3.resize(70,20)
btn3.move(140,100)
def comp_choice(self, text):
label = "Chosen component: " + text
self.componentLabel.setText(label)
self.comp = 0
self.textComp = text
if text == "y":
self.comp = 1
elif text == "z":
self.comp = 2
else:
pass
print "changed component to " + text
def close_app(self):
mymessage = "Sei sicuro?"
result = QtGui.QMessageBox.question(self, 'Message', mymessage, QtGui.QMessageBox.Yes | QtGui.QMessageBox.No, QtGui.QMessageBox.No)
if result == QtGui.QMessageBox.Yes:
sys.exit()
else:
pass
def myDefaultOutput(self):
if self.Nc == 1:
self.outpath = os.path.join(os.getcwd(),"output.dat")
else:
self.outpath = os.path.join(os.getcwd(),"output_%s.dat"%self.textComp)
def saveFile(self):
if not(self.Flag):
QtGui.QMessageBox.warning(self, "Attention", "No data file loaded: chose a file to open")
return
else:
self.myDefaultOutput()
self.outputname = QtGui.QFileDialog.getSaveFileName(self, 'Save File', self.outpath,)
self.fileInfo3.setText("Output file: " + self.outputname)
if self.comp >= self.Nc:
self.comp = 0
message = 'File has only %i components\nThe default (x) will be used instead' % self.Nc
QtGui.QMessageBox.warning(self, "Attention", message)
np.savetxt( str( self.outputname) ,self.myField[0,:,:,self.comp],fmt='%15.14e')
print 'done'
def openFile(self):
self.filename = QtGui.QFileDialog.getOpenFileName(self, 'Open File', self.inputpath)
self.Flag = self.test_file(self.filename)
if self.Flag:
self.myField = self.analize_field(self.filename)
self.statusDisplay.setText("Status OK")
self.fileDisplay.setText("Input file: " + self.filename)
self.changeCompChoice()
self.addFileInfo()
else:
self.fileDisplay.setText("ERROR")
def printName(self):
print self.filename
def test_file(self, filename):
f = open(filename ,'rb')
#- endianness 0:small - 1:big -#
endianness = struct.unpack('i', f.read(4))[0]
#- global grid dim -#
Nx = struct.unpack('i', f.read(4))[0]
Ny = struct.unpack('i', f.read(4))[0]
Nz = struct.unpack('i', f.read(4))[0]
if not(Nx > 0 and Ny > 0 and Nz > 0):
return False
if not(Nx < 1000000 and Ny < 10000 and Nz <10000):
return False
#- processor grid -#
Npx = struct.unpack('i', f.read(4))[0]
Npy = struct.unpack('i', f.read(4))[0]
Npz = struct.unpack('i', f.read(4))[0]
Nproc = Npx*Npy*Npz
if not(Npx > 0 and Npy > 0 and Npz > 0):
return False
if not(Npx < 1000 and Npy < 1000 and Npz <1000):
return False
#- field components -#
Nc = struct.unpack('i', f.read(4))[0]
if not(Nc < 4 and Nc >0):
return False
        f.close()
        return True
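    # Note added for clarity (layout reconstructed from the reads below, not
    # documented in the original): the field file starts with one int for
    # endianness, three ints Nx/Ny/Nz (global grid), three ints Npx/Npy/Npz
    # (processor grid) and one int Nc (components), followed by the x, y and z
    # grid coordinates as floats, then one block per processor: six ints
    # (i0, j0, k0, li0, lj0, lk0) and li0*lj0*lk0*Nc floats of field data.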
def analize_field(self,filename):
f = open(filename ,'rb')
#- endianness 0:small - 1:big -#
endianness = struct.unpack('i', f.read(4))[0]
#- global grid dim -#
Nx = struct.unpack('i', f.read(4))[0]
Ny = struct.unpack('i', f.read(4))[0]
Nz = struct.unpack('i', f.read(4))[0]
Ntot = Nx*Ny*Nz
self.Ntot = Ntot
self.Nx = Nx
self.Ny = Ny
self.Nz = Nz
#- processor grid -#
Npx = struct.unpack('i', f.read(4))[0]
Npy = struct.unpack('i', f.read(4))[0]
Npz = struct.unpack('i', f.read(4))[0]
Nproc = Npx*Npy*Npz
#- field components -#
self.Nc = struct.unpack('i', f.read(4))[0]
#- grid -> X -#
#x = np.zeros((Nx))
#for i in range(0,Nx):
x = struct.unpack('f'*Nx, f.read(4*Nx))
np.savetxt( 'x.dat' ,x[:],fmt='%15.14e')
#- grid -> Y -#
y = np.zeros((Ny))
for i in range(0,Ny):
y[i] = struct.unpack('f', f.read(4))[0]
#- grid -> Z -#
z = np.zeros((Nz))
for i in range(0,Nz):
z[i] = struct.unpack('f', f.read(4))[0]
#- loop on processors -#
F = np.zeros((Nz,Ny,Nx,self.Nc))
counter = 0
prog = 0.0
for nprocessor in range(0,Nproc):
#-processor dims -#
i0 = struct.unpack('i', f.read(4))[0]
j0 = struct.unpack('i', f.read(4))[0]
k0 = struct.unpack('i', f.read(4))[0]
li0 = struct.unpack('i', f.read(4))[0]
lj0 = struct.unpack('i', f.read(4))[0]
lk0 = struct.unpack('i', f.read(4))[0]
#print '>>> ',i0,j0,k0,li0,lj0,lk0
NN=li0*lj0*lk0*self.Nc
array=np.array(struct.unpack('f'*NN, f.read(4*NN))).reshape(lk0,lj0,li0,self.Nc)
for k in range(0,lk0):
for j in range(0,lj0):
for i in range(0,li0):
for c in range(0,self.Nc):
F[k+k0,j+j0,i+i0,c] = array[k,j,i,c]
counter += li0
prog = counter*(100.0/Ntot)
self.progress.setValue(prog)
#np.savetxt( nameOutFile ,F[0,:,:,component],fmt='%15.14e')
f.close()
print "done"
return F
def run():
app = QtGui.QApplication(sys.argv)
GUI = Window()
sys.exit(app.exec_())
run()
|
ALaDyn/tools-piccante
|
py-tools/readFieldsWithGUI.py
|
Python
|
gpl-3.0
| 11,248
|
import errno
import os
def deepgetattr(obj, attr):
"""Recurse through an attribute chain to get the ultimate value."""
return reduce(getattr, attr.split('.'), obj)
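# Illustrative usage (the attribute names here are hypothetical, not from this
# repo): deepgetattr(doc, 'docket.court.full_name') is equivalent to
# doc.docket.court.full_name.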
def mkdir_p(path):
"""Makes a directory path, but doesn't crash if the path already exists."""
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
|
shashi792/courtlistener
|
alert/lib/utils.py
|
Python
|
agpl-3.0
| 448
|
#!/usr/bin/env python
from manager import Plugin
from operator import itemgetter
import GeoIP
class GeoIPStats(Plugin):
def __init__(self, **kwargs):
self.gi = GeoIP.new(GeoIP.GEOIP_MEMORY_CACHE)
self.countries = {}
def process(self, **kwargs):
if 'remote_host' in kwargs:
country = self.gi.country_name_by_addr(kwargs['remote_host'])
if country in self.countries:
self.countries[country] += 1
else:
self.countries[country] = 1
def report(self, **kwargs):
print "== Requests by country =="
for (country, count) in sorted(self.countries.iteritems(), key=itemgetter(1), reverse=True):
print " %10d: %s" % (count, country)
|
rytis/Apache-access-log-parser
|
plugins/plugin_geoip_stats.py
|
Python
|
apache-2.0
| 767
|
#
# Copyright 2013 © Nguyễn Hà Dương (cmpitgATgmailDOTcom)
#
# This file is part of Blutkit.
#
# Blutkit is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Blutkit is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Blutkit. If not, see <http://www.gnu.org/licenses/>.
#
#
# Notes: There are several ways to present a key/modifier:
# * QtKey (a Qt enum, currently served as the internal representation
# of a key for performance reason)
# * Key (a string)
# * Str (a string, the human-readable and parseable form)
#
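# For example (illustrative, added note): Qt.Key_Control is the QtKey form,
# while toKeyStr(Qt.Key_Control) yields its string form (presumably "control"),
# which is also what the parseable Str form uses.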
from blutkit.gui.keyconstants import *
from blutkit.utils.strutils import *
from blutkit.config import Config
def isKeyPressEvent(event):
"""Determine if an event is a keypress event."""
return event.type() == QEvent.Type.KeyPress
def isKeyReleaseEvent(event):
"""Determine if an event is a keyrelease event."""
return event.type() == QEvent.Type.KeyRelease
def isKeyEvent(event):
"""Determine if an event is a keyboard event."""
return isKeyPressEvent(event) or isKeyReleaseEvent(event)
def toKeyStr(key):
"""Convert a key to its string representation."""
if not isString(key):
result = QtKeyToKeyStr.get(key, None)
# Should be a keymod
if not result:
result = QtKeymodToKeyStr.get(key, "")
else:
result = key
return result
def toQtKey(key):
"""Convert a string to QtKey."""
# Special case: Colon and Comma have different enum values but share the
# same symbol ",". All are unified as Comma.
if key == Qt.Key_Colon:
key = Qt.Key_Comma
if isString(key):
key = KeyStrToQtKey[key]
return key
def toKeymod(key):
"""Convert a key to QtKeymod."""
if not isString(key):
key = toKeyStr(key)
return KeyStrToQtKeymod[key]
def isKeymod(key):
"""Determine if a key is also a modifier."""
if not isString(key):
key = toKeyStr(key)
return key in Keymods
def isMouseButton(key):
"""Determine if a key represents a mouse button."""
return toKeyStr(key) in MouseButtons
def getKeyFromEvent(event):
"""Return the corresponding string representing the key from a key
event."""
return event.key()
def getKeymodsFromEvent(event):
"""Return a dictionary of pressed key modifiers."""
mods = set()
for modVal in QtKeymods:
if (int(event.modifiers()) & int(modVal)) == int(modVal):
mods.add(modVal)
return mods
def toMouseButton(key):
"""Convert a string representing a mouse button to Qt's representation."""
return StrToMouseButton[key]
def isSingleKeyStroke(keycomb):
"""Determine if a key combination is just a single key/mouse stroke."""
return len(keycomb) == 1 and len(keycomb[0].mods) == 0
def bindKeyGlobal(keys="", commands=""):
"""Add a global key binding."""
removeGlobalKeyBinding(keys)
Config["globalkeybindings"].append({ "keys": keys, "commands": commands })
def globalKeyBindingExists(keys):
"""Check if a global key binding exists."""
    # filter() returns a lazy iterator on Python 3, so materialize it before len()
    return len(list(filter(lambda d: d["keys"] == keys,
                           Config["globalkeybindings"]))) != 0
def removeGlobalKeyBinding(keys):
"""Remove a key binding from global key binding."""
Config["globalkeybindings"] = list(filter(lambda d: d["keys"] != keys,
Config["globalkeybindings"]))
# print(isKeymod("shift"))
# print(toKeymod("control"))
# print(toKeyStr(toKeymod("control"))) # == "control"
|
cmpitg/blutkit
|
blutkit/gui/keyutils.py
|
Python
|
gpl-3.0
| 3,939
|
from pprint import pprint
VAR = 42
def foo():
import sys
import ast, tokenize
pass
class C:
from textwrap import dedent
pass
import codecs as C
pass
|
asedunov/intellij-community
|
python/testData/formatter/noBlankLinesAfterLocalImports_after.py
|
Python
|
apache-2.0
| 182
|
import pytest
from opentrons import protocol_api as papi, types
labware_name = 'corning_96_wellplate_360ul_flat'
def test_load_to_slot(loop):
ctx = papi.ProtocolContext(loop=loop)
labware = ctx.load_labware(labware_name, '1')
assert labware._offset == types.Point(0, 0, 0)
other = ctx.load_labware(labware_name, 2)
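    # added note: slot 2 sits one deck slot to the right of slot 1, hence the
    # 132.5 mm x-offset asserted below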
assert other._offset == types.Point(132.5, 0, 0)
def test_loaded(loop):
ctx = papi.ProtocolContext(loop=loop)
labware = ctx.load_labware(labware_name, '1')
assert ctx.loaded_labwares[1] == labware
def test_get_incorrect_definition_by_name():
with pytest.raises(FileNotFoundError):
papi.labware.get_labware_definition('fake_labware')
def test_get_mixed_case_labware_def():
dfn = papi.labware.get_labware_definition(
'COrnIng_96_wElLplaTE_360UL_Flat')
assert dfn['parameters']['loadName'] == labware_name
def test_load_label(loop):
ctx = papi.ProtocolContext(loop=loop)
labware = ctx.load_labware(labware_name, '1', 'my cool labware')
assert 'my cool labware' in str(labware)
def test_deprecated_load(loop):
ctx = papi.ProtocolContext(loop=loop)
labware = ctx.load_labware_by_name(labware_name, '1', 'my cool labware')
assert 'my cool labware' in str(labware)
|
Opentrons/labware
|
api/tests/opentrons/protocol_api/test_labware_load.py
|
Python
|
apache-2.0
| 1,271
|
import datetime
import numbers
from functools import partial
from django.urls import reverse
from django.utils.translation import ugettext as _
import pytz
from couchdbkit import ResourceNotFound
from casexml.apps.case.views import get_wrapped_case
from corehq.apps.hqwebapp.templatetags.proptable_tags import get_display_data
def case_hierarchy_context(case, get_case_url, show_view_buttons=True, timezone=None):
wrapped_case = get_wrapped_case(case)
if timezone is None:
timezone = pytz.utc
columns = wrapped_case.related_cases_columns
type_info = wrapped_case.related_type_info
descendent_case_list = get_flat_descendant_case_list(
case, get_case_url, type_info=type_info
)
parent_cases = []
if case.live_indices:
# has parent case(s)
# todo: handle duplicates in ancestor path (bubbling up of parent-child
# relationships)
for idx in case.live_indices:
try:
parent_cases.append(idx.referenced_case)
except ResourceNotFound:
parent_cases.append(None)
for parent_case in parent_cases:
if parent_case:
parent_case.edit_data = {
'view_url': get_case_url(parent_case.case_id)
}
last_parent_id = parent_case.case_id
else:
last_parent_id = None
for c in descendent_case_list:
if not getattr(c, 'treetable_parent_node_id', None) and last_parent_id:
c.treetable_parent_node_id = last_parent_id
case_list = parent_cases + descendent_case_list
for c in case_list:
if not c:
continue
c.columns = []
case_dict = get_wrapped_case(c).to_full_dict()
for column in columns:
c.columns.append(get_display_data(
case_dict, column, timezone=timezone))
return {
'current_case': case,
'domain': case.domain,
'case_list': case_list,
'columns': columns,
'num_columns': len(columns) + 1,
'show_view_buttons': show_view_buttons,
}
def normalize_date(val):
# Can't use isinstance since datetime is a subclass of date.
if type(val) == datetime.date:
return datetime.datetime.combine(val, datetime.time.min)
return val
def get_inverse(val):
if isinstance(val, (datetime.datetime, datetime.date)):
return datetime.datetime.max - val
elif isinstance(val, numbers.Number):
        return 10 ** 20 - val
elif val is None or isinstance(val, bool):
return not val
else:
raise Exception("%r has uninversable type: %s" % (val, type(val)))
def sortkey(child, type_info=None):
"""Return sortkey based on sort order defined in type_info, or use default
based on open/closed and opened_on/closed_on dates.
"""
type_info = type_info or {}
case = child['case']
if case.closed:
key = [1]
try:
for attr, direction in type_info[case.type]['closed_sortkeys']:
val = normalize_date(getattr(case, attr))
if direction.lower() == 'desc':
val = get_inverse(val)
key.append(val)
except KeyError:
key.append(datetime.datetime.max - case.closed_on)
else:
key = [0]
try:
for attr, direction in type_info[case.type]['open_sortkeys']:
val = normalize_date(getattr(case, attr))
if direction.lower() == 'desc':
val = get_inverse(val)
key.append(val)
except KeyError:
key.append(case.opened_on or datetime.datetime.min)
return key
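# Illustrative shape of the ``type_info`` dict consumed above (keys shown here
# are examples, not taken from this file):
#     {'<case_type>': {'open_sortkeys': [('opened_on', 'desc')],
#                      'closed_sortkeys': [('closed_on', 'asc')],
#                      'case_id_attr': '<session variable name>'}}
# When a case type has no entry, sortkey() falls back to the opened_on/closed_on
# defaults in the KeyError branches.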
def get_session_data(case, current_case, type_info):
# this logic should ideally be implemented in subclasses of
# CommCareCase
if type_info and case.type in type_info:
attr = type_info[case.type]['case_id_attr']
return {
attr: case.case_id,
'case_id': current_case.case_id
}
else:
return {
'case_id': case.case_id
}
TREETABLE_INDENT_PX = 19
def process_case_hierarchy(case_output, get_case_url, type_info):
current_case = case_output['case']
submit_url_root = reverse('receiver_post', args=[current_case.domain])
form_url_root = reverse('formplayer_main', args=[current_case.domain])
def process_output(case_output, depth=0):
for c in case_output['child_cases']:
process_output(c, depth=depth + 1)
case = case_output['case']
common_data = {
'indent_px': depth * TREETABLE_INDENT_PX,
'submit_url_root': submit_url_root,
'form_url_root': form_url_root,
'view_url': get_case_url(case.case_id),
'session_data': get_session_data(case, current_case, type_info)
}
data = type_info.get(case.type, {})
if 'description_property' in data:
data['description'] = getattr(case, data['description_property'], None)
if 'edit_session_data' in data:
data['session_data'].update(data['edit_session_data'])
data.update(common_data)
case.edit_data = data
if 'child_type' in data and not case.closed:
child_type = data['child_type']
child_data = type_info.get(child_type, {})
child_data.update(common_data)
child_data.update({
"link_text": _("Add %(case_type)s") % {
'case_type': child_data.get('type_name', child_type)
},
"parent_node_id": case.case_id,
})
if 'create_session_data' in child_data:
child_data['session_data'].update(child_data['create_session_data'])
case.add_child_data = child_data
process_output(case_output)
def get_case_hierarchy(case, type_info):
def get_children(case, referenced_type=None, seen=None):
seen = seen or set()
ignore_types = type_info.get(case.type, {}).get("ignore_relationship_types", [])
if referenced_type and referenced_type in ignore_types:
return None
seen.add(case.case_id)
children = [
get_children(i.referenced_case, i.referenced_type, seen) for i in case.reverse_indices
if i.referenced_id and i.referenced_id not in seen
]
children = [c for c in children if c is not None]
# non-first-level descendants
descendant_types = []
for c in children:
descendant_types.extend(c['descendant_types'])
descendant_types = list(set(descendant_types))
children = sorted(children, key=partial(sortkey, type_info=type_info))
# set parent_case_id used by flat display
for c in children:
if not hasattr(c['case'], 'treetable_parent_node_id'):
c['case'].treetable_parent_node_id = case.case_id
child_cases = []
for c in children:
child_cases.extend(c['case_list'])
return {
'case': case,
'child_cases': children,
'descendant_types': list(set(descendant_types + [c['case'].type for c in children])),
'case_list': [case] + child_cases
}
return get_children(case)
def get_flat_descendant_case_list(case, get_case_url, type_info=None):
type_info = type_info or {}
hierarchy = get_case_hierarchy(case, type_info)
process_case_hierarchy(hierarchy, get_case_url, type_info)
return hierarchy['case_list']
|
dimagi/commcare-hq
|
corehq/apps/reports/view_helpers.py
|
Python
|
bsd-3-clause
| 7,625
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
from ._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class CosmosDBManagementClientConfiguration(Configuration):
"""Configuration for CosmosDBManagementClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The ID of the target subscription.
:type subscription_id: str
"""
def __init__(
self,
credential: "TokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
super(CosmosDBManagementClientConfiguration, self).__init__(**kwargs)
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2021-11-15-preview"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-cosmosdb/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs # type: Any
):
# type: (...) -> None
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
|
Azure/azure-sdk-for-python
|
sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/_configuration.py
|
Python
|
mit
| 3,243
|
# http://trac.secdev.org/scapy/ticket/31
# scapy.contrib.description = MPLS
# scapy.contrib.status = loads
from scapy.packet import Packet, bind_layers, Padding
from scapy.fields import BitField, ByteField
from scapy.layers.l2 import Ether, GRE
from scapy.layers.inet import IP
from scapy.layers.inet6 import IPv6
class MPLS(Packet):
name = "MPLS"
fields_desc = [ BitField("label", 3, 20),
BitField("cos", 0, 3),
BitField("s", 1, 1),
ByteField("ttl", 0) ]
def guess_payload_class(self, payload):
if len(payload) >= 1:
ip_version = (ord(payload[0]) >> 4) & 0xF
if ip_version == 4:
return IP
elif ip_version == 6:
return IPv6
return Padding
bind_layers(Ether, MPLS, type=0x8847)
bind_layers(GRE, MPLS, proto=0x8847)
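# Illustrative usage (added note, not part of the original contrib module):
#   pkt = Ether() / MPLS(label=16, ttl=64) / IP(dst="192.0.2.1")
#   pkt.show()
# When dissecting, guess_payload_class() inspects the first nibble of the MPLS
# payload to decide between IPv4 and IPv6.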
|
guedou/scapy-bpf
|
scapy/contrib/mpls.py
|
Python
|
gpl-2.0
| 814
|
#!/usr/bin/env python
# -*- mode: python; sh-basic-offset: 4; indent-tabs-mode: nil; coding: utf-8 -*-
# vim: tabstop=4 softtabstop=4 expandtab shiftwidth=4 fileencoding=utf-8
import argparse
import sys
import clusto
from clusto import drivers
from clusto import script_helper
from pprint import pprint
import traceback
JSON=False
YAML=False
try:
import yaml
YAML=True
except ImportError:
pass
try:
import simplejson as json
JSON=True
except ImportError:
try:
import json
JSON=True
except:
pass
class Attr(script_helper.Script):
'''
    Operate upon an object's attributes: add, remove, list or set
    attributes of any kind.
'''
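    # Illustrative invocation (added note; the exact command-wrapper name is an
    # assumption that depends on how clusto installs these scripts):
    #   clusto-attr show -k ip myserver
    # would list the "ip" attributes of the object named "myserver".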
obj = None
format = 'list'
def __init__(self):
script_helper.Script.__init__(self)
def run_show_yaml(self, attrs):
self.debug('Printing in format: YAML')
print yaml.safe_dump(attrs, encoding='utf-8',
explicit_start=True, default_flow_style=False)
return 0
def run_show_json(self, attrs):
self.debug('Printing in format: JSON')
print json.dumps(attrs, sort_keys=True, indent=2)
return 0
def run_show_csv(self, attrs):
self.debug('Printing in format: CSV')
print 'key;subkey;number;"value"'
for attr in attrs:
print '%s;%s;%s;"%s"' % (
str(attr['key'] or ''),
str(attr['subkey'] or ''),
str(int(attr['number'] or 0)),
str(attr['value']))
return 0
def run_show_list(self, attrs):
self.debug('Printing in format: List')
maxkey = 3 + max([len(str(_['key'])) for _ in attrs] + [0])
maxsubkey = 6 + max([len(str(_['subkey'])) for _ in attrs] + [0])
maxnumber = 3 + max([len(str(_['number'])) for _ in attrs] + [0])
if maxkey < 5: maxkey = 5
if maxsubkey < 8: maxsubkey = 8
print ''.join(['KEY'.ljust(maxkey, ' '), 'SUBKEY'.ljust(maxsubkey, ' '), 'VALUE'])
for attr in attrs:
print ''.join([str(_).ljust(maxsize, ' ') for _, maxsize in [
(attr['key'], maxkey),
(attr['subkey'], maxsubkey),
(attr['value'], 0),
]])
return 0
def run_set(self, kwargs):
kwargs.pop('merge_container_attrs')
return self.obj.set_attr(**kwargs)
def run_add(self, kwargs):
kwargs.pop('merge_container_attrs')
return self.obj.add_attr(**kwargs)
def run_delete(self, kwargs):
kwargs.pop('merge_container_attrs')
return self.obj.del_attrs(**kwargs)
def run_show(self, kwargs):
attrs = self.obj.attrs(**kwargs)
attrs.sort(key=lambda _: (_.key, _.number, _.subkey, _.value))
result = []
for attr in attrs:
row = {
'key': attr.key,
'subkey': attr.subkey,
'number': attr.number,
'type': attr.datatype,
'value': unicode(attr.value)
}
result.append(row)
return (getattr(self, 'run_show_%s' % self.format)(result))
def run(self, args):
obj = clusto.get(args.obj[0])
if not obj:
self.error('Object %s does not exist' % args.obj[0])
return -1
self.obj = obj[0]
opts = {}
kwargs = dict(args.__dict__.items())
self.format = args.format
for k in ['key', 'subkey', 'value', 'merge_container_attrs']:
if kwargs[k] != None:
opts[k] = kwargs[k]
return (getattr(self, 'run_%s' % args.action[0])(opts))
def _add_arguments(self, parser):
actions = ['add', 'show', 'set', 'delete']
choices = ['list', 'csv']
if JSON:
choices.append('json')
if YAML:
choices.append('yaml')
parser.add_argument('action', nargs=1, metavar='action', choices=actions,
help='Action to execute (add, delete, set, show)')
parser.add_argument('--format', choices=choices, default='list',
help='What format to use to display the info, defaults to "list"')
parser.add_argument('-k', '--key', help='Attribute key to filter on',
default=None)
parser.add_argument('-s', '--subkey', help='Attribute subkey to filter on',
default=None)
parser.add_argument('-v', '--value', help='Attribute value to filter on',
default=None)
parser.add_argument('-m', '--merge', default=False, action='store_true',
dest='merge_container_attrs',
help='Merge container attributes recursively (defaults to False)')
parser.add_argument('obj', nargs=1, metavar='object',
help='Object to modify/query attributes from')
def add_subparser(self, subparsers):
parser = self._setup_subparser(subparsers)
self._add_arguments(parser)
def main():
attr, args = script_helper.init_arguments(Attr)
return(attr.run(args))
if __name__ == '__main__':
sys.exit(main())
|
sanyaade-mobiledev/clusto
|
src/clusto/commands/attr.py
|
Python
|
bsd-3-clause
| 5,122
|
# redditarchiver 0.02, a tool for archiving reddit JSON data and linked files
# Copyright (C) 2014 Thadeus J. Fleming
#
# redditarchiver is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# redditarchiver is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with redditarchiver. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, with_statement
import json
import praw
import os
import sys
import argparse
import urlparse
import re
import subprocess32
import requests
import time
import shutil
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--fresh-start',action='store_true', help='Ignore any previous data and get everything again.')
parser.add_argument('--reprocess',action='store_true', help='Run the whole JSON file through the processing function again. This is handy when additional processing functionality has been added.')
parser.add_argument('--no-save',dest='save', action='store_false', help="Don't save the resulting data or latest ID.")
parser.add_argument('--process',dest='process', action='store_true', help="Process the results, downloading imgur links with wget and tagging them with the tag utility.")
parser.add_argument('-d', '--directory', help="Where to put the archived files", default='~/Archive')
parser.add_argument('-u', '--username', help="Which username to use. Overrides users.json")
args = parser.parse_args()
user_agent = "redditarchiver 0.02 by iamthad https://github.com/iamthad/redditarchiver"
r = praw.Reddit(user_agent = user_agent)
r.config.store_json_result = True
fresh_start = args.fresh_start
process = args.process
reprocess = args.reprocess
save = args.save
archiveDir = os.path.expanduser(args.directory)
# redditarchiver folder
raDir = os.path.expanduser('~/.redditarchiver')
# Check if redditarchiver folder exists
if not os.path.isdir(raDir):
os.mkdir(raDir)
if args.username:
users = [{'username':args.username, 'toArchive': ['liked','saved']}]
else:
usersFn = os.path.join(raDir,'users.json')
if os.path.exists(usersFn):
with open(usersFn) as usersFile:
users = json.load(usersFile)
else:
print('Create a JSON file at', usersFn, 'with user information, or run with the --username argument. See users.json.template for an example')
if process or reprocess:
urlsFn = os.path.join(archiveDir,'urls.txt')
tagsFn = os.path.join(archiveDir,'tags.txt')
if os.path.exists(urlsFn):
os.remove(urlsFn)
if os.path.exists(tagsFn):
os.remove(tagsFn)
for user in users:
print(user['username'])
r.login(username=user['username'],password=(user['password'] if 'password' in user else None))
me = r.user
userDir = os.path.join(raDir,user['username'])
if not os.path.isdir(userDir):
os.mkdir(userDir)
for ttype in user['toArchive']:
newestID = get_newest_id(ttype, userDir) if not fresh_start else []
things = get_things(ttype, me, userDir, newestID)
if process and not reprocess:
make_temp_files(things, archiveDir, urlsFn, tagsFn)
things = (load_old_things(ttype, things, userDir) if not fresh_start else things)
if reprocess:
make_temp_files(things, archiveDir, urlsFn, tagsFn)
if save:
save_things(ttype, things, userDir)
if process or reprocess:
if os.path.exists(urlsFn) and os.path.exists(tagsFn):
shutil.copy2('mktags.sh',archiveDir)
run_commands(archiveDir,raDir)
def get_newest_id(ttype, userDir):
    newestIdFn = os.path.join(userDir,ttype+'-newest.txt')
    thingJSONFn = os.path.join(userDir,ttype+'.json')
if os.path.exists(newestIdFn):
print("Found " + ttype + "-newest")
with open(newestIdFn) as newest:
newestID = newest.read()
else:
print("First time for", ttype)
if os.path.exists(thingJSONFn):
print("No " + ttype + "-newest, but " + ttype +".json exists! Aborting!")
raise Exception
else:
newestID = None
return newestID
def get_things(ttype, me, userDir, newestID):
print("Getting", ttype)
thingJSONFn = os.path.join(userDir,ttype+'.json')
things = []
newthings = praw.internal._get_redditor_listing(ttype)(me,params=({'before':newestID} if newestID else {}),limit=None)
nnew = 0
try:
for thing in newthings:
things.append(thing.json_dict)
nnew = nnew + 1
print("Got", nnew, "new", ttype)
except TypeError:
print("Got 1 new", ttype)
things.append(newthings.json_dict)
nnew = 1
return things
def load_old_things(ttype, things, userDir):
thingJSONFn = os.path.join(userDir,ttype+'.json')
if os.path.exists(thingJSONFn):
with open(thingJSONFn) as thingsfile:
try:
things.extend(json.load(thingsfile))
except Exception as e:
print('Something went wrong', e, file=sys.stderr)
return things
def save_things(ttype, things, userDir):
if len(things) > 0:
newestIdFn = os.path.join(userDir,ttype+'-newest.txt')
thingJSONFn = os.path.join(userDir,ttype+'.json')
newestID = things[0]['name']
with open(newestIdFn,'w') as newest:
newest.write(newestID)
with open(thingJSONFn,'w') as thingsfile:
json.dump(things,thingsfile)
def make_temp_files(things,archiveDir,urlsFn,tagsFn):
# from RES
imgurHashReStr = r"^https?:\/\/(?:i\.|m\.|edge\.|www\.)*imgur\.com\/(?!gallery)(?!removalrequest)(?!random)(?!memegen)([\w]{5,7}(?:[&,][\w]{5,7})*)(?:#\d+)?[sbtmlh]?(\.(?:jpe?g|gif|png|gifv))?(\?.*)?$"
imgurHashRe = re.compile(imgurHashReStr)
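    # Illustrative matches (added note, derived from the pattern above):
    # "https://i.imgur.com/AbCdE.jpg" and "http://imgur.com/AbCdE" match and
    # capture the hash "AbCdE", while gallery/album URLs such as
    # "http://imgur.com/gallery/AbCdE" are rejected by the negative lookaheads.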
nThings = len(things)
nImgurThings = 0
print('Processing', nThings, 'things.')
contentTypeDict = {"image/jpeg": ".jpg", "image/gif": ".mp4", "image/png": ".png"}
with open(urlsFn,'a') as urlsFile, open(tagsFn,'a') as tagsFile:
for thing in things:
if 'url' in thing:
# Can only process non-album imgur links for now
url = thing['url']
subreddit = (thing['subreddit'] if 'subreddit' in thing else '')
parsed = urlparse.urlparse(url)
if parsed.netloc.find('imgur') > -1:
match = imgurHashRe.search(url)
if match:
groups = match.groups()
headerReq = requests.head('http://i.imgur.com/' + groups[0] + '.jpg')
if 'content-type' in headerReq.headers:
contentType = headerReq.headers['content-type']
if contentType in contentTypeDict:
properURL = 'http://i.imgur.com/' + groups[0] + contentTypeDict[contentType]
print(properURL,file=urlsFile)
print(subreddit, 'i.imgur.com/*' + groups[0] + "*", file=tagsFile)
nImgurThings = nImgurThings + 1
else:
print("Error, content-type not found", contentType, file=sys.stderr)
time.sleep(1)
print('Used imgur logic for', nImgurThings, 'things.')
def run_commands(archiveDir,raDir):
subprocess32.check_call('wget -xN -w 2 -i urls.txt', cwd=archiveDir, shell=True)
subprocess32.check_call(os.path.join(archiveDir,'mktags.sh')+' tags.txt', cwd=archiveDir, shell=True)
if __name__ == "__main__":
main()
|
iamthad/redditarchiver
|
redditarchiver.py
|
Python
|
gpl-3.0
| 8,190
|
# -*- test-case-name: foolscap.test.test_pb -*-
import re, time
from zope.interface import implements, implementsOnly, implementedBy, Interface
from twisted.python import log
from twisted.internet import defer, reactor, task, protocol
from twisted.application import internet
from twisted.web.client import getPage
from twisted.trial import unittest
from foolscap import broker, eventual, negotiate
from foolscap.api import Tub, Referenceable, RemoteInterface, \
eventually, fireEventually, flushEventualQueue
from foolscap.remoteinterface import getRemoteInterface, RemoteMethodSchema, \
UnconstrainedMethod
from foolscap.schema import Any, SetOf, DictOf, ListOf, TupleOf, \
NumberConstraint, ByteStringConstraint, IntegerConstraint, \
UnicodeConstraint, ChoiceOf
from foolscap.referenceable import TubRef
from foolscap.util import allocate_tcp_port
from twisted.python import failure
from twisted.internet.main import CONNECTION_DONE
def getRemoteInterfaceName(obj):
i = getRemoteInterface(obj)
return i.__remote_name__
class Loopback:
# The transport's promise is that write() can be treated as a
# synchronous, isolated function call: specifically, the Protocol's
# dataReceived() and connectionLost() methods shall not be called during
# a call to write().
connected = True
def write(self, data):
eventually(self._write, data)
def _write(self, data):
if not self.connected:
return
try:
# isolate exceptions: if one occurred on a regular TCP transport,
# they would hang up, so duplicate that here.
self.peer.dataReceived(data)
except:
f = failure.Failure()
log.err(f)
print "Loopback.write exception:", f
self.loseConnection(f)
def loseConnection(self, why=failure.Failure(CONNECTION_DONE)):
assert isinstance(why, failure.Failure), why
if self.connected:
self.connected = False
# this one is slightly weird because 'why' is a Failure
eventually(self._loseConnection, why)
def _loseConnection(self, why):
assert isinstance(why, failure.Failure), why
self.protocol.connectionLost(why)
self.peer.connectionLost(why)
def flush(self):
self.connected = False
return fireEventually()
def getPeer(self):
return broker.LoopbackAddress()
def getHost(self):
return broker.LoopbackAddress()
Digits = re.compile("\d*")
MegaSchema1 = DictOf(str,
ListOf(TupleOf(SetOf(int, maxLength=10, mutable=True),
str, bool, int, long, float, None,
UnicodeConstraint(),
ByteStringConstraint(),
Any(), NumberConstraint(),
IntegerConstraint(),
ByteStringConstraint(maxLength=100,
minLength=90,
regexp="\w+"),
ByteStringConstraint(regexp=Digits),
),
maxLength=20),
maxKeys=5)
# containers should convert their arguments into schemas
MegaSchema2 = TupleOf(SetOf(int),
ListOf(int),
DictOf(int, str),
)
MegaSchema3 = ListOf(TupleOf(int,int))
class RIHelper(RemoteInterface):
def set(obj=Any()): return bool
def set2(obj1=Any(), obj2=Any()): return bool
def append(obj=Any()): return Any()
def get(): return Any()
def echo(obj=Any()): return Any()
def defer(obj=Any()): return Any()
def hang(): return Any()
# test one of everything
def megaschema(obj1=MegaSchema1, obj2=MegaSchema2): return None
def mega3(obj1=MegaSchema3): return None
def choice1(obj1=ChoiceOf(ByteStringConstraint(2000), int)): return None
class HelperTarget(Referenceable):
implements(RIHelper)
d = None
def __init__(self, name="unnamed"):
self.name = name
def __repr__(self):
return "<HelperTarget %s>" % self.name
def waitfor(self):
self.d = defer.Deferred()
return self.d
def remote_set(self, obj):
self.obj = obj
if self.d:
self.d.callback(obj)
return True
def remote_set2(self, obj1, obj2):
self.obj1 = obj1
self.obj2 = obj2
return True
def remote_append(self, obj):
self.calls.append(obj)
def remote_get(self):
return self.obj
def remote_echo(self, obj):
self.obj = obj
return obj
def remote_defer(self, obj):
return fireEventually(obj)
def remote_hang(self):
self.d = defer.Deferred()
return self.d
def remote_megaschema(self, obj1, obj2):
self.obj1 = obj1
self.obj2 = obj2
return None
def remote_mega3(self, obj):
self.obj = obj
return None
def remote_choice1(self, obj):
self.obj = obj
return None
class TimeoutError(Exception):
pass
class PollComplete(Exception):
pass
class PollMixin:
def poll(self, check_f, pollinterval=0.01, timeout=None):
# Return a Deferred, then call check_f periodically until it returns
        # True, at which point the Deferred will fire. If check_f raises an
# exception, the Deferred will errback. If the check_f does not
# indicate success within timeout= seconds, the Deferred will
# errback. If timeout=None, no timeout will be enforced, and the loop
# will poll forever (or really until Trial times out).
cutoff = None
if timeout is not None:
cutoff = time.time() + timeout
lc = task.LoopingCall(self._poll, check_f, cutoff)
d = lc.start(pollinterval)
def _convert_done(f):
f.trap(PollComplete)
return None
d.addErrback(_convert_done)
return d
def _poll(self, check_f, cutoff):
if cutoff is not None and time.time() > cutoff:
raise TimeoutError()
if check_f():
raise PollComplete()
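# Hedged usage sketch (added note, not from the original file): a test that
# mixes in PollMixin typically waits for a condition like
#     d = self.poll(lambda: bool(target.calls), timeout=10)
# and attaches callbacks to d; the Deferred fires once the predicate first
# returns True, or errbacks with TimeoutError after the timeout.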
class StallMixin:
def stall(self, res, timeout):
d = defer.Deferred()
reactor.callLater(timeout, d.callback, res)
return d
class TargetMixin(PollMixin, StallMixin):
def setUp(self):
self.loopbacks = []
def setupBrokers(self):
self.targetBroker = broker.Broker(TubRef("targetBroker"))
self.callingBroker = broker.Broker(TubRef("callingBroker"))
t1 = Loopback()
t1.peer = self.callingBroker
t1.protocol = self.targetBroker
self.targetBroker.transport = t1
self.loopbacks.append(t1)
t2 = Loopback()
t2.peer = self.targetBroker
t2.protocol = self.callingBroker
self.callingBroker.transport = t2
self.loopbacks.append(t2)
self.targetBroker.connectionMade()
self.callingBroker.connectionMade()
def tearDown(self):
# returns a Deferred which fires when the Loopbacks are drained
dl = [l.flush() for l in self.loopbacks]
d = defer.DeferredList(dl)
d.addCallback(flushEventualQueue)
return d
def setupTarget(self, target, txInterfaces=False):
# txInterfaces controls what interfaces the sender uses
# False: sender doesn't know about any interfaces
# True: sender gets the actual interface list from the target
# (list): sender uses an artificial interface list
puid = target.processUniqueID()
tracker = self.targetBroker.getTrackerForMyReference(puid, target)
tracker.send()
clid = tracker.clid
if txInterfaces:
iname = getRemoteInterfaceName(target)
else:
iname = None
rtracker = self.callingBroker.getTrackerForYourReference(clid, iname)
rr = rtracker.getRef()
return rr, target
class RIMyTarget(RemoteInterface):
# method constraints can be declared directly:
add1 = RemoteMethodSchema(_response=int, a=int, b=int)
free = UnconstrainedMethod()
# or through their function definitions:
def add(a=int, b=int): return int
#add = schema.callable(add) # the metaclass makes this unnecessary
# but it could be used for adding options or something
def join(a=str, b=str, c=int): return str
def getName(): return str
disputed = RemoteMethodSchema(_response=int, a=int)
def fail(): return str # actually raises an exception
def failstring(): return str # raises a string exception
class RIMyTarget2(RemoteInterface):
__remote_name__ = "RIMyTargetInterface2"
sub = RemoteMethodSchema(_response=int, a=int, b=int)
# For some tests, we want the two sides of the connection to disagree about
# the contents of the RemoteInterface they are using. This is remarkably
# difficult to accomplish within a single process. We do it by creating
# something that behaves just barely enough like a RemoteInterface to work.
class FakeTarget(dict):
pass
RIMyTarget3 = FakeTarget()
RIMyTarget3.__remote_name__ = RIMyTarget.__remote_name__
RIMyTarget3['disputed'] = RemoteMethodSchema(_response=int, a=str)
RIMyTarget3['disputed'].name = "disputed"
RIMyTarget3['disputed'].interface = RIMyTarget3
RIMyTarget3['disputed2'] = RemoteMethodSchema(_response=str, a=int)
RIMyTarget3['disputed2'].name = "disputed"
RIMyTarget3['disputed2'].interface = RIMyTarget3
RIMyTarget3['sub'] = RemoteMethodSchema(_response=int, a=int, b=int)
RIMyTarget3['sub'].name = "sub"
RIMyTarget3['sub'].interface = RIMyTarget3
class Target(Referenceable):
implements(RIMyTarget)
def __init__(self, name=None):
self.calls = []
self.name = name
def getMethodSchema(self, methodname):
return None
def remote_add(self, a, b):
self.calls.append((a,b))
return a+b
remote_add1 = remote_add
def remote_free(self, *args, **kwargs):
self.calls.append((args, kwargs))
return "bird"
def remote_getName(self):
return self.name
def remote_disputed(self, a):
return 24
def remote_fail(self):
raise ValueError("you asked me to fail")
def remote_fail_remotely(self, target):
return target.callRemote("fail")
def remote_failstring(self):
raise "string exceptions are annoying"
class TargetWithoutInterfaces(Target):
# undeclare the RIMyTarget interface
implementsOnly(implementedBy(Referenceable))
class BrokenTarget(Referenceable):
implements(RIMyTarget)
def remote_add(self, a, b):
return "error"
class IFoo(Interface):
# non-remote Interface
pass
class Foo(Referenceable):
implements(IFoo)
class RIDummy(RemoteInterface):
pass
class RITypes(RemoteInterface):
def returns_none(work=bool): return None
def takes_remoteinterface(a=RIDummy): return str
def returns_remoteinterface(work=int): return RIDummy
def takes_interface(a=IFoo): return str
def returns_interface(work=bool): return IFoo
class DummyTarget(Referenceable):
implements(RIDummy)
class TypesTarget(Referenceable):
implements(RITypes)
def remote_returns_none(self, work):
if work:
return None
return "not None"
def remote_takes_remoteinterface(self, a):
# TODO: really, I want to just be able to say:
# if RIDummy.providedBy(a):
iface = a.tracker.interface
if iface and iface == RIDummy:
return "good"
raise RuntimeError("my argument (%s) should provide RIDummy, "
"but doesn't" % a)
def remote_returns_remoteinterface(self, work):
if work == 1:
return DummyTarget()
if work == -1:
return TypesTarget()
return 15
def remote_takes_interface(self, a):
if IFoo.providedBy(a):
return "good"
raise RuntimeError("my argument (%s) should provide IFoo, but doesn't" % a)
def remote_returns_interface(self, work):
if work:
return Foo()
return "not implementor of IFoo"
class ShouldFailMixin:
def shouldFail(self, expected_failure, which, substring,
callable, *args, **kwargs):
assert substring is None or isinstance(substring, str)
d = defer.maybeDeferred(callable, *args, **kwargs)
def done(res):
if isinstance(res, failure.Failure):
if not res.check(expected_failure):
self.fail("got failure %s, was expecting %s"
% (res, expected_failure))
if substring:
self.failUnless(substring in str(res),
"%s: substring '%s' not in '%s'"
% (which, substring, str(res)))
# make the Failure available to a subsequent callback, but
# keep it from triggering an errback
return [res]
else:
self.fail("%s was supposed to raise %s, not get '%s'" %
(which, expected_failure, res))
d.addBoth(done)
return d
tubid_low = "3hemthez7rvgvyhjx2n5kdj7mcyar3yt"
certData_low = \
"""-----BEGIN CERTIFICATE-----
MIIBnjCCAQcCAgCEMA0GCSqGSIb3DQEBBAUAMBcxFTATBgNVBAMUDG5ld3BiX3Ro
aW5neTAeFw0wNjExMjYxODUxMTBaFw0wNzExMjYxODUxMTBaMBcxFTATBgNVBAMU
DG5ld3BiX3RoaW5neTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA1DuK9NoF
fiSreA8rVqYPAjNiUqFelAAYPgnJR92Jry1J/dPA3ieNcCazbjVeKUFjd6+C30XR
APhajsAJFiJdnmgrtVILNrpZDC/vISKQoAmoT9hP/cMqFm8vmUG/+AXO76q63vfH
UmabBVDNTlM8FJpbm9M26cFMrH45G840gA0CAwEAATANBgkqhkiG9w0BAQQFAAOB
gQBCtjgBbF/s4w/16Y15lkTAO0xt8ZbtrvcsFPGTXeporonejnNaJ/aDbJt8Y6nY
ypJ4+LTT3UQwwvqX5xEuJmFhmXGsghRGypbU7Zxw6QZRppBRqz8xMS+y82mMZRQp
ezP+BiTvnoWXzDEP1233oYuELVgOVnHsj+rC017Ykfd7fw==
-----END CERTIFICATE-----
-----BEGIN RSA PRIVATE KEY-----
MIICXQIBAAKBgQDUO4r02gV+JKt4DytWpg8CM2JSoV6UABg+CclH3YmvLUn908De
J41wJrNuNV4pQWN3r4LfRdEA+FqOwAkWIl2eaCu1Ugs2ulkML+8hIpCgCahP2E/9
wyoWby+ZQb/4Bc7vqrre98dSZpsFUM1OUzwUmlub0zbpwUysfjkbzjSADQIDAQAB
AoGBAIvxTykw8dpBt8cMyZjzGoZq93Rg74pLnbCap1x52iXmiRmUHWLfVcYT3tDW
4+X0NfBfjL5IvQ4UtTHXsqYjtvJfXWazYYa4INv5wKDBCd5a7s1YQ8R7mnhlBbRd
nqZ6RpGuQbd3gTGZCkUdbHPSqdCPAjryH9mtWoQZIepcIcoJAkEA77gjO+MPID6v
K6lf8SuFXHDOpaNOAiMlxVnmyQYQoF0PRVSpKOQf83An7R0S/jN3C7eZ6fPbZcyK
SFVktHhYwwJBAOKlgndbSkVzkQCMcuErGZT1AxHNNHSaDo8X3C47UbP3nf60SkxI
boqmpuPvEPUB9iPQdiNZGDU04+FUhe5Vtu8CQHDQHXS/hIzOMy2/BfG/Y4F/bSCy
W7HRzKK1jlCoVAbEBL3B++HMieTMsV17Q0bx/WI8Q2jAZE3iFmm4Fi6APHUCQCMi
5Yb7cBg0QlaDb4vY0q51DXTFC0zIVVl5qXjBWXk8+hFygdIxqHF2RIkxlr9k/nOu
7aGtPkOBX5KfN+QrBaECQQCltPE9YjFoqPezfyvGZoWAKb8bWzo958U3uVBnCw2f
Fs8AQDgI/9gOUXxXno51xQSdCnJLQJ8lThRUa6M7/F1B
-----END RSA PRIVATE KEY-----
"""
tubid_high = "6cxxohyb5ysw6ftpwprbzffxrghbfopm"
certData_high = \
"""-----BEGIN CERTIFICATE-----
MIIBnjCCAQcCAgCEMA0GCSqGSIb3DQEBBAUAMBcxFTATBgNVBAMUDG5ld3BiX3Ro
aW5neTAeFw0wNjExMjYxODUxNDFaFw0wNzExMjYxODUxNDFaMBcxFTATBgNVBAMU
DG5ld3BiX3RoaW5neTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEArfrebvt3
8FE3kKoscY2J/8A4J6CUUUiM7/gl00UvGvvjfdaWbsj4w0o8W2tE0X8Zce3dScSl
D6qVXy6AEc4Flqs0q02w9uNzcdDY6LF3NiK0Lq+JP4OjJeImUBe8wUU0RQxqf/oA
GhgHEZhTp6aAdxBXZFOVDloiW6iqrKH/thcCAwEAATANBgkqhkiG9w0BAQQFAAOB
gQBXi+edp3iz07wxcRztvXtTAjY/9gUwlfa6qSTg/cGqbF0OPa+sISBOFRnnC8qM
ENexlkpiiD4Oyj+UtO5g2CMz0E62cTJTqz6PfexnmKIGwYjq5wZ2tzOrB9AmAzLv
TQQ9CdcKBXLd2GCToh8hBvjyyFwj+yTSbq+VKLMFkBY8Rg==
-----END CERTIFICATE-----
-----BEGIN RSA PRIVATE KEY-----
MIICXgIBAAKBgQCt+t5u+3fwUTeQqixxjYn/wDgnoJRRSIzv+CXTRS8a++N91pZu
yPjDSjxba0TRfxlx7d1JxKUPqpVfLoARzgWWqzSrTbD243Nx0NjosXc2IrQur4k/
g6Ml4iZQF7zBRTRFDGp/+gAaGAcRmFOnpoB3EFdkU5UOWiJbqKqsof+2FwIDAQAB
AoGBAKrU3Vp+Y2u+Y+ARqKgrQai1tq36eAhEQ9dRgtqrYTCOyvcCIR5RCirAFvnx
H1bSBUsgNBw+EZGLfzZBs5FICaUjBOQYBYzfxux6+jlGvdl7idfHs7zogyEYBqye
0VkwzZ0mVXM2ujOD/z/ANkdEn2fGj/VwAYDlfvlyNZMckHp5AkEA5sc1VG3snWmG
lz4967MMzJ7XNpZcTvLEspjpH7hFbnXUHIQ4wPYOP7dhnVvKX1FiOQ8+zXVYDDGB
SK1ABzpc+wJBAMD+imwAhHNBbOb3cPYzOz6XRZaetvep3GfE2wKr1HXP8wchNXWj
Ijq6fJinwPlDugHaeNnfb+Dydd+YEiDTSJUCQDGCk2Jlotmyhfl0lPw4EYrkmO9R
GsSlOKXIQFtZwSuNg9AKXdKn9y6cPQjxZF1GrHfpWWPixNz40e+xm4bxcnkCQQCs
+zkspqYQ/CJVPpHkSnUem83GvAl5IKmp5Nr8oPD0i+fjixN0ljyW8RG+bhXcFaVC
BgTuG4QW1ptqRs5w14+lAkEAuAisTPUDsoUczywyoBbcFo3SVpFPNeumEXrj4MD/
uP+TxgBi/hNYaR18mTbKD4mzVSjqyEeRC/emV3xUpUrdqg==
-----END RSA PRIVATE KEY-----
"""
class BaseMixin(ShouldFailMixin):
def setUp(self):
self.connections = []
self.servers = []
self.services = []
def tearDown(self):
for c in self.connections:
if c.transport:
c.transport.loseConnection()
dl = []
for s in self.servers:
dl.append(defer.maybeDeferred(s.stopListening))
for s in self.services:
dl.append(defer.maybeDeferred(s.stopService))
d = defer.DeferredList(dl)
d.addCallback(flushEventualQueue)
return d
def stall(self, res, timeout):
d = defer.Deferred()
reactor.callLater(timeout, d.callback, res)
return d
def insert_turns(self, res, count):
d = eventual.fireEventually(res)
for i in range(count-1):
d.addCallback(eventual.fireEventually)
return d
def makeServer(self, options={}, listenerOptions={}):
self.tub = tub = Tub(_test_options=options)
tub.startService()
self.services.append(tub)
portnum = allocate_tcp_port()
tub.listenOn("tcp:%d:interface=127.0.0.1" % portnum,
_test_options=listenerOptions)
tub.setLocation("127.0.0.1:%d" % portnum)
self.target = Target()
return tub.registerReference(self.target), portnum
def makeSpecificServer(self, certData,
negotiationClass=negotiate.Negotiation):
self.tub = tub = Tub(certData=certData)
tub.negotiationClass = negotiationClass
tub.startService()
self.services.append(tub)
portnum = allocate_tcp_port()
tub.listenOn("tcp:%d:interface=127.0.0.1" % portnum)
tub.setLocation("127.0.0.1:%d" % portnum)
self.target = Target()
return tub.registerReference(self.target), portnum
def createSpecificServer(self, certData,
negotiationClass=negotiate.Negotiation):
tub = Tub(certData=certData)
tub.negotiationClass = negotiationClass
tub.startService()
self.services.append(tub)
portnum = allocate_tcp_port()
tub.listenOn("tcp:%d:interface=127.0.0.1" % portnum)
tub.setLocation("127.0.0.1:%d" % portnum)
target = Target()
return tub, target, tub.registerReference(target), portnum
def makeNullServer(self):
f = protocol.Factory()
f.protocol = protocol.Protocol # discards everything
s = internet.TCPServer(0, f)
s.startService()
self.services.append(s)
portnum = s._port.getHost().port
return portnum
def makeHTTPServer(self):
try:
from twisted.web import server, resource, static
except ImportError:
raise unittest.SkipTest('this test needs twisted.web')
root = resource.Resource()
root.putChild("", static.Data("hello\n", "text/plain"))
s = internet.TCPServer(0, server.Site(root))
s.startService()
self.services.append(s)
portnum = s._port.getHost().port
return portnum
def connectClient(self, portnum):
tub = Tub()
tub.startService()
self.services.append(tub)
d = tub.getReference("pb://127.0.0.1:%d/hello" % portnum)
return d
def connectHTTPClient(self, portnum):
return getPage("http://127.0.0.1:%d/foo" % portnum)
class MakeTubsMixin:
def makeTubs(self, numTubs, mangleLocation=None):
self.services = []
self.tub_ports = []
for i in range(numTubs):
t = Tub()
t.startService()
self.services.append(t)
portnum = allocate_tcp_port()
self.tub_ports.append(portnum)
t.listenOn("tcp:%d:interface=127.0.0.1" % portnum)
location = "127.0.0.1:%d" % portnum
if mangleLocation:
location = mangleLocation(portnum)
t.setLocation(location)
return self.services
|
david415/foolscap
|
src/foolscap/test/common.py
|
Python
|
mit
| 20,624
|
from inputs import SIXTEENTH
import operator
import functools
sues = SIXTEENTH.input
possibles = []
possibles2 = []
for id,att in sues.items():
if 'children' in att and not att['children'] == 3:
continue
if 'cats' in att and not att['cats'] == 7:
continue
if 'samoyeds' in att and not att['samoyeds'] == 2:
continue
if 'pomeranians' in att and not att['pomeranians'] == 3:
continue
if 'akitas' in att and not att['akitas'] == 0:
continue
if 'vizslas' in att and not att['vizslas'] == 0:
continue
if 'goldfish' in att and not att['goldfish'] == 5:
continue
if 'trees' in att and not att['trees'] == 3:
continue
if 'cars' in att and not att['cars'] == 2:
continue
if 'perfumes' in att and not att['perfumes'] == 1:
continue
possibles.append(id)
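# Part two below: the cats and trees readings have to be greater than the
# target values and the pomeranians and goldfish readings fewer than them,
# so those four comparisons switch from == to > / < in the second pass.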
for id,att in sues.items():
if 'children' in att and not att['children'] == 3:
continue
if 'cats' in att and not att['cats'] > 7:
continue
if 'samoyeds' in att and not att['samoyeds'] == 2:
continue
if 'pomeranians' in att and not att['pomeranians'] < 3:
continue
if 'akitas' in att and not att['akitas'] == 0:
continue
if 'vizslas' in att and not att['vizslas'] == 0:
continue
if 'goldfish' in att and not att['goldfish'] < 5:
continue
if 'trees' in att and not att['trees'] > 3:
continue
if 'cars' in att and not att['cars'] == 2:
continue
if 'perfumes' in att and not att['perfumes'] == 1:
continue
possibles2.append(id)
print possibles
print possibles2
|
lyrixderaven/AdventOfCode
|
16.advent.py
|
Python
|
gpl-2.0
| 1,736
|
from ._filesdb import *
|
stilley2/filesdb
|
filesdb/__init__.py
|
Python
|
mit
| 24
|
#!/usr/bin/env python2
# -*- coding: UTF-8 -*-
# File: run-and-draw-last.py
# Date: Thu Sep 18 15:43:47 2014 -0700
import matplotlib.pyplot as plt
from scipy.misc import imread, imsave
import numpy as np
import os, sys
import glob
from copy import copy
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__), '../')))
from lib.imageutil import stack_vectors
from network_runner import get_nn
if len(sys.argv) < 3:
print "Usage: {0} <model> <input images>".format(sys.argv[0])
sys.exit()
def draw(vec, ofname):
""" draw a vector in dots or lines, also save the vector
ofname: output filename
"""
fig = plt.figure(figsize = (38, 2))
plt.plot(range(len(vec)), vec,'bo')
fig.savefig(ofname)
# also save the vector
fname = ofname[:-3] + 'txt'
with open(fname, 'w') as f:
f.write(repr(vec))
fig = plt.figure()
def gen_file_list():
""" generate image filenames from arguments given in the command line"""
for k in range(2, len(sys.argv)):
pattern = sys.argv[k]
for f in glob.glob(pattern):
if os.path.isfile(f):
yield f
# We have already saved the learned parameters in sys.argv[1]
# build nn with params
model_file = sys.argv[1]
# get a network from the saved file
nn = get_nn(model_file)
print "Running network with model {0}".format(model_file)
# get the weight of the digit '3' at the second position
prms = nn.nn.layers[-1].get_params()['Ws'][2][:,3]
# save the weight in all_vecs, to draw together with another vector later
all_vecs = [prms]
draw(prms, './weight-secondposition-3.png')
for idx, f in enumerate(gen_file_list()):
print "Running {0}...".format(f)
# network accepts images ranging from [0, 1]
img = imread(f) / 255.0
# run the network against the image
results = nn.run(img)
pred = nn.predict(img)
print "Predict: ", pred
#print [results[-1][k].shape for k in range(len(results[-1]))]
outdir = os.path.dirname(f) + '/vec'
try:
os.mkdir(outdir)
except OSError:
pass
# get the representation after the last hidden layer, which is [-2]
# layer[-1] is the output layer.
hidden_vec = results[-2].reshape((results[-2].shape[1],))
# build filename for output
pred = str(pred[0]) + '-' + ''.join(map(str, pred[1:]))
basename = os.path.basename(f)[:-4]
fname = os.path.join(outdir, basename + '-{0}-vec.jpg'.format(pred))
draw(hidden_vec, fname)
# plot color-graph of weight vector and representation
vecs = copy(all_vecs)
vecs.append(hidden_vec)
img = stack_vectors(vecs)
plt.imshow(img)
plt.savefig(os.path.join(outdir, basename + '-{0}-color.jpg'.format(pred)))
print "Results written to {0}.".format(outdir)
|
mfs6174/Deep6174
|
scripts/run-and-draw-last.py
|
Python
|
apache-2.0
| 2,768
|
#!/usr/bin/env python
#
# Copyright (c) 2005 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import getopt
import sys
filenames = sys.argv[1:]
if not filenames:
print """Usage: memlogs.py file [...]
Summarizes the --debug=memory numbers from one or more build logs.
"""
sys.exit(0)
fmt = "%12s %12s %12s %12s %s"
print fmt % ("pre-read", "post-read", "pre-build", "post-build", "")
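# The build logs are expected to contain four lines starting with "Memory "
# (before/after reading SConscript files and before/after building targets,
# as printed by SCons when run with --debug=memory); only the last
# whitespace-separated token of each line, the byte count, is kept below.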
for fname in sys.argv[1:]:
lines = [l for l in open(fname).readlines() if l[:7] == 'Memory ']
t = tuple([l.split()[-1] for l in lines]) + (fname,)
print fmt % t
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
azverkan/scons
|
bin/memlogs.py
|
Python
|
mit
| 1,715
|
class Stock():
def __init__(self, name, symbol, prices=[]):
self.name = name
self.symbol = symbol
self.prices = prices
def high_price(self):
if len(self.prices) == 0:
return 'MISSING PRICES'
return max(self.prices)
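# Note: the prices=[] default in __init__ is evaluated once, so every Stock
# created without an explicit prices argument shares the same list object; the
# usual idiom is prices=None followed by an "if prices is None" fallback.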
apple = Stock('Apple', 'APPL', [500.43, 570.60])
print apple.high_price()
|
schmit/intro-python-course
|
lectures/code/classes_stocks.py
|
Python
|
mit
| 351
|
import os
import sys
import active
import operator
from os import listdir
## Author: James Earle.
##
## This script parses the output files from 20 separate GPP runs
## and creates a file, 'sorted_inds.txt', listing all the runs
## ordered by their fitness on the testing data.
## We don't want to graph all of them, so we will not automate that
## process. Rather, we'd like to hand pick the best, and then 1-2
## that are average.
# Init an empty dictionary
best_of_run_inds = {}
outHourDir = active.listActiveDirectory()
# Append run number to the beginning of this pattern
pattern = "_best_ind_tree.txt"
# List and search through all the files.
allRunFiles = listdir(active.listActiveDirectory())
# Iterate the 20 necessary files
for x in range(0, 20):
patternWithRunNumber = str(x) + pattern
if((patternWithRunNumber) in allRunFiles):
with open(outHourDir + "/" + patternWithRunNumber, 'r') as file:
std_test_fitness = file.readline() # it is the first line in the file
# Add a float representation to the dictionary. Use string slicing.
std_test_fitness = float(std_test_fitness[5:len(std_test_fitness)-1])
best_of_run_inds[patternWithRunNumber] = std_test_fitness
# Dictionaries have no inherent order, so to rank the runs the code below
# builds a list of (filename, fitness) tuples sorted by fitness value.
best_of_run_inds_sorted = sorted(best_of_run_inds.items(), key=operator.itemgetter(1))
# Write the results out to the new file in a human-readable manner.
with open(outHourDir + "/sorted_inds.txt", "w") as out:
for x in range(0, len(best_of_run_inds_sorted)):
out.write(str(best_of_run_inds_sorted[x]) + "\n")
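# Each line of sorted_inds.txt is the repr of a (filename, fitness) tuple,
# e.g. something like ('7_best_ind_tree.txt', 0.125); the run number and
# fitness value in that example are purely illustrative.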
|
JamesEarle/PythonProjects
|
GPP Scripts/sort_runs.py
|
Python
|
mit
| 1,733
|
##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
import GafferScene
Gaffer.Metadata.registerNode(
GafferScene.SceneSwitch,
"description",
"""
Chooses between multiple input scenes, passing through the
chosen input to the output.
""",
plugs = {
"index" : [
"description",
"""
The index of the input which is passed through. A value
of 0 chooses the first input, 1 the second and so on. Values
larger than the number of available inputs wrap back around to
the beginning.
"""
]
}
)
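# For example, with three inputs connected (the "in[0-9]*" plugs registered
# below), indices 0, 1 and 2 select them in order and an index of 3 wraps
# back around to the first input, as the description above states.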
GafferUI.PlugValueWidget.registerCreator( GafferScene.SceneSwitch, "in[0-9]*", None )
|
goddardl/gaffer
|
python/GafferSceneUI/SceneSwitchUI.py
|
Python
|
bsd-3-clause
| 2,393
|
""" Cisco_IOS_XR_snmp_test_trap_act
This module contains a collection of YANG definitions
for Cisco IOS\-XR action package configuration.
Copyright (c) 2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class SnmpColdStartRpc(object):
"""
Generate SNMPv2\-MIB\:\:coldStart
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:snmp-cold-start'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['SnmpColdStartRpc']['meta_info']
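# Usage sketch, kept as a comment so that nothing runs at import time: with
# the meta-model based ydk-py API this generated module targets, a test trap
# RPC is normally handed to an ExecutorService, roughly as follows (the
# device address and credentials are placeholders, not values from this file):
#
#   from ydk.providers import NetconfServiceProvider
#   from ydk.services import ExecutorService
#   provider = NetconfServiceProvider(address='192.0.2.1', port=830,
#                                     username='admin', password='admin')
#   ExecutorService().execute_rpc(provider, SnmpColdStartRpc())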
class SnmpWarmStartRpc(object):
"""
Generate SNMPv2\-MIB\:\:warmStart
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:snmp-warm-start'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['SnmpWarmStartRpc']['meta_info']
class InterfaceLinkUpRpc(object):
"""
Generate IF\-MIB\:\:linkUp
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.InterfaceLinkUpRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = InterfaceLinkUpRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: ifindex
interface index for which to generate LinkUp trap
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.ifindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:interface-link-up/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.ifindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['InterfaceLinkUpRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:interface-link-up'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['InterfaceLinkUpRpc']['meta_info']
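# Sketch for an RPC that carries input data, again comment-only and assuming
# the ExecutorService pattern shown above; the interface index used here is
# a made-up example value:
#
#   rpc = InterfaceLinkUpRpc()
#   rpc.input.ifindex = 17
#   ExecutorService().execute_rpc(provider, rpc)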
class InterfaceLinkDownRpc(object):
"""
Generate IF\-MIB\:\:linkDown
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.InterfaceLinkDownRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = InterfaceLinkDownRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: ifindex
interface index for which to generate LinkDown trap
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.ifindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:interface-link-down/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.ifindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['InterfaceLinkDownRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:interface-link-down'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['InterfaceLinkDownRpc']['meta_info']
class SonetSectionStatusRpc(object):
"""
Generate CISCO\-SONET\-MIB\:\:ciscoSonetSectionStatusChange
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.SonetSectionStatusRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = SonetSectionStatusRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: ifindex
interface index for which to generate ciscoSonetSectionStatusChange trap
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.ifindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:sonet-section-status/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.ifindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['SonetSectionStatusRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:sonet-section-status'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['SonetSectionStatusRpc']['meta_info']
class SonetLineStatusRpc(object):
"""
Generate CISCO\-SONET\-MIB\:\:ciscoSonetLineStatusChange
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.SonetLineStatusRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = SonetLineStatusRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: ifindex
interface index for which to generate ciscoSonetLineStatusChange trap
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.ifindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:sonet-line-status/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.ifindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['SonetLineStatusRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:sonet-line-status'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['SonetLineStatusRpc']['meta_info']
class SonetPathStatusRpc(object):
"""
Generate CISCO\-SONET\-MIB\:\:ciscoSonetPathStatusChange
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.SonetPathStatusRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = SonetPathStatusRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: ifindex
interface index for which to generate ciscoSonetPathStatusChange trap
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.ifindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:sonet-path-status/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.ifindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['SonetPathStatusRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:sonet-path-status'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['SonetPathStatusRpc']['meta_info']
class InfraSyslogMessageGeneratedRpc(object):
"""
Generate CISCO\-SYSLOG\-MIB\:\:clogMessageGenerated
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:infra-syslog-message-generated'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['InfraSyslogMessageGeneratedRpc']['meta_info']
class InfraFlashDeviceInsertedRpc(object):
"""
Generate CISCO\-FLASH\-MIB\:\:ciscoFlashDeviceInsertedNotif
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:infra-flash-device-inserted'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['InfraFlashDeviceInsertedRpc']['meta_info']
class InfraFlashDeviceRemovedRpc(object):
"""
Generate CISCO\-FLASH\-MIB\:\:ciscoFlashDeviceRemovedNotif
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:infra-flash-device-removed'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['InfraFlashDeviceRemovedRpc']['meta_info']
class InfraRedundancyProgressionRpc(object):
"""
Generate CISCO\-RF\-MIB\:\:ciscoRFProgressionNotif
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:infra-redundancy-progression'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['InfraRedundancyProgressionRpc']['meta_info']
class InfraRedundancySwitchRpc(object):
"""
Generate CISCO\-RF\-MIB\:\:ciscoRFSwactNotif
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:infra-redundancy-switch'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['InfraRedundancySwitchRpc']['meta_info']
class InfraBridgeNewRootRpc(object):
"""
Generate BRIDGE\-MIB\:\:newRoot
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:infra-bridge-new-root'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['InfraBridgeNewRootRpc']['meta_info']
class InfraBridgeTopologyChangeRpc(object):
"""
Generate BRIDGE\-MIB\:\:topologyChange
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:infra-bridge-topology-change'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['InfraBridgeTopologyChangeRpc']['meta_info']
class InfraConfigEventRpc(object):
"""
Generate CISCO\-CONFIG\-MAN\-MIB\:\:ciscoConfigManEvent
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:infra-config-event'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['InfraConfigEventRpc']['meta_info']
class EntitySensorThresholdNotificationRpc(object):
"""
Generate CISCO\-ENTITY\-SENSOR\-MIB\:\:entSensorThresholdNotification
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.EntitySensorThresholdNotificationRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = EntitySensorThresholdNotificationRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: entindex
entity Physical Index for which to generate trap
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.entindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-sensor-threshold-notification/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.entindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntitySensorThresholdNotificationRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-sensor-threshold-notification'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntitySensorThresholdNotificationRpc']['meta_info']
class EntityFruPowerStatusChangeFailedRpc(object):
"""
oper status changed to failed
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.EntityFruPowerStatusChangeFailedRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = EntityFruPowerStatusChangeFailedRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: entindex
entity Physical Index for which to generate trap
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.entindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-fru-power-status-change-failed/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.entindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntityFruPowerStatusChangeFailedRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-fru-power-status-change-failed'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntityFruPowerStatusChangeFailedRpc']['meta_info']
class EntityFruModuleStatusChangeUpRpc(object):
"""
FRU trap: module status changed to ok
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.EntityFruModuleStatusChangeUpRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = EntityFruModuleStatusChangeUpRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: entindex
entity Physical Index for which to generate trap
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.entindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-fru-module-status-change-up/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.entindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntityFruModuleStatusChangeUpRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-fru-module-status-change-up'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntityFruModuleStatusChangeUpRpc']['meta_info']
class EntityFruModuleStatusChangeDownRpc(object):
"""
FRU trap: module status changed to failed
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.EntityFruModuleStatusChangeDownRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = EntityFruModuleStatusChangeDownRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: entindex
entity Physical Index for which to generate trap
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.entindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-fru-module-status-change-down/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.entindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntityFruModuleStatusChangeDownRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-fru-module-status-change-down'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntityFruModuleStatusChangeDownRpc']['meta_info']
class EntityFruFanTrayOperStatusUpRpc(object):
"""
Generate CISCO\-ENTITY\-FRU\-CONTROL\-MIB\:\:cefcFanTrayStatusChange
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.EntityFruFanTrayOperStatusUpRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = EntityFruFanTrayOperStatusUpRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: entindex
entity Physical Index for which to generate trap
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.entindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-fru-fan-tray-oper-status-up/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.entindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntityFruFanTrayOperStatusUpRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-fru-fan-tray-oper-status-up'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntityFruFanTrayOperStatusUpRpc']['meta_info']
class EntityFruFanTrayInsertedRpc(object):
"""
Generate CISCO\-ENTITY\-FRU\-CONTROL\-MIB\:\:cefcFRUInserted
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.EntityFruFanTrayInsertedRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = EntityFruFanTrayInsertedRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: entindex
entity Physical Index for which to generate trap
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.entindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-fru-fan-tray-inserted/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.entindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntityFruFanTrayInsertedRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-fru-fan-tray-inserted'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntityFruFanTrayInsertedRpc']['meta_info']
class EntityFruFanTrayRemovedRpc(object):
"""
Generate CISCO\-ENTITY\-FRU\-CONTROL\-MIB\:\:cefcFRURemoved
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.EntityFruFanTrayRemovedRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = EntityFruFanTrayRemovedRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: entindex
entity Physical Index for which to generate trap
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.entindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-fru-fan-tray-removed/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.entindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntityFruFanTrayRemovedRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:entity-fru-fan-tray-removed'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['EntityFruFanTrayRemovedRpc']['meta_info']
class PlatformHfrBundleDownedLinkRpc(object):
"""
Generate CISCO\-FABRIC\-HFR\-MIB\:\:cfhBundleDownedLinkNotification
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.PlatformHfrBundleDownedLinkRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = PlatformHfrBundleDownedLinkRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: bundle_name
bundle name for which to generate the trap
**type**\: str
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.bundle_name = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:platform-hfr-bundle-downed-link/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.bundle_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['PlatformHfrBundleDownedLinkRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:platform-hfr-bundle-downed-link'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['PlatformHfrBundleDownedLinkRpc']['meta_info']
class PlatformHfrBundleStateRpc(object):
"""
Generate CISCO\-FABRIC\-HFR\-MIB\:\:cfhBundleStateNotification
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.PlatformHfrBundleStateRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = PlatformHfrBundleStateRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: bundle_name
bundle name for which to generate the trap
**type**\: str
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.bundle_name = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:platform-hfr-bundle-state/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.bundle_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['PlatformHfrBundleStateRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:platform-hfr-bundle-state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['PlatformHfrBundleStateRpc']['meta_info']
class PlatformHfrPlaneStateRpc(object):
"""
Generate CISCO\-FABRIC\-HFR\-MIB\:\:cfhPlaneStateNotification
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.PlatformHfrPlaneStateRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = PlatformHfrPlaneStateRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: plane_id
plane identifier for which to generate the trap
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.plane_id = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:platform-hfr-plane-state/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.plane_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['PlatformHfrPlaneStateRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:platform-hfr-plane-state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['PlatformHfrPlaneStateRpc']['meta_info']
class RoutingBgpEstablishedRpc(object):
"""
Generate BGP4\-MIB\:\:bglEstablishedNotification
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-bgp-established'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingBgpEstablishedRpc']['meta_info']
class RoutingBgpEstablishedRemotePeerRpc(object):
"""
Generate BGP4\-MIB\:\:bglEstablishedNotification remote peer
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.RoutingBgpEstablishedRemotePeerRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = RoutingBgpEstablishedRemotePeerRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: address
BGP remote peer IP address for which to generate the trap
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**mandatory**\: True
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.address = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-bgp-established-remote-peer/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingBgpEstablishedRemotePeerRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-bgp-established-remote-peer'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingBgpEstablishedRemotePeerRpc']['meta_info']
class RoutingBgpStateChangeRpc(object):
"""
Generate CISCO\-BGP\-MIB\:\:cbgpBackwardTransition
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-bgp-state-change'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingBgpStateChangeRpc']['meta_info']
class RoutingBgpStateChangeRemotePeerRpc(object):
"""
Generate CISCO\-BGP\-MIB\:\:cbgpBackwardTransition remote peer
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.RoutingBgpStateChangeRemotePeerRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = RoutingBgpStateChangeRemotePeerRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: address
BGP remote peer IP address for which to generate the trap
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**mandatory**\: True
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.address = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-bgp-state-change-remote-peer/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingBgpStateChangeRemotePeerRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-bgp-state-change-remote-peer'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingBgpStateChangeRemotePeerRpc']['meta_info']
class RoutingOspfNeighborStateChangeRpc(object):
"""
Generate OSPF\-TRAP\-MIB\:\:ospfNbrStateChange
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-ospf-neighbor-state-change'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingOspfNeighborStateChangeRpc']['meta_info']
class RoutingOspfNeighborStateChangeAddressRpc(object):
"""
Generate OSPF\-TRAP\-MIB\:\:ospfNbrStateChange address
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.RoutingOspfNeighborStateChangeAddressRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = RoutingOspfNeighborStateChangeAddressRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: address
neighbor's IP source address for which to generate the trap
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**mandatory**\: True
.. attribute:: ifindex
0 for interfaces having IP addresses or IF\-MIB ifindex of addressless interface
**type**\: int
**range:** 0..2147483647
**mandatory**\: True
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.address = None
self.ifindex = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-ospf-neighbor-state-change-address/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.address is not None:
return True
if self.ifindex is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingOspfNeighborStateChangeAddressRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-ospf-neighbor-state-change-address'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingOspfNeighborStateChangeAddressRpc']['meta_info']
class RoutingMplsLdpSessionDownRpc(object):
"""
Generate MPLS\-LDP\-STD\-MIB\:\:mplsLdpSessionDown
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-mpls-ldp-session-down'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingMplsLdpSessionDownRpc']['meta_info']
class RoutingMplsLdpSessionDownEntityIdRpc(object):
"""
Generate MPLS\-LDP\-STD\-MIB\:\:mplsLdpSessionDown entity\-id
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.RoutingMplsLdpSessionDownEntityIdRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = RoutingMplsLdpSessionDownEntityIdRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: entity_id
entity ldp\-id in x.x.x.x.y.y format where x.x.x.x is the entity IP address and y.y is the label space
**type**\: str
**length:** 23
**mandatory**\: True
.. attribute:: entity_index
entity index for which to generate the trap
**type**\: int
**range:** 1..4294967295
**mandatory**\: True
.. attribute:: peer_id
peer ldp\-id in x.x.x.x.y.y format where x.x.x.x is the entity IP address and y.y is the label space
**type**\: str
**length:** 23
**mandatory**\: True
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.entity_id = None
self.entity_index = None
self.peer_id = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-mpls-ldp-session-down-entity-id/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.entity_id is not None:
return True
if self.entity_index is not None:
return True
if self.peer_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingMplsLdpSessionDownEntityIdRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-mpls-ldp-session-down-entity-id'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingMplsLdpSessionDownEntityIdRpc']['meta_info']
class RoutingMplsTunnelReRoutedRpc(object):
"""
Generate MPLS\-TE\-STD\-MIB\:\:mplsTunnelRerouted
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-mpls-tunnel-re-routed'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingMplsTunnelReRoutedRpc']['meta_info']
class RoutingMplsTunnelReRoutedIndexRpc(object):
"""
Generate MPLS\-TE\-STD\-MIB\:\:mplsTunnelRerouted index
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.RoutingMplsTunnelReRoutedIndexRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = RoutingMplsTunnelReRoutedIndexRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: destination
destination address for which to generate the trap
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**mandatory**\: True
.. attribute:: index
tunnel index for which to generate the trap
**type**\: int
**range:** 0..65535
**mandatory**\: True
.. attribute:: instance
tunnel instance for which to generate the trap
**type**\: int
**range:** 0..65535
**mandatory**\: True
.. attribute:: source
source address for which to generate the trap
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**mandatory**\: True
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.destination = None
self.index = None
self.instance = None
self.source = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-mpls-tunnel-re-routed-index/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.destination is not None:
return True
if self.index is not None:
return True
if self.instance is not None:
return True
if self.source is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingMplsTunnelReRoutedIndexRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-mpls-tunnel-re-routed-index'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingMplsTunnelReRoutedIndexRpc']['meta_info']
class RoutingMplsTunnelReOptimizedRpc(object):
"""
Generate MPLS\-TE\-STD\-MIB\:\:mplsTunnelReoptimized
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-mpls-tunnel-re-optimized'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingMplsTunnelReOptimizedRpc']['meta_info']
class RoutingMplsTunnelReOptimizedIndexRpc(object):
"""
Generate MPLS\-TE\-STD\-MIB\:\:mplsTunnelReoptimized index
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.RoutingMplsTunnelReOptimizedIndexRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = RoutingMplsTunnelReOptimizedIndexRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: destination
destination address for which to generate the trap
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**mandatory**\: True
.. attribute:: index
tunnel index for which to generate the trap
**type**\: int
**range:** 0..65535
**mandatory**\: True
.. attribute:: instance
tunnel instance for which to generate the trap
**type**\: int
**range:** 0..65535
**mandatory**\: True
.. attribute:: source
source address for which to generate the trap
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**mandatory**\: True
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.destination = None
self.index = None
self.instance = None
self.source = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-mpls-tunnel-re-optimized-index/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.destination is not None:
return True
if self.index is not None:
return True
if self.instance is not None:
return True
if self.source is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingMplsTunnelReOptimizedIndexRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-mpls-tunnel-re-optimized-index'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingMplsTunnelReOptimizedIndexRpc']['meta_info']
class RoutingMplsTunnelDownRpc(object):
"""
Generate MPLS\-TE\-STD\-MIB\:\:mplsTunnelDown
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-mpls-tunnel-down'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingMplsTunnelDownRpc']['meta_info']
class RoutingMplsTunnelDownIndexRpc(object):
"""
Generate MPLS\-TE\-STD\-MIB\:\:mplsTunnelDown index
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_snmp_test_trap_act.RoutingMplsTunnelDownIndexRpc.Input>`
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.input = RoutingMplsTunnelDownIndexRpc.Input()
self.input.parent = self
self.is_rpc = True
class Input(object):
"""
.. attribute:: destination
destination address for which to generate the trap
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**mandatory**\: True
.. attribute:: index
tunnel index for which to generate the trap
**type**\: int
**range:** 0..65535
**mandatory**\: True
.. attribute:: instance
tunnel instance for which to generate the trap
**type**\: int
**range:** 0..65535
**mandatory**\: True
.. attribute:: source
source address for which to generate the trap
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**mandatory**\: True
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.parent = None
self.destination = None
self.index = None
self.instance = None
self.source = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-mpls-tunnel-down-index/Cisco-IOS-XR-snmp-test-trap-act:input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
if self.parent is None:
raise YPYError('Parent reference is needed to determine if entity has configuration data')
return self.parent.is_config()
def _has_data(self):
if self.destination is not None:
return True
if self.index is not None:
return True
if self.instance is not None:
return True
if self.source is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingMplsTunnelDownIndexRpc.Input']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:routing-mpls-tunnel-down-index'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.input is not None and self.input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['RoutingMplsTunnelDownIndexRpc']['meta_info']
class AllRpc(object):
"""
generate all the supported traps
"""
_prefix = 'snmp-test-trap-act'
_revision = '2016-10-25'
def __init__(self):
self.is_rpc = True
@property
def _common_path(self):
return '/Cisco-IOS-XR-snmp-test-trap-act:all'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_snmp_test_trap_act as meta
return meta._meta_table['AllRpc']['meta_info']
|
111pontes/ydk-py
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_snmp_test_trap_act.py
|
Python
|
apache-2.0
| 71,452
|
# coding=utf-8
from __future__ import absolute_import
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import logging
from flask import request, jsonify, make_response
from werkzeug.exceptions import BadRequest
from octoprint.events import eventManager, Events
from octoprint.settings import settings
from octoprint.printer import get_connection_options
from octoprint.server import admin_permission
from octoprint.server.api import api
from octoprint.server.util.flask import restricted_access
import octoprint.plugin
import octoprint.util
#~~ settings
@api.route("/settings", methods=["GET"])
def getSettings():
logger = logging.getLogger(__name__)
s = settings()
connectionOptions = get_connection_options()
data = {
"api": {
"enabled": s.getBoolean(["api", "enabled"]),
"key": s.get(["api", "key"]) if admin_permission.can() else "n/a",
"allowCrossOrigin": s.get(["api", "allowCrossOrigin"])
},
"appearance": {
"name": s.get(["appearance", "name"]),
"color": s.get(["appearance", "color"]),
"colorTransparent": s.getBoolean(["appearance", "colorTransparent"]),
"defaultLanguage": s.get(["appearance", "defaultLanguage"])
},
"printer": {
"defaultExtrusionLength": s.getInt(["printerParameters", "defaultExtrusionLength"])
},
"webcam": {
"streamUrl": s.get(["webcam", "stream"]),
"snapshotUrl": s.get(["webcam", "snapshot"]),
"ffmpegPath": s.get(["webcam", "ffmpeg"]),
"bitrate": s.get(["webcam", "bitrate"]),
"ffmpegThreads": s.get(["webcam", "ffmpegThreads"]),
"watermark": s.getBoolean(["webcam", "watermark"]),
"flipH": s.getBoolean(["webcam", "flipH"]),
"flipV": s.getBoolean(["webcam", "flipV"]),
"rotate90": s.getBoolean(["webcam", "rotate90"])
},
"feature": {
"gcodeViewer": s.getBoolean(["gcodeViewer", "enabled"]),
"temperatureGraph": s.getBoolean(["feature", "temperatureGraph"]),
"waitForStart": s.getBoolean(["feature", "waitForStartOnConnect"]),
"alwaysSendChecksum": s.getBoolean(["feature", "alwaysSendChecksum"]),
"sdSupport": s.getBoolean(["feature", "sdSupport"]),
"sdAlwaysAvailable": s.getBoolean(["feature", "sdAlwaysAvailable"]),
"swallowOkAfterResend": s.getBoolean(["feature", "swallowOkAfterResend"]),
"repetierTargetTemp": s.getBoolean(["feature", "repetierTargetTemp"]),
"externalHeatupDetection": s.getBoolean(["feature", "externalHeatupDetection"]),
"keyboardControl": s.getBoolean(["feature", "keyboardControl"]),
"pollWatched": s.getBoolean(["feature", "pollWatched"])
},
"serial": {
"port": connectionOptions["portPreference"],
"baudrate": connectionOptions["baudratePreference"],
"portOptions": connectionOptions["ports"],
"baudrateOptions": connectionOptions["baudrates"],
"autoconnect": s.getBoolean(["serial", "autoconnect"]),
"timeoutConnection": s.getFloat(["serial", "timeout", "connection"]),
"timeoutDetection": s.getFloat(["serial", "timeout", "detection"]),
"timeoutCommunication": s.getFloat(["serial", "timeout", "communication"]),
"timeoutTemperature": s.getFloat(["serial", "timeout", "temperature"]),
"timeoutSdStatus": s.getFloat(["serial", "timeout", "sdStatus"]),
"log": s.getBoolean(["serial", "log"]),
"additionalPorts": s.get(["serial", "additionalPorts"]),
"additionalBaudrates": s.get(["serial", "additionalBaudrates"]),
"longRunningCommands": s.get(["serial", "longRunningCommands"]),
"checksumRequiringCommands": s.get(["serial", "checksumRequiringCommands"]),
"helloCommand": s.get(["serial", "helloCommand"])
},
"folder": {
"uploads": s.getBaseFolder("uploads"),
"timelapse": s.getBaseFolder("timelapse"),
"timelapseTmp": s.getBaseFolder("timelapse_tmp"),
"logs": s.getBaseFolder("logs"),
"watched": s.getBaseFolder("watched")
},
"temperature": {
"profiles": s.get(["temperature", "profiles"]),
"cutoff": s.getInt(["temperature", "cutoff"])
},
"system": {
"actions": s.get(["system", "actions"]),
"events": s.get(["system", "events"])
},
"terminalFilters": s.get(["terminalFilters"]),
"scripts": {
"gcode": {
"afterPrinterConnected": None,
"beforePrinterDisconnected": None,
"beforePrintStarted": None,
"afterPrintCancelled": None,
"afterPrintDone": None,
"beforePrintPaused": None,
"afterPrintResumed": None,
"snippets": dict()
}
},
"server": {
"commands": {
"systemShutdownCommand": s.get(["server", "commands", "systemShutdownCommand"]),
"systemRestartCommand": s.get(["server", "commands", "systemRestartCommand"]),
"serverRestartCommand": s.get(["server", "commands", "serverRestartCommand"])
}
}
}
gcode_scripts = s.listScripts("gcode")
if gcode_scripts:
data["scripts"] = dict(gcode=dict())
for name in gcode_scripts:
data["scripts"]["gcode"][name] = s.loadScript("gcode", name, source=True)
def process_plugin_result(name, result):
if result:
try:
jsonify(test=result)
except:
logger.exception("Error while jsonifying settings from plugin {}, please contact the plugin author about this".format(name))
if not "plugins" in data:
data["plugins"] = dict()
if "__enabled" in result:
del result["__enabled"]
data["plugins"][name] = result
for plugin in octoprint.plugin.plugin_manager().get_implementations(octoprint.plugin.SettingsPlugin):
try:
result = plugin.on_settings_load()
process_plugin_result(plugin._identifier, result)
except TypeError:
logger.warn("Could not load settings for plugin {name} ({version}) since it called super(...)".format(name=plugin._plugin_name, version=plugin._plugin_version))
logger.warn("in a way which has issues due to OctoPrint's dynamic reloading after plugin operations.")
logger.warn("Please contact the plugin's author and ask to update the plugin to use a direct call like")
logger.warn("octoprint.plugin.SettingsPlugin.on_settings_load(self) instead.")
except:
logger.exception("Could not load settings for plugin {name} ({version})".format(version=plugin._plugin_version, name=plugin._plugin_name))
return jsonify(data)
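# A minimal client-side sketch for the settings endpoints defined in this module. The
# host, port and API key are placeholders; OctoPrint serves these handlers under the
# /api prefix and expects the key in the X-Api-Key header:
#
#   import requests
#   headers = {"X-Api-Key": "<api key>"}
#   current = requests.get("http://localhost:5000/api/settings", headers=headers).json()
#   requests.post("http://localhost:5000/api/settings", headers=headers,
#                 json={"appearance": {"name": "My Printer"}})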
@api.route("/settings", methods=["POST"])
@restricted_access
@admin_permission.require(403)
def setSettings():
if not "application/json" in request.headers["Content-Type"]:
return make_response("Expected content-type JSON", 400)
try:
data = request.json
except BadRequest:
return make_response("Malformed JSON body in request", 400)
_saveSettings(data)
return getSettings()
def _saveSettings(data):
logger = logging.getLogger(__name__)
s = settings()
if "api" in data.keys():
if "enabled" in data["api"].keys(): s.setBoolean(["api", "enabled"], data["api"]["enabled"])
if "key" in data["api"].keys(): s.set(["api", "key"], data["api"]["key"], True)
if "allowCrossOrigin" in data["api"].keys(): s.setBoolean(["api", "allowCrossOrigin"], data["api"]["allowCrossOrigin"])
if "appearance" in data.keys():
if "name" in data["appearance"].keys(): s.set(["appearance", "name"], data["appearance"]["name"])
if "color" in data["appearance"].keys(): s.set(["appearance", "color"], data["appearance"]["color"])
if "colorTransparent" in data["appearance"].keys(): s.setBoolean(["appearance", "colorTransparent"], data["appearance"]["colorTransparent"])
if "defaultLanguage" in data["appearance"]: s.set(["appearance", "defaultLanguage"], data["appearance"]["defaultLanguage"])
if "printer" in data.keys():
if "defaultExtrusionLength" in data["printer"]: s.setInt(["printerParameters", "defaultExtrusionLength"], data["printer"]["defaultExtrusionLength"])
if "webcam" in data.keys():
if "streamUrl" in data["webcam"].keys(): s.set(["webcam", "stream"], data["webcam"]["streamUrl"])
if "snapshotUrl" in data["webcam"].keys(): s.set(["webcam", "snapshot"], data["webcam"]["snapshotUrl"])
if "ffmpegPath" in data["webcam"].keys(): s.set(["webcam", "ffmpeg"], data["webcam"]["ffmpegPath"])
if "bitrate" in data["webcam"].keys(): s.set(["webcam", "bitrate"], data["webcam"]["bitrate"])
if "ffmpegThreads" in data["webcam"].keys(): s.setInt(["webcam", "ffmpegThreads"], data["webcam"]["ffmpegThreads"])
if "watermark" in data["webcam"].keys(): s.setBoolean(["webcam", "watermark"], data["webcam"]["watermark"])
if "flipH" in data["webcam"].keys(): s.setBoolean(["webcam", "flipH"], data["webcam"]["flipH"])
if "flipV" in data["webcam"].keys(): s.setBoolean(["webcam", "flipV"], data["webcam"]["flipV"])
if "rotate90" in data["webcam"].keys(): s.setBoolean(["webcam", "rotate90"], data["webcam"]["rotate90"])
if "feature" in data.keys():
if "gcodeViewer" in data["feature"].keys(): s.setBoolean(["gcodeViewer", "enabled"], data["feature"]["gcodeViewer"])
if "temperatureGraph" in data["feature"].keys(): s.setBoolean(["feature", "temperatureGraph"], data["feature"]["temperatureGraph"])
if "waitForStart" in data["feature"].keys(): s.setBoolean(["feature", "waitForStartOnConnect"], data["feature"]["waitForStart"])
if "alwaysSendChecksum" in data["feature"].keys(): s.setBoolean(["feature", "alwaysSendChecksum"], data["feature"]["alwaysSendChecksum"])
if "sdSupport" in data["feature"].keys(): s.setBoolean(["feature", "sdSupport"], data["feature"]["sdSupport"])
if "sdAlwaysAvailable" in data["feature"].keys(): s.setBoolean(["feature", "sdAlwaysAvailable"], data["feature"]["sdAlwaysAvailable"])
if "swallowOkAfterResend" in data["feature"].keys(): s.setBoolean(["feature", "swallowOkAfterResend"], data["feature"]["swallowOkAfterResend"])
if "repetierTargetTemp" in data["feature"].keys(): s.setBoolean(["feature", "repetierTargetTemp"], data["feature"]["repetierTargetTemp"])
if "externalHeatupDetection" in data["feature"].keys(): s.setBoolean(["feature", "externalHeatupDetection"], data["feature"]["externalHeatupDetection"])
if "keyboardControl" in data["feature"].keys(): s.setBoolean(["feature", "keyboardControl"], data["feature"]["keyboardControl"])
if "pollWatched" in data["feature"]: s.setBoolean(["feature", "pollWatched"], data["feature"]["pollWatched"])
if "serial" in data.keys():
if "autoconnect" in data["serial"].keys(): s.setBoolean(["serial", "autoconnect"], data["serial"]["autoconnect"])
if "port" in data["serial"].keys(): s.set(["serial", "port"], data["serial"]["port"])
if "baudrate" in data["serial"].keys(): s.setInt(["serial", "baudrate"], data["serial"]["baudrate"])
if "timeoutConnection" in data["serial"].keys(): s.setFloat(["serial", "timeout", "connection"], data["serial"]["timeoutConnection"])
if "timeoutDetection" in data["serial"].keys(): s.setFloat(["serial", "timeout", "detection"], data["serial"]["timeoutDetection"])
if "timeoutCommunication" in data["serial"].keys(): s.setFloat(["serial", "timeout", "communication"], data["serial"]["timeoutCommunication"])
if "timeoutTemperature" in data["serial"].keys(): s.setFloat(["serial", "timeout", "temperature"], data["serial"]["timeoutTemperature"])
if "timeoutSdStatus" in data["serial"].keys(): s.setFloat(["serial", "timeout", "sdStatus"], data["serial"]["timeoutSdStatus"])
if "additionalPorts" in data["serial"] and isinstance(data["serial"]["additionalPorts"], (list, tuple)): s.set(["serial", "additionalPorts"], data["serial"]["additionalPorts"])
if "additionalBaudrates" in data["serial"] and isinstance(data["serial"]["additionalBaudrates"], (list, tuple)): s.set(["serial", "additionalBaudrates"], data["serial"]["additionalBaudrates"])
if "longRunningCommands" in data["serial"] and isinstance(data["serial"]["longRunningCommands"], (list, tuple)): s.set(["serial", "longRunningCommands"], data["serial"]["longRunningCommands"])
if "checksumRequiringCommands" in data["serial"] and isinstance(data["serial"]["checksumRequiringCommands"], (list, tuple)): s.set(["serial", "checksumRequiringCommands"], data["serial"]["checksumRequiringCommands"])
if "helloCommand" in data["serial"]: s.set(["serial", "helloCommand"], data["serial"]["helloCommand"])
oldLog = s.getBoolean(["serial", "log"])
if "log" in data["serial"].keys(): s.setBoolean(["serial", "log"], data["serial"]["log"])
if oldLog and not s.getBoolean(["serial", "log"]):
# disable debug logging to serial.log
logging.getLogger("SERIAL").debug("Disabling serial logging")
logging.getLogger("SERIAL").setLevel(logging.CRITICAL)
elif not oldLog and s.getBoolean(["serial", "log"]):
# enable debug logging to serial.log
logging.getLogger("SERIAL").setLevel(logging.DEBUG)
logging.getLogger("SERIAL").debug("Enabling serial logging")
if "folder" in data.keys():
if "uploads" in data["folder"].keys(): s.setBaseFolder("uploads", data["folder"]["uploads"])
if "timelapse" in data["folder"].keys(): s.setBaseFolder("timelapse", data["folder"]["timelapse"])
if "timelapseTmp" in data["folder"].keys(): s.setBaseFolder("timelapse_tmp", data["folder"]["timelapseTmp"])
if "logs" in data["folder"].keys(): s.setBaseFolder("logs", data["folder"]["logs"])
if "watched" in data["folder"].keys(): s.setBaseFolder("watched", data["folder"]["watched"])
if "temperature" in data.keys():
if "profiles" in data["temperature"].keys(): s.set(["temperature", "profiles"], data["temperature"]["profiles"])
if "cutoff" in data["temperature"].keys(): s.setInt(["temperature", "cutoff"], data["temperature"]["cutoff"])
if "terminalFilters" in data.keys():
s.set(["terminalFilters"], data["terminalFilters"])
if "system" in data.keys():
if "actions" in data["system"].keys(): s.set(["system", "actions"], data["system"]["actions"])
if "events" in data["system"].keys(): s.set(["system", "events"], data["system"]["events"])
# if "scripts" in data:
# if "gcode" in data["scripts"] and isinstance(data["scripts"]["gcode"], dict):
# for name, script in data["scripts"]["gcode"].items():
# if name == "snippets":
# continue
# s.saveScript("gcode", name, script.replace("\r\n", "\n").replace("\r", "\n"))
#
if "server" in data:
if "commands" in data["server"]:
if "systemShutdownCommand" in data["server"]["commands"].keys(): s.set(["server", "commands", "systemShutdownCommand"], data["server"]["commands"]["systemShutdownCommand"])
if "systemRestartCommand" in data["server"]["commands"].keys(): s.set(["server", "commands", "systemRestartCommand"], data["server"]["commands"]["systemRestartCommand"])
if "serverRestartCommand" in data["server"]["commands"].keys(): s.set(["server", "commands", "serverRestartCommand"], data["server"]["commands"]["serverRestartCommand"])
if "plugins" in data:
for plugin in octoprint.plugin.plugin_manager().get_implementations(octoprint.plugin.SettingsPlugin):
plugin_id = plugin._identifier
if plugin_id in data["plugins"]:
try:
plugin.on_settings_save(data["plugins"][plugin_id])
except TypeError:
logger.warn("Could not save settings for plugin {name} ({version}) since it called super(...)".format(name=plugin._plugin_name, version=plugin._plugin_version))
logger.warn("in a way which has issues due to OctoPrint's dynamic reloading after plugin operations.")
logger.warn("Please contact the plugin's author and ask to update the plugin to use a direct call like")
logger.warn("octoprint.plugin.SettingsPlugin.on_settings_save(self, data) instead.")
except:
logger.exception("Could not save settings for plugin {name} ({version})".format(version=plugin._plugin_version, name=plugin._plugin_name))
if s.save():
payload = dict(
config_hash=s.config_hash,
effective_hash=s.effective_hash
)
eventManager().fire(Events.SETTINGS_UPDATED, payload=payload)
|
javivi001/OctoPrint
|
src/octoprint/server/api/settings.py
|
Python
|
agpl-3.0
| 15,741
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2014 Thomas Voegtlin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import socket
import threading
import time
import xmlrpclib
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from electrum import bitcoin, util
from electrum import transaction
from electrum.plugins import BasePlugin, hook
from electrum.i18n import _
from electrum_gui.qt.transaction_dialog import show_transaction
import sys
import traceback
PORT = 12344
HOST = 'ecdsa.net'
server = xmlrpclib.ServerProxy('http://%s:%d'%(HOST,PORT), allow_none=True)
class Listener(util.DaemonThread):
def __init__(self, parent):
util.DaemonThread.__init__(self)
self.daemon = True
self.parent = parent
self.received = set()
self.keyhashes = []
def set_keyhashes(self, keyhashes):
self.keyhashes = keyhashes
def clear(self, keyhash):
server.delete(keyhash)
self.received.remove(keyhash)
def run(self):
while self.running:
if not self.keyhashes:
time.sleep(2)
continue
for keyhash in self.keyhashes:
if keyhash in self.received:
continue
try:
message = server.get(keyhash)
except Exception as e:
self.print_error("cannot contact cosigner pool")
time.sleep(30)
continue
if message:
self.received.add(keyhash)
self.print_error("received message for", keyhash)
self.parent.obj.emit(SIGNAL("cosigner:receive"), keyhash,
message)
# poll every 30 seconds
time.sleep(30)
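# Message flow implemented by this listener together with Plugin.do_send()/on_receive()
# below: the sender encrypts the raw transaction with the cosigner's public key K and
# puts it on the xmlrpc server under Hash(K); each wallet polls every 30 seconds for
# entries stored under the hashes of its own keys, decrypts with the matching private
# key, and deletes the entry once it has been retrieved.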
class Plugin(BasePlugin):
def __init__(self, parent, config, name):
BasePlugin.__init__(self, parent, config, name)
self.listener = None
self.obj = QObject()
self.obj.connect(self.obj, SIGNAL('cosigner:receive'), self.on_receive)
self.keys = []
self.cosigner_list = []
@hook
def on_new_window(self, window):
self.update(window)
@hook
def on_close_window(self, window):
self.update(window)
def is_available(self):
return True
def update(self, window):
wallet = window.wallet
if wallet.wallet_type not in ['2of2', '2of3']:
return
if self.listener is None:
self.print_error("starting listener")
self.listener = Listener(self)
self.listener.start()
elif self.listener:
self.print_error("shutting down listener")
self.listener.stop()
self.listener = None
self.keys = []
self.cosigner_list = []
for key, xpub in wallet.master_public_keys.items():
K = bitcoin.deserialize_xkey(xpub)[-1].encode('hex')
_hash = bitcoin.Hash(K).encode('hex')
if wallet.master_private_keys.get(key):
self.keys.append((key, _hash, window))
else:
self.cosigner_list.append((window, xpub, K, _hash))
if self.listener:
self.listener.set_keyhashes([t[1] for t in self.keys])
@hook
def transaction_dialog(self, d):
d.cosigner_send_button = b = QPushButton(_("Send to cosigner"))
b.clicked.connect(lambda: self.do_send(d.tx))
d.buttons.insert(0, b)
self.transaction_dialog_update(d)
@hook
def transaction_dialog_update(self, d):
if d.tx.is_complete() or d.wallet.can_sign(d.tx):
d.cosigner_send_button.hide()
return
for window, xpub, K, _hash in self.cosigner_list:
if window.wallet == d.wallet and self.cosigner_can_sign(d.tx, xpub):
d.cosigner_send_button.show()
break
else:
d.cosigner_send_button.hide()
def cosigner_can_sign(self, tx, cosigner_xpub):
from electrum.transaction import x_to_xpub
xpub_set = set([])
for txin in tx.inputs:
for x_pubkey in txin['x_pubkeys']:
xpub = x_to_xpub(x_pubkey)
if xpub:
xpub_set.add(xpub)
return cosigner_xpub in xpub_set
def do_send(self, tx):
for window, xpub, K, _hash in self.cosigner_list:
if not self.cosigner_can_sign(tx, xpub):
continue
message = bitcoin.encrypt_message(tx.raw, K)
try:
server.put(_hash, message)
except Exception as e:
traceback.print_exc(file=sys.stdout)
window.show_message("Failed to send transaction to cosigning pool.")
return
window.show_message("Your transaction was sent to the cosigning pool.\nOpen your cosigner wallet to retrieve it.")
def on_receive(self, keyhash, message):
self.print_error("signal arrived for", keyhash)
for key, _hash, window in self.keys:
if _hash == keyhash:
break
else:
self.print_error("keyhash not found")
return
wallet = window.wallet
if wallet.use_encryption:
password = window.password_dialog('An encrypted transaction was retrieved from cosigning pool.\nPlease enter your password to decrypt it.')
if not password:
return
else:
password = None
if not window.question(_("An encrypted transaction was retrieved from cosigning pool.\nDo you want to open it now?")):
return
xprv = wallet.get_master_private_key(key, password)
if not xprv:
return
try:
k = bitcoin.deserialize_xkey(xprv)[-1].encode('hex')
EC = bitcoin.EC_KEY(k.decode('hex'))
message = EC.decrypt_message(message)
except Exception as e:
traceback.print_exc(file=sys.stdout)
window.show_message(str(e))
return
self.listener.clear(keyhash)
tx = transaction.Transaction(message)
show_transaction(tx, window, prompt_if_unsaved=True)
|
akshayaurora/electrum
|
plugins/cosigner_pool/qt.py
|
Python
|
gpl-3.0
| 6,879
|
#! /usr/bin/env python
import openturns as ot
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
from openturns.viewer import View
import time
ot.RandomGenerator.SetSeed(0)
ot.Log.Show(ot.Log.INFO)
# Bounds are [0,1]^dimension
dimension = 50
# Size of sample
size = 100
# Factory: lhs generates
lhsDesign = ot.LHSExperiment(ot.ComposedDistribution([ot.Uniform(0.0, 1.0)] * dimension), size)
lhsDesign.setAlwaysShuffle(True) # randomized
geomProfile = ot.GeometricProfile(10.0, 0.999, 50000)
c2 = ot.SpaceFillingC2()
sa = ot.SimulatedAnnealingLHS(lhsDesign, geomProfile, c2)
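# The annealing run below minimises the centered L2 discrepancy (SpaceFillingC2) under a
# geometric cooling profile (T0 = 10, ratio = 0.999, 50000 iterations); PhiP and the
# minimal inter-point distance are reported alongside as additional space-filling measures.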
tic = time.time()
design = sa.generate()
result = sa.getResult()
toc = time.time()
dt1 = toc-tic
print("time=%f"%dt1)
print("dimension=%d, size=%d,sa=%s"%(dimension, size, sa))
print(str(result.getOptimalValue())+" c2="+str(result.getC2())+" phiP="+str(result.getPhiP())+" minDist="+str(result.getMinDist()))
crit = result.drawHistoryCriterion()
proba = result.drawHistoryProbability()
temp = result.drawHistoryTemperature()
pp = PdfPages('large_OTLHS.pdf')
# Criterion
fig = View(crit, plot_kwargs={'color':'blue'}).getFigure()
fig.savefig("otlhs_c2_crit_big.png")
pp.savefig(fig)
plt.close(fig)
# Proba
fig = View(proba, plot_kwargs={'marker': 'o', 'ms': 0.6}, axes_kwargs={'ylim': [-0.05, 1.05]}).getFigure()
fig.savefig("lhs_c2_proba_big.png")
pp.savefig(fig)
plt.close(fig)
# Temperature
fig = View(temp).getFigure()
pp.savefig(fig)
plt.close(fig)
minDist = ot.SpaceFillingMinDist()
sa = ot.SimulatedAnnealingLHS(lhsDesign, geomProfile, minDist)
tic = time.time()
design = sa.generate()
result = sa.getResult()
toc = time.time()
dt2 = toc-tic
print("time=%f"%dt2)
print("dimension=%d, size=%d,sa=%s"%(dimension, size, sa))
print(str(result.getOptimalValue())+" c2="+str(result.getC2())+" phiP="+str(result.getPhiP())+" minDist="+str(result.getMinDist()))
crit = result.drawHistoryCriterion()
proba = result.drawHistoryProbability()
temp = result.drawHistoryTemperature()
# Criterion
fig = View(crit, plot_kwargs={'color':'blue'}).getFigure()
fig.savefig("otlhs_mindist_crit_big.png")
pp.savefig(fig)
plt.close(fig)
# Proba
fig = View(proba, plot_kwargs={'marker': 'o', 'ms': 0.6}, axes_kwargs={'ylim': [-0.05, 1.05]}).getFigure()
fig.savefig("lhs_mindist_proba_big.png")
pp.savefig(fig)
plt.close(fig)
# Temperature
fig = View(temp).getFigure()
pp.savefig(fig)
plt.close(fig)
pp.close()
|
aurelieladier/openturns
|
validation/src/optimal_lhs/validate_SA_big.py
|
Python
|
lgpl-3.0
| 2,430
|
from django import forms
class etl_form(forms.Form):
csv_file = forms.CharField(label='location of CSV file to load',
max_length=100)
|
PatrickCoffey/TLRweb
|
TLRweb/TLR_ETL/forms.py
|
Python
|
gpl-2.0
| 172
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
import multiprocessing
import numpy as np
import paddle.fluid as fluid
from paddle.fluid import core
from paddle.fluid.reader import _reader_process_loop
from paddle.fluid.framework import _test_eager_guard
if sys.version_info[0] == 2:
import Queue as queue
else:
import queue
def get_random_images_and_labels(image_shape, label_shape):
image = np.random.random(size=image_shape).astype('float32')
label = np.random.random(size=label_shape).astype('int64')
return image, label
def batch_generator_creator(batch_size, batch_num):
def __reader__():
for _ in range(batch_num):
batch_image, batch_label = get_random_images_and_labels(
[batch_size, 784], [batch_size, 1])
yield batch_image, batch_label
return __reader__
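# Each batch yielded by the generator above is an (image, label) pair with shapes
# (batch_size, 784) float32 and (batch_size, 1) int64, e.g.
#
#   reader = batch_generator_creator(8, 4)
#   image, label = next(reader())   # image.shape == (8, 784), label.shape == (8, 1)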
# NOTE: coverage CI can't cover child process code, so need these test.
# Here test child process loop function in main process
class TestDygraphDataLoaderProcess(unittest.TestCase):
def setUp(self):
self.batch_size = 8
self.batch_num = 4
self.epoch_num = 2
self.capacity = 2
def func_test_reader_process_loop(self):
# This unittest's memory mapped files needs to be cleaned manually
def __clear_process__(util_queue):
while True:
try:
util_queue.get_nowait()
except queue.Empty:
break
with fluid.dygraph.guard():
loader = fluid.io.DataLoader.from_generator(
capacity=self.batch_num + 1, use_multiprocess=True)
loader.set_batch_generator(
batch_generator_creator(self.batch_size, self.batch_num),
places=fluid.CPUPlace())
loader._data_queue = queue.Queue(self.batch_num + 1)
_reader_process_loop(loader._batch_reader, loader._data_queue)
# For clean memory mapped files
util_queue = multiprocessing.Queue(self.batch_num + 1)
for _ in range(self.batch_num):
data = loader._data_queue.get(timeout=10)
util_queue.put(data)
# Clean up memory mapped files
clear_process = multiprocessing.Process(
target=__clear_process__, args=(util_queue, ))
clear_process.start()
def test_reader_process_loop(self):
with _test_eager_guard():
self.func_test_reader_process_loop()
self.func_test_reader_process_loop()
def func_test_reader_process_loop_simple_none(self):
def none_sample_generator(batch_num):
def __reader__():
for _ in range(batch_num):
yield None
return __reader__
with fluid.dygraph.guard():
loader = fluid.io.DataLoader.from_generator(
capacity=self.batch_num + 1, use_multiprocess=True)
loader.set_batch_generator(
none_sample_generator(self.batch_num), places=fluid.CPUPlace())
loader._data_queue = queue.Queue(self.batch_num + 1)
exception = None
try:
_reader_process_loop(loader._batch_reader, loader._data_queue)
except ValueError as ex:
exception = ex
self.assertIsNotNone(exception)
def test_reader_process_loop_simple_none(self):
with _test_eager_guard():
self.func_test_reader_process_loop_simple_none()
self.func_test_reader_process_loop_simple_none()
if __name__ == '__main__':
unittest.main()
|
luotao1/Paddle
|
python/paddle/fluid/tests/unittests/test_imperative_data_loader_process.py
|
Python
|
apache-2.0
| 4,212
|
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pysnmp.sf.net/license.html
#
from pysnmp.carrier import sockfix
from pysnmp.carrier.base import AbstractTransportAddress
from pysnmp.carrier.asyncore.dgram.base import DgramSocketTransport
import socket
domainName = snmpUDP6Domain = (1, 3, 6, 1, 2, 1, 100, 1, 2)
class Udp6TransportAddress(tuple, AbstractTransportAddress):
pass
class Udp6SocketTransport(DgramSocketTransport):
sockFamily = socket.has_ipv6 and socket.AF_INET6 or None
addressType = Udp6TransportAddress
def normalizeAddress(self, transportAddress):
if '%' in transportAddress[0]: # strip zone ID
ta = self.addressType((transportAddress[0].split('%')[0],
transportAddress[1],
0, # flowinfo
0)) # scopeid
else:
ta = self.addressType((transportAddress[0],
transportAddress[1], 0, 0))
if (isinstance(transportAddress, self.addressType) and
transportAddress.getLocalAddress()):
return ta.setLocalAddress(transportAddress.getLocalAddress())
else:
return ta.setLocalAddress(self.getLocalAddress())
Udp6Transport = Udp6SocketTransport
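# Behaviour of normalizeAddress() above, derived from the code: a two-tuple with a
# zone ID is expanded to the four-tuple form used by the IPv6 socket API, with the
# zone ID stripped and flowinfo/scope id zeroed, e.g.
#
#   ('fe80::1%eth0', 161)  ->  ('fe80::1', 161, 0, 0)
#   ('2001:db8::1', 162)   ->  ('2001:db8::1', 162, 0, 0)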
|
k11a/snmpconverter
|
pysnmp/carrier/asyncore/dgram/udp6.py
|
Python
|
gpl-3.0
| 1,380
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-07-02 15:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rooms', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='room',
name='number',
field=models.IntegerField(primary_key=True, serialize=False, unique=True),
),
]
|
ViktorMarinov/get-a-room
|
get_a_room/rooms/migrations/0002_auto_20160702_1537.py
|
Python
|
mit
| 471
|
import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
@testing.parameterize(*testing.product({
'shape': [(4, 3, 2), (1,), (1, 2, 3, 4, 5, 6)],
'Wdim': [0, 1, 3],
'dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
@testing.fix_random()
class TestPReLU(unittest.TestCase):
def setUp(self):
# Avoid unstability of numerical grad
self.x = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
self.x[(-0.05 < self.x) & (self.x < 0.05)] = 0.5
self.W = numpy.random.uniform(
-1, 1, self.shape[1:1 + self.Wdim]).astype(self.dtype)
self.gy = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
self.ggx = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
self.ggW = numpy.random.uniform(
-1, 1, self.W.shape).astype(self.dtype)
self.check_backward_options = {'dtype': numpy.float64}
if self.dtype == numpy.float16:
self.check_backward_options.update({'atol': 5e-4, 'rtol': 5e-3})
self.check_double_backward_options = {
'dtype': numpy.float64, 'atol': 5e-4, 'rtol': 5e-3}
if self.dtype == numpy.float16:
self.check_double_backward_options.update(
{'atol': 5e-3, 'rtol': 5e-2})
def check_forward(self, x_data, W_data):
x = chainer.Variable(x_data)
W = chainer.Variable(W_data)
y = functions.prelu(x, W)
self.assertEqual(y.data.dtype, self.dtype)
y_expect = self.x.copy()
masked = numpy.ma.masked_greater_equal(y_expect, 0, copy=False)
shape = (1,) + W.shape + (1,) * (x.ndim - W.ndim - 1)
masked *= self.W.reshape(shape)
testing.assert_allclose(y_expect, y.data)
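# How the expected output above is built: masked_greater_equal hides entries with
# x >= 0, so the in-place multiply scales only the negative entries by W (broadcast
# over the trailing axes). That is exactly PReLU: f(x) = x for x >= 0 and W * x
# otherwise, e.g. x = [-1.0, 2.0] with W = 0.25 gives y = [-0.25, 2.0].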
def test_forward_cpu(self):
self.check_forward(self.x, self.W)
@attr.gpu
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.W))
def check_backward(self, x_data, W_data, y_grad):
gradient_check.check_backward(
functions.prelu, (x_data, W_data), y_grad,
**self.check_backward_options)
def test_backward_cpu(self):
self.check_backward(self.x, self.W, self.gy)
@attr.gpu
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.W),
cuda.to_gpu(self.gy))
def check_double_backward(self, x_data, W_data, y_grad, x_grad_grad,
W_grad_grad):
gradient_check.check_double_backward(
functions.prelu, (x_data, W_data), y_grad,
(x_grad_grad, W_grad_grad),
**self.check_double_backward_options)
def test_double_backward_cpu(self):
self.check_double_backward(self.x, self.W, self.gy, self.ggx, self.ggW)
@attr.gpu
def test_double_backward_gpu(self):
self.check_double_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.W),
cuda.to_gpu(self.gy), cuda.to_gpu(self.ggx),
cuda.to_gpu(self.ggW))
testing.run_module(__name__, __file__)
|
ronekko/chainer
|
tests/chainer_tests/functions_tests/activation_tests/test_prelu.py
|
Python
|
mit
| 3,278
|
#!/usr/bin/env python
#Boa:App:BoaApp
import wx
import frm_sideka_menu
modules ={u'bantuan': [0, u'bantuan', u'bantuan.py'],
u'cari_administrasi': [0, '', u'cari_administrasi.py'],
u'cari_kemiskinan': [0, '', u'cari_kemiskinan.py'],
u'cari_penduduk': [0, '', u'cari_penduduk.py'],
u'data_penduduk': [0, '', u'data_penduduk.py'],
u'edir_surat_masuk': [0, '', u'edir_surat_masuk.py'],
u'edit_anggota': [0, '', u'edit_anggota.py'],
u'edit_data_kemiskinan': [0, '', u'edit_data_kemiskinan.py'],
u'edit_data_penduduk': [0, '', u'edit_data_penduduk.py'],
u'edit_kejadian_kelahiran': [0, '', u'edit_kejadian_kelahiran.py'],
u'edit_kejadian_kematian': [0, '', u'edit_kejadian_kematian.py'],
u'edit_kejadian_lain': [0, '', u'edit_kejadian_lain.py'],
u'edit_kejadian_pindah': [0, '', u'edit_kejadian_pindah.py'],
u'edit_kk': [0, '', u'edit_kk.py'],
u'edit_profil': [0, '', u'edit_profil.py'],
u'edit_surat_keluar': [0, '', u'edit_surat_keluar.py'],
u'frm_sideka_menu': [1, 'Main frame of Application', u'frm_sideka_menu.py'],
u'importdisdukcapil': [0, '', u'importdisdukcapil.py'],
u'importmitradesa': [0, '', u'importmitradesa.py'],
u'input_administrasi_surat': [0, '', u'input_administrasi_surat.py'],
u'input_data_kemiskinan': [0, '', u'input_data_kemiskinan.py'],
u'input_edit_surat': [0, '', u'input_edit_surat.py'],
u'input_indikator_kemiskinan': [0, '', u'input_indikator_kemiskinan.py'],
u'input_profil': [0, '', u'input_profil.py'],
u'kejadian_kelahiran': [0, '', u'kejadian_kelahiran.py'],
u'kejadian_kematian': [0, '', u'kejadian_kematian.py'],
u'kejadian_lain': [0, '', u'kejadian_lain.py'],
u'kejadian_pindah': [0, '', u'kejadian_pindah.py'],
u'keuangan': [0, '', u'keuangan.py'],
u'kk_sementara': [0, '', u'kk_sementara.py'],
u'kk_tetap': [0, '', u'kk_tetap.py'],
u'kunci': [0, '', u'kunci.py'],
u'laporan_administrasi': [0, '', u'laporan_administrasi.py'],
u'laporan_kemiskinan': [0, '', u'laporan_kemiskinan.py'],
u'laporan_penduduk': [0, '', u'laporan_penduduk.py'],
u'laporan_potensi': [0, '', u'laporan_potensi.py'],
u'laporan_profil': [0, '', u'laporan_profil.py'],
u'laporan_statistik': [0, '', u'laporan_statistik.py'],
u'pecah_keluarga': [0, '', u'pecah_keluarga.py'],
u'pembuatan_surat_keluar': [0, '', u'pembuatan_surat_keluar.py'],
u'penyusunan': [0, '', u'penyusunan.py'],
u'pilihanimport': [0, '', u'pilihanimport.py'],
u'piramidapenduduk': [0, '', u'piramidapenduduk.py'],
u'potensi_ekonomi': [0, '', u'potensi_ekonomi.py'],
u'potensi_lahan': [0, '', u'potensi_lahan.py'],
u'potensi_pariwisata': [0, '', u'potensi_pariwisata.py'],
u'potensi_tambak': [0, '', u'potensi_tambak.py'],
u'sinkron_data': [0, '', u'sinkron_data.py'],
u'statistik_administrasi': [0, '', u'statistik_administrasi.py'],
u'statistik_kemiskinan': [0, '', u'statistik_kemiskinan.py'],
u'statistik_penduduk': [0, '', u'statistik_penduduk.py'],
u'statistik_potensi': [0, '', u'statistik_potensi.py'],
u'surat_masuk': [0, '', u'surat_masuk.py'],
u'tambah_anggota_keluarga': [0, '', u'tambah_anggota_keluarga.py']}
class MySplashScreen(wx.SplashScreen):
"""
Create a splash screen widget.
"""
def __init__(self, parent=None):
aBitmap = wx.Image(name = "/opt/sidesa/png/1.png").ConvertToBitmap()
aBitmap1 = wx.Image(name = "/opt/sidesa/png/1.png").ConvertToBitmap()
splashStyle = wx.SPLASH_CENTRE_ON_SCREEN | wx.SPLASH_TIMEOUT
splashDuration = 2000 # milliseconds
wx.SplashScreen.__init__(self, aBitmap1, splashStyle,
splashDuration, parent)
self.Bind(wx.EVT_CLOSE, self.OnExit)
wx.Yield()
def OnExit(self, evt):
self.Hide()
self.main = frm_sideka_menu.create(None)
self.main.Show()
evt.Skip() # Make sure the default handler runs too...
class BoaApp(wx.App):
def OnInit(self):
#self.main = frm_sideka_menu.create(None)
#self.main.Show()
#self.SetTopWindow(self.main)
MySplash = MySplashScreen()
MySplash.Show()
return True
def main():
application = BoaApp(0)
application.MainLoop()
if __name__ == '__main__':
main()
|
gedhe/sidesa2.0
|
sidesa.py
|
Python
|
gpl-2.0
| 4,181
|
from __future__ import division
import sys
import numpy as np
from random import randrange
import math
import random
def get_maxUptake(maxUptake_dict, prop, mean, std, system):
maxUptake = mean
#if system != 'ideal': # uncomment to make 'ideal' systems neutral
while maxUptake <= 0 or maxUptake > 1.0:
maxUptake = np.random.normal(loc=mean, scale=std, size=1)
maxUptake_dict[prop] = maxUptake
return [maxUptake, maxUptake_dict]
def get_SpeciesDispParams(system, DispParamsDict, prop, mean, std):
sp_mean = mean
sp_std = std
sp_std = (randrange(1, 10**5)/10**5) / randrange(10, 100)
DispParamsDict[prop] = [sp_mean, sp_std]
return [[sp_mean, sp_std], DispParamsDict]
def immigration(system, V, Qs2, Slist2, inds2, Tlist, ID, maxUptake_dict, maxUptake_list, DispParamsDict, mean, std, lgp, num_in, PropQ, xcoords, ycoords, zcoords, disp_mu, disp_std):
""" Immigration """
propagules = np.random.logseries(lgp, num_in) # list of propagules
for prop in propagules:
#PropQ = randrange(100,1000)/1000
Qs2.append(PropQ) # add individual's cell quota to cell quota list
Slist2.append(prop) # add species identity to species list
inds2.append(ID)
Tlist.append(0)
scale = 10**5
x = (float(round(random.randrange(scale)))/scale)*V
y = (float(round(random.randrange(scale)))/scale)*V
z = (float(round(random.randrange(scale)))/scale)*V
xcoords.append(x)
ycoords.append(y)
zcoords.append(z)
if prop in maxUptake_dict:
maxUptake = maxUptake_dict[prop]
else:
maxUptake, maxUptake_dict = get_maxUptake(maxUptake_dict, prop, mean, std, system) # maxUptake_method as a 3rd arg
maxUptake_list.append(maxUptake)
if prop in DispParamsDict:
disp_params = DispParamsDict[prop]
else:
disp_params, DispParamsDict = get_SpeciesDispParams(system, DispParamsDict, prop, disp_mu, disp_std) # maxUptake_method as a 3rd arg
ID += 1
return [Qs2, Slist2, inds2, Tlist, ID, maxUptake_dict, maxUptake_list, DispParamsDict, xcoords, ycoords, zcoords]
def growth(Qs2, Slist2, inds2, maxUptake_list, Tlist, TR, Kq, maint, xcoords, ycoords, zcoords):
""" Growth """
Glist = []
for i, val in enumerate(Qs2):
Q = Qs2[i] - maint # individual cell quota minus maint
maxUptake = maxUptake_list[i]
if Q < Kq:
Qs2.pop(i)
Slist2.pop(i)
inds2.pop(i)
xcoords.pop(i)
ycoords.pop(i)
zcoords.pop(i)
maxUptake_list.pop(i)
Tlist.pop(i)
continue
if TR > 0:
g = maxUptake*(1.0 - Kq/Q) # Droop form of g(s) with maintenance costs
#if Q + g > 1.0:
# g = 1.0 - Q
if TR >= g:
Q += g # increase cell quota by uptake
TR -= g # decrease total resources
Glist.append(g) # list of growth rates
elif TR < g:
Q += TR
Glist.append(TR) # list of growth rates
TR = 0
if TR < 0:
print 'Error: total resources is negative!'
sys.exit() # kill the simulation for debugging
if Q >= Kq:
Qs2[i] = Q
elif Q < Kq:
Qs2.pop(i)
Slist2.pop(i)
inds2.pop(i)
xcoords.pop(i)
ycoords.pop(i)
zcoords.pop(i)
maxUptake_list.pop(i)
Tlist.pop(i)
return [Qs2, Slist2, inds2, maxUptake_list, Tlist, TR, Glist, xcoords, ycoords, zcoords]
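# Worked example of the Droop growth term used above: with maxUptake = 0.5, Kq = 0.1
# and a current cell quota Q = 0.4, g = 0.5 * (1.0 - 0.1/0.4) = 0.375, and that much
# is taken from the total resources provided enough remains (TR >= g).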
def reproduction(Qs, Slist, inds, Tlist, maxUptake_list, Kq, ID, maint, xcoords, ycoords, zcoords):
""" Reproduction """
new = 0
N = len(Qs)
for n in range(N):
i = randrange(len(Qs))
Q = Qs[i]
if Q > Kq*2:
if Q > 1.0:
x = [1]
else:
x = np.random.binomial(1, Q, 1)
if x[0] == 1:
Q = Q/2.0
Qs[i] = Q
Qs.append(Q) # individuals produce cells with half Q
Slist.append(Slist[i])
inds.append(ID)
maxUptake_list.append(maxUptake_list[i])
Tlist.append(0)
xcoords.append(xcoords[i])
ycoords.append(ycoords[i])
zcoords.append(zcoords[i])
new += 1
ID += 1
if N > 0:
gofS = new/float(N)
else:
gofS = 0.0  # avoid an undefined gofS (NameError) on the return below when the population is empty
return [Qs, Slist, inds, Tlist, maxUptake_list, Kq, ID, gofS, xcoords, ycoords, zcoords]
def emigration(Qs2, Slist2, inds2, Tlist, maxUptake_list, r, V, xcoords, ycoords, zcoords, system):
""" Emigration """
Qs3 = []
Slist3 = []
inds3 = []
xcoords3 = []
ycoords3 = []
zcoords3 = []
maxUptake_list3 = []
Tlist3 = []
p_out = r/V # per capita chance of flowing out
N = len(Qs2)
for i in range(N):
x = np.random.binomial(1, p_out, 1)
if x[0] == 0:
Qs3.append(Qs2[i]) # individual emigrates, i.e., washed out
Slist3.append(Slist2[i]) # remove species identity from species list
inds3.append(inds2[i])
xcoords3.append(xcoords[i])
ycoords3.append(ycoords[i])
zcoords3.append(zcoords[i])
maxUptake_list3.append(maxUptake_list[i])
Tlist3.append(Tlist[i] + 1)
return [Qs3, Slist3, inds3, Tlist3, maxUptake_list3, xcoords3, ycoords3, zcoords3]
def outflow(TR, r, V):
TR -= (r/V) * TR # Total Resources decrease due to outflow
if TR < 0:
print 'Error: There is a resource debt: TR =',TR,' D =',r/V
sys.exit()
if V <= 0:
print 'Error: There is no volume: V =',V
sys.exit()
return [TR, V]
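# Numeric check of the dilution step above: with r = 1.0, V = 10.0 and TR = 100.0,
# one call leaves TR = 100.0 - (1.0/10.0) * 100.0 = 90.0.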
def dispersal(V, Qs2, Slist2, inds2, Tlist, maxUptake_list, DispParamsDict, Kq, ID, xcoords, ycoords, zcoords, system):
""" Envision the local environment as a 3-dimensional space"""
N = len(inds2)
for i in range(N): # Simulate over individuals
spID = Slist2[i] # unique individual ID
mean, std = DispParamsDict[spID]
go = 'no'
while go == 'no':
if system != 'non-ideal':
scale = 10**5
xcoords[i] = (float(round(randrange(scale)))/scale)*V
ycoords[i] = (float(round(randrange(scale)))/scale)*V
zcoords[i] = (float(round(randrange(scale)))/scale)*V
go = 'yes'
else:
x = np.random.normal(loc=mean, scale=std, size=1)
if xcoords[i] + x > V:
if xcoords[i] - x < 0:
print 'out of bounds for x-coord'
sys.exit()
else:
xcoords[i] = xcoords[i] - x
else:
xcoords[i] = xcoords[i] + x
y = np.random.normal(loc=mean, scale=std, size=1)
if ycoords[i] + y > V:
if ycoords[i] - y < 0:
print 'out of bounds for y-coord'
sys.exit()
else:
ycoords[i] = ycoords[i] - y
else:
ycoords[i] = ycoords[i] + y
z = np.random.normal(loc=mean, scale=std, size=1)
if zcoords[i] + z > V:
if zcoords[i] - z < 0:
print 'out of bounds for z-coord'
sys.exit()
else:
zcoords[i] = zcoords[i] - z
else:
zcoords[i] = zcoords[i] + z
go = 'yes'
return [Qs2, Slist2, inds2, Tlist, maxUptake_list, DispParamsDict, Kq, ID, xcoords, ycoords, zcoords]
|
embaldridge/hydrobide
|
models/BoneYard/BideProcesses/bide.py
|
Python
|
mit
| 9,023
|
DATE_FORMAT = '%Y-%m-%dT%H:%M:%S+00:00'
|
liveblog/liveblog
|
server/liveblog/tests/test_settings.py
|
Python
|
agpl-3.0
| 40
|
import os
import shutil
import sys
import zipfile
import platform
from distutils.core import setup
from distutils.sysconfig import get_python_lib
import py2exe
version = __import__('p2pool').__version__
im64 = '64' in platform.architecture()[0]
if os.path.exists('INITBAK'):
os.remove('INITBAK')
os.rename(os.path.join('p2pool', '__init__.py'), 'INITBAK')
try:
open(os.path.join('p2pool', '__init__.py'), 'wb').write('__version__ = %r%s%sDEBUG = False%s' % (version, os.linesep, os.linesep, os.linesep))
mfcdir = get_python_lib() + '\pythonwin\\'
mfcfiles = [os.path.join(mfcdir, i) for i in ["mfc90.dll", "mfc90u.dll", "mfcm90.dll", "mfcm90u.dll", "Microsoft.VC90.MFC.manifest"]]
bundle = 1
if im64:
bundle = bundle + 2
sys.argv[1:] = ['py2exe']
setup(name='p2pool',
version=version,
description='Peer-to-peer Dash mining pool',
author='Forrest Voight',
author_email='forrest@forre.st',
url='http://p2pool.forre.st/',
data_files=[
('', ['README.md']),
("Microsoft.VC90.MFC", mfcfiles),
('web-static', [
'web-static/d3.v2.min.js',
'web-static/favicon.ico',
'web-static/graphs.html',
'web-static/index.html',
'web-static/share.html',
]),
],
console=['run_p2pool.py'],
options=dict(py2exe=dict(
bundle_files=bundle,
dll_excludes=['w9xpopen.exe', "mswsock.dll", "MSWSOCK.dll"],
includes=['twisted.web.resource', 'ltc_scrypt','vtc_scrypt'],
)),
zipfile=None,
)
finally:
os.remove(os.path.join('p2pool', '__init__.py'))
os.rename('INITBAK', os.path.join('p2pool', '__init__.py'))
win = '32'
if im64:
win = '64'
dir_name = 'p2pool_win' + win + '_' + version
if os.path.exists(dir_name):
shutil.rmtree(dir_name)
os.rename('dist', dir_name)
with zipfile.ZipFile(dir_name + '.zip', 'w', zipfile.ZIP_DEFLATED) as zf:
for dirpath, dirnames, filenames in os.walk(dir_name):
for filename in filenames:
zf.write(os.path.join(dirpath, filename))
print dir_name
|
vertoe/p2pool-drk
|
setup.py
|
Python
|
gpl-3.0
| 2,191
|
# coding: utf-8
import app.resource
class Race(app.resource.IdResource):
fields = {
"name": str,
"date": str,
"leader_id": int,
"state": str,
"description": str
}
defaults = {
"description": ""
}
def __init__(self, application):
self.filename = "race"
super().__init__(application)
def sortfunction(self, resource):
return resource["date"]
def api_update(self, id, **data):
resource = super().api_update(id, **data)
self.sync_stations(resource, data)
return resource
def api_create(self, **data):
data["state"] = "PREPARE"
resource = super().api_create(**data)
self.sync_stations(resource, data)
return resource
def sync_stations(self, resource, data):
stations = self.application.station.findall({"race_id": resource["id"]})
if "stations[]name[]" in data:
stations = []
for i in range(0, len(data["stations[]name[]"])):
station = {
"name": data["stations[]name[]"][i],
"description": data["stations[]description[]"][i],
"race_id": resource["id"],
"position": i
}
stations.append(station)
if len(stations) == 0:
stations.append({
"name": "Station 1",
"description": "",
"race_id": resource["id"],
"position": 0
})
self.application.station.removeall({"race_id": resource["id"]})
for station in stations:
self.application.station.create(station)
self.application.station.save()
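# A minimal sketch of the form payload sync_stations() consumes (keys as used above,
# values are placeholders):
#
#   data = {
#       "stations[]name[]": ["Start", "Finish"],
#       "stations[]description[]": ["", "Finish line"],
#   }
#
# This replaces the race's stations with two records at positions 0 and 1; when the
# keys are absent and the race has no stations yet, a default "Station 1" is created.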
def prepare_response(self, resource):
resource["stations"] = self.application.station.findall({"race_id": resource["id"]})
# EOF
|
DarkLuk42/hn-ias-race
|
app/resources/race.py
|
Python
|
gpl-2.0
| 1,898
|
import select_backport
import select
import unittest
class TestSelectBackport(unittest.TestCase):
def test_alias(self):
self.assert_(select_backport.select is select.select)
self.assert_(select_backport.error is select.error)
if hasattr(select, "poll"):
self.assert_(select_backport.poll is select.poll)
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestSelectBackport))
return suite
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
|
gorakhargosh/select_backport
|
tests/test_select_backport.py
|
Python
|
mit
| 547
|
#
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
The ios lacp fact class
It is in this file the configuration is collected from the device
for a given resource, parsed, and the facts tree is populated
based on the configuration.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from copy import deepcopy
from ansible.module_utils.network.common import utils
from ansible.module_utils.network.ios.argspec.lacp.lacp import LacpArgs
class LacpFacts(object):
""" The ios lacp fact class
"""
def __init__(self, module, subspec='config', options='options'):
self._module = module
self.argument_spec = LacpArgs.argument_spec
spec = deepcopy(self.argument_spec)
if subspec:
if options:
facts_argument_spec = spec[subspec][options]
else:
facts_argument_spec = spec[subspec]
else:
facts_argument_spec = spec
self.generated_spec = utils.generate_dict(facts_argument_spec)
def populate_facts(self, connection, ansible_facts, data=None):
""" Populate the facts for lacp
:param connection: the device connection
:param ansible_facts: Facts dictionary
:param data: previously collected conf
:rtype: dictionary
:returns: facts
"""
if connection:
pass
if not data:
data = connection.get('show lacp sys-id')
obj = {}
if data:
lacp_obj = self.render_config(self.generated_spec, data)
if lacp_obj:
obj = lacp_obj
ansible_facts['ansible_network_resources'].pop('lacp', None)
facts = {}
params = utils.validate_config(self.argument_spec, {'config': obj})
facts['lacp'] = utils.remove_empties(params['config'])
ansible_facts['ansible_network_resources'].update(facts)
return ansible_facts
def render_config(self, spec, conf):
"""
Render config as dictionary structure and delete keys
from spec for null values
:param spec: The facts tree, generated from the argspec
:param conf: The configuration
:rtype: dictionary
:returns: The generated config
"""
config = deepcopy(spec)
config['system']['priority'] = int(conf.split(',')[0])
return utils.remove_empties(config)
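# Illustrative sketch (assumption, not part of the upstream module): on IOS
# devices `show lacp sys-id` typically returns a line such as
# "32768, 5e00.0000.8000"; render_config splits on the comma, casts the first
# token to int and returns something like {'system': {'priority': 32768}},
# which populate_facts then stores under
# ansible_facts['ansible_network_resources']['lacp'].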
|
thaim/ansible
|
lib/ansible/module_utils/network/ios/facts/lacp/lacp.py
|
Python
|
mit
| 2,528
|
"""Utilities for memristor functions."""
try:
    from tools import izip
except ImportError:
    # Fall back to the built-in zip when the optional ``tools`` helper is missing.
    izip = zip
import os
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
PI = np.array(np.pi, dtype='float32')
# Encoding network
def build_multilayer_network(x, n_, f='tanh'):
"""
Parameters
----------
x : tf.tensor, shape (batch_size, d_in)
Input Image
n_ : list of int
List of number of units in each layer
f : str
String describing the Nonlinearity
Returns
-------
v : tf.tensor, shape (batch_size, n_m)
Encoded version of the image.
"""
if f == 'tanh':
f = tf.nn.tanh
elif f == 'relu':
f = tf.nn.relu
# FIXME Pass parameters
n_layers = len(n_) - 1
W_ = [tf.get_variable(name='W{}'.format(i),
shape=(n_[i], n_[i+1])) for i in range(n_layers)]
b_ = [tf.get_variable(name='b{}'.format(i),
shape=(n_[i+1],)) for i in range(n_layers)]
in_ = x
for W, b in zip(W_, b_):
in_ = f(tf.matmul(in_, W) + b)
return in_
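# Illustrative note (an editorial addition, not original code): with
# n_ = [784, 256, 64] and x of shape (batch_size, 784), two weight/bias pairs
# are created (W0: 784x256, W1: 256x64) and the returned tensor has shape
# (batch_size, 64) after two tanh (or relu) layers.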
def normalizer_t(x, new_min, new_max):
    """Linearly rescale a numpy array into the range [new_min, new_max]."""
    x_max = np.amax(x)
    x_min = np.amin(x)
    return ((x - x_min) / (x_max - x_min)) * (new_max - new_min) + new_min
def tensor_scaler(x, new_min, new_max):
    """Linearly rescale a tensorflow tensor into the range [new_min, new_max]."""
    x_max = tf.reduce_max(x)
    x_min = tf.reduce_min(x)
    return ((x - x_min) / (x_max - x_min)) * (new_max - new_min) + new_min
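# Worked example (illustrative, not original code): both helpers apply the same
# min-max rescaling, e.g.
#     normalizer_t(np.array([0., 5., 10.]), -1., 1.)  ->  array([-1., 0., 1.])
# since (x - 0) / (10 - 0) * (1 - (-1)) + (-1) maps 0 -> -1, 5 -> 0, 10 -> 1.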
def gauss_interp(samp, xs, ys, interp_width, ratio=0.75):
"""
Parameters
----------
    samp : tf.tensor, shape (batch_size, h, w, c) where h*w*c == n_m
        Sample values; flattened internally to shape (batch_size, 1, n_m).
xs : tf.tensor (n_p, n_m)
Grid inputs
ys : tf.tensor (n_p, n_m)
Grid outputs
interp_width : float
Spacing between the xs
Returns
-------
interp_func : tf.tensor (batch_size, n_m)
"""
samp_shape = samp.get_shape()
collapsed_samp = tf.reshape(samp,
shape=tf.stack([samp_shape[0], 1, samp_shape[1]*samp_shape[2]*samp_shape[3]]))
xs = tf.cast(tf.expand_dims(xs, 0), tf.float32) # (1, n_p, n_m)
ys = tf.cast(tf.expand_dims(ys, 0), tf.float32) # (1, n_p, n_m)
sig = (ratio * interp_width).astype(np.float32) # spacing of xs
norm_factor = (np.sqrt(2 * np.pi) * sig / interp_width).astype(np.float32)
norm_factor = np.array(norm_factor, dtype=np.float32)
gauss_mean = tf.subtract(collapsed_samp, xs)
gauss = tf.exp(tf.multiply(-0.5, tf.divide(tf.square(gauss_mean), tf.square(sig))))
output = tf.reduce_sum(tf.multiply(tf.divide(ys, norm_factor), gauss), axis=[1])
return output
def memristor_output(v, eps, vs, mus, sigs, interp_width):
"""
Parameters
----------
mu, sig, eps : tf.tensor (batch_size, n_m)
mean, standard deviation, noise
"""
mean = gauss_interp(v, vs, mus, interp_width)
sdev = gauss_interp(v, vs, sigs, interp_width)
return mean + eps * sdev
# Data Iteration Utils
def batch_generator(data, batch_size):
"""
data : array, shape (n_samples, ...)
All of your data in a matrix.
batch_size : int
Batch size.
Yields
------
datum : shape (batch_size, ...)
A batch of data.
"""
n_samples = data.shape[0]
num_batches = int(n_samples / batch_size)
for i in range(num_batches):
yield data[i * batch_size: (i+1) * batch_size]
def file_batch_generator(files, batch_size, directory, max_batches=100):
"""
Generator that takes file names and yields batches of images.
Parameters
----------
files : list of str
File names
batch_size : int
Number of files per batch
directory : str
Base directory of the images.
max_batches : int
Max number of batches.
Yields
-------
batch : array, shape (batch_size, n_features)
A batch of images.
"""
n_samples = len(files)
num_batches = int(n_samples / batch_size)
for i in range(num_batches):
if i >= max_batches:
break
        file_batch = files[i * batch_size:(i + 1) * batch_size]
batch = None
for j, fn in enumerate(file_batch):
img = plt.imread(os.path.join(directory, fn))
if batch is None:
n_features = img.size
batch = np.zeros((batch_size, n_features))
batch[j] = img.ravel()
yield batch
def random_generator(n_features, batch_size):
while True:
yield np.random.randn(batch_size, n_features)
class FileAndNoiseGenerator(object):
"""
Class that handles creation of file and noise generator.
"""
def __init__(self, file_list, base_directory, noise_dim,
max_batches=100):
self.file_list = file_list
self.base_directory = base_directory
self.noise_dim = noise_dim
self.max_batches = max_batches
def get_generator(self, batch_size=20):
image_gen = file_batch_generator(
self.file_list, batch_size, self.base_directory,
max_batches=self.max_batches)
rand_gen = random_generator(self.noise_dim, batch_size)
return izip(image_gen, rand_gen)
class DataAndNoiseGenerator(object):
"""
Object that Handles Creation of Data Generators.
"""
def __init__(self, data, noise_dim):
self.data = data
self.noise_dim = noise_dim
def get_generator(self, batch_size=None):
if batch_size is None:
batch_size = self.data.shape[0]
image_gen = batch_generator(self.data, batch_size)
rand_gen = random_generator(self.noise_dim, batch_size)
return izip(image_gen, rand_gen)
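# Minimal usage sketch (an editorial addition, not part of the original module):
# pair a random data matrix with Gaussian noise batches.
if __name__ == "__main__":
    demo_data = np.random.randn(100, 64)  # 100 samples with 64 features each
    demo_gen = DataAndNoiseGenerator(demo_data, noise_dim=16)
    for images, noise in demo_gen.get_generator(batch_size=20):
        # Each step yields a (20, 64) data batch and a (20, 16) noise batch.
        print(images.shape, noise.shape)
        break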
|
rzarcone/CAEs
|
utils/mem_utils.py
|
Python
|
bsd-2-clause
| 5,633
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""This module contains Google Dataproc operators."""
import inspect
import ntpath
import os
import re
import time
import uuid
import warnings
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Sequence, Set, Tuple, Union
from google.api_core.exceptions import AlreadyExists, NotFound
from google.api_core.retry import Retry, exponential_sleep_generator
from google.cloud.dataproc_v1 import Cluster
from google.protobuf.duration_pb2 import Duration
from google.protobuf.field_mask_pb2 import FieldMask
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator, BaseOperatorLink
from airflow.models.taskinstance import TaskInstance
from airflow.providers.google.cloud.hooks.dataproc import DataprocHook, DataProcJobBuilder
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.utils import timezone
DATAPROC_BASE_LINK = "https://console.cloud.google.com/dataproc"
DATAPROC_JOB_LOG_LINK = DATAPROC_BASE_LINK + "/jobs/{job_id}?region={region}&project={project_id}"
DATAPROC_CLUSTER_LINK = (
DATAPROC_BASE_LINK + "/clusters/{cluster_name}/monitoring?region={region}&project={project_id}"
)
class DataprocJobLink(BaseOperatorLink):
"""Helper class for constructing Dataproc Job link"""
name = "Dataproc Job"
def get_link(self, operator, dttm):
ti = TaskInstance(task=operator, execution_date=dttm)
job_conf = ti.xcom_pull(task_ids=operator.task_id, key="job_conf")
return (
DATAPROC_JOB_LOG_LINK.format(
job_id=job_conf["job_id"],
region=job_conf["region"],
project_id=job_conf["project_id"],
)
if job_conf
else ""
)
class DataprocClusterLink(BaseOperatorLink):
"""Helper class for constructing Dataproc Cluster link"""
name = "Dataproc Cluster"
def get_link(self, operator, dttm):
ti = TaskInstance(task=operator, execution_date=dttm)
cluster_conf = ti.xcom_pull(task_ids=operator.task_id, key="cluster_conf")
return (
DATAPROC_CLUSTER_LINK.format(
cluster_name=cluster_conf["cluster_name"],
region=cluster_conf["region"],
project_id=cluster_conf["project_id"],
)
if cluster_conf
else ""
)
class ClusterGenerator:
"""
Create a new Dataproc Cluster.
:param cluster_name: The name of the DataProc cluster to create. (templated)
:type cluster_name: str
:param project_id: The ID of the google cloud project in which
to create the cluster. (templated)
:type project_id: str
:param num_workers: The # of workers to spin up. If set to zero will
spin up cluster in a single node mode
:type num_workers: int
:param storage_bucket: The storage bucket to use, setting to None lets dataproc
generate a custom one for you
:type storage_bucket: str
:param init_actions_uris: List of GCS uri's containing
dataproc initialization scripts
:type init_actions_uris: list[str]
:param init_action_timeout: Amount of time executable scripts in
init_actions_uris has to complete
:type init_action_timeout: str
:param metadata: dict of key-value google compute engine metadata entries
to add to all instances
:type metadata: dict
:param image_version: the version of software inside the Dataproc cluster
:type image_version: str
:param custom_image: custom Dataproc image for more info see
https://cloud.google.com/dataproc/docs/guides/dataproc-images
:type custom_image: str
:param custom_image_project_id: project id for the custom Dataproc image, for more info see
https://cloud.google.com/dataproc/docs/guides/dataproc-images
:type custom_image_project_id: str
:param custom_image_family: family for the custom Dataproc image,
        family name can be provided using the --family flag while creating a custom image, for more info see
https://cloud.google.com/dataproc/docs/guides/dataproc-images
:type custom_image_family: str
:param autoscaling_policy: The autoscaling policy used by the cluster. Only resource names
including projectid and location (region) are valid. Example:
``projects/[projectId]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]``
:type autoscaling_policy: str
:param properties: dict of properties to set on
config files (e.g. spark-defaults.conf), see
https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters#SoftwareConfig
:type properties: dict
:param optional_components: List of optional cluster components, for more info see
https://cloud.google.com/dataproc/docs/reference/rest/v1/ClusterConfig#Component
:type optional_components: list[str]
:param num_masters: The # of master nodes to spin up
:type num_masters: int
:param master_machine_type: Compute engine machine type to use for the primary node
:type master_machine_type: str
:param master_disk_type: Type of the boot disk for the primary node
(default is ``pd-standard``).
Valid values: ``pd-ssd`` (Persistent Disk Solid State Drive) or
``pd-standard`` (Persistent Disk Hard Disk Drive).
:type master_disk_type: str
:param master_disk_size: Disk size for the primary node
:type master_disk_size: int
:param worker_machine_type: Compute engine machine type to use for the worker nodes
:type worker_machine_type: str
:param worker_disk_type: Type of the boot disk for the worker node
(default is ``pd-standard``).
Valid values: ``pd-ssd`` (Persistent Disk Solid State Drive) or
``pd-standard`` (Persistent Disk Hard Disk Drive).
:type worker_disk_type: str
:param worker_disk_size: Disk size for the worker nodes
:type worker_disk_size: int
:param num_preemptible_workers: The # of preemptible worker nodes to spin up
:type num_preemptible_workers: int
:param labels: dict of labels to add to the cluster
:type labels: dict
:param zone: The zone where the cluster will be located. Set to None to auto-zone. (templated)
:type zone: str
:param network_uri: The network uri to be used for machine communication, cannot be
specified with subnetwork_uri
:type network_uri: str
:param subnetwork_uri: The subnetwork uri to be used for machine communication,
cannot be specified with network_uri
:type subnetwork_uri: str
:param internal_ip_only: If true, all instances in the cluster will only
have internal IP addresses. This can only be enabled for subnetwork
enabled networks
:type internal_ip_only: bool
:param tags: The GCE tags to add to all instances
:type tags: list[str]
:param region: The specified region where the dataproc cluster is created.
:type region: str
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:type gcp_conn_id: str
:param service_account: The service account of the dataproc instances.
:type service_account: str
:param service_account_scopes: The URIs of service account scopes to be included.
:type service_account_scopes: list[str]
    :param idle_delete_ttl: The longest duration that the cluster will stay alive
        while idle. Exceeding this threshold causes the cluster to be auto-deleted.
A duration in seconds.
:type idle_delete_ttl: int
:param auto_delete_time: The time when cluster will be auto-deleted.
:type auto_delete_time: datetime.datetime
:param auto_delete_ttl: The life duration of cluster, the cluster will be
auto-deleted at the end of this duration.
A duration in seconds. (If auto_delete_time is set this parameter will be ignored)
:type auto_delete_ttl: int
:param customer_managed_key: The customer-managed key used for disk encryption
``projects/[PROJECT_STORING_KEYS]/locations/[LOCATION]/keyRings/[KEY_RING_NAME]/cryptoKeys/[KEY_NAME]`` # noqa
:type customer_managed_key: str
"""
def __init__(
self,
project_id: str,
num_workers: Optional[int] = None,
zone: Optional[str] = None,
network_uri: Optional[str] = None,
subnetwork_uri: Optional[str] = None,
internal_ip_only: Optional[bool] = None,
tags: Optional[List[str]] = None,
storage_bucket: Optional[str] = None,
init_actions_uris: Optional[List[str]] = None,
init_action_timeout: str = "10m",
metadata: Optional[Dict] = None,
custom_image: Optional[str] = None,
custom_image_project_id: Optional[str] = None,
custom_image_family: Optional[str] = None,
image_version: Optional[str] = None,
autoscaling_policy: Optional[str] = None,
properties: Optional[Dict] = None,
optional_components: Optional[List[str]] = None,
num_masters: int = 1,
master_machine_type: str = 'n1-standard-4',
master_disk_type: str = 'pd-standard',
master_disk_size: int = 1024,
worker_machine_type: str = 'n1-standard-4',
worker_disk_type: str = 'pd-standard',
worker_disk_size: int = 1024,
num_preemptible_workers: int = 0,
service_account: Optional[str] = None,
service_account_scopes: Optional[List[str]] = None,
idle_delete_ttl: Optional[int] = None,
auto_delete_time: Optional[datetime] = None,
auto_delete_ttl: Optional[int] = None,
customer_managed_key: Optional[str] = None,
**kwargs,
) -> None:
self.project_id = project_id
self.num_masters = num_masters
self.num_workers = num_workers
self.num_preemptible_workers = num_preemptible_workers
self.storage_bucket = storage_bucket
self.init_actions_uris = init_actions_uris
self.init_action_timeout = init_action_timeout
self.metadata = metadata
self.custom_image = custom_image
self.custom_image_project_id = custom_image_project_id
self.custom_image_family = custom_image_family
self.image_version = image_version
self.properties = properties or {}
self.optional_components = optional_components
self.master_machine_type = master_machine_type
self.master_disk_type = master_disk_type
self.master_disk_size = master_disk_size
self.autoscaling_policy = autoscaling_policy
self.worker_machine_type = worker_machine_type
self.worker_disk_type = worker_disk_type
self.worker_disk_size = worker_disk_size
self.zone = zone
self.network_uri = network_uri
self.subnetwork_uri = subnetwork_uri
self.internal_ip_only = internal_ip_only
self.tags = tags
self.service_account = service_account
self.service_account_scopes = service_account_scopes
self.idle_delete_ttl = idle_delete_ttl
self.auto_delete_time = auto_delete_time
self.auto_delete_ttl = auto_delete_ttl
self.customer_managed_key = customer_managed_key
self.single_node = num_workers == 0
if self.custom_image and self.image_version:
raise ValueError("The custom_image and image_version can't be both set")
if self.custom_image_family and self.image_version:
raise ValueError("The image_version and custom_image_family can't be both set")
if self.custom_image_family and self.custom_image:
raise ValueError("The custom_image and custom_image_family can't be both set")
if self.single_node and self.num_preemptible_workers > 0:
raise ValueError("Single node cannot have preemptible workers.")
def _get_init_action_timeout(self) -> dict:
match = re.match(r"^(\d+)([sm])$", self.init_action_timeout)
if match:
val = float(match.group(1))
if match.group(2) == "s":
return {"seconds": int(val)}
elif match.group(2) == "m":
return {"seconds": int(timedelta(minutes=val).total_seconds())}
raise AirflowException(
"DataprocClusterCreateOperator init_action_timeout"
" should be expressed in minutes or seconds. i.e. 10m, 30s"
)
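    # For illustration (an editorial note, not upstream code): "10m" maps to
    # {"seconds": 600} and "30s" to {"seconds": 30}; any other format raises
    # AirflowException.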
def _build_gce_cluster_config(self, cluster_data):
if self.zone:
zone_uri = f'https://www.googleapis.com/compute/v1/projects/{self.project_id}/zones/{self.zone}'
cluster_data['gce_cluster_config']['zone_uri'] = zone_uri
if self.metadata:
cluster_data['gce_cluster_config']['metadata'] = self.metadata
if self.network_uri:
cluster_data['gce_cluster_config']['network_uri'] = self.network_uri
if self.subnetwork_uri:
cluster_data['gce_cluster_config']['subnetwork_uri'] = self.subnetwork_uri
if self.internal_ip_only:
if not self.subnetwork_uri:
raise AirflowException("Set internal_ip_only to true only when you pass a subnetwork_uri.")
cluster_data['gce_cluster_config']['internal_ip_only'] = True
if self.tags:
cluster_data['gce_cluster_config']['tags'] = self.tags
if self.service_account:
cluster_data['gce_cluster_config']['service_account'] = self.service_account
if self.service_account_scopes:
cluster_data['gce_cluster_config']['service_account_scopes'] = self.service_account_scopes
return cluster_data
def _build_lifecycle_config(self, cluster_data):
if self.idle_delete_ttl:
cluster_data['lifecycle_config']['idle_delete_ttl'] = {"seconds": self.idle_delete_ttl}
if self.auto_delete_time:
utc_auto_delete_time = timezone.convert_to_utc(self.auto_delete_time)
cluster_data['lifecycle_config']['auto_delete_time'] = utc_auto_delete_time.strftime(
'%Y-%m-%dT%H:%M:%S.%fZ'
)
elif self.auto_delete_ttl:
cluster_data['lifecycle_config']['auto_delete_ttl'] = {"seconds": int(self.auto_delete_ttl)}
return cluster_data
def _build_cluster_data(self):
if self.zone:
master_type_uri = (
f"projects/{self.project_id}/zones/{self.zone}/machineTypes/{self.master_machine_type}"
)
worker_type_uri = (
f"projects/{self.project_id}/zones/{self.zone}/machineTypes/{self.worker_machine_type}"
)
else:
master_type_uri = self.master_machine_type
worker_type_uri = self.worker_machine_type
cluster_data = {
'gce_cluster_config': {},
'master_config': {
'num_instances': self.num_masters,
'machine_type_uri': master_type_uri,
'disk_config': {
'boot_disk_type': self.master_disk_type,
'boot_disk_size_gb': self.master_disk_size,
},
},
'worker_config': {
'num_instances': self.num_workers,
'machine_type_uri': worker_type_uri,
'disk_config': {
'boot_disk_type': self.worker_disk_type,
'boot_disk_size_gb': self.worker_disk_size,
},
},
'secondary_worker_config': {},
'software_config': {},
'lifecycle_config': {},
'encryption_config': {},
'autoscaling_config': {},
}
if self.num_preemptible_workers > 0:
cluster_data['secondary_worker_config'] = {
'num_instances': self.num_preemptible_workers,
'machine_type_uri': worker_type_uri,
'disk_config': {
'boot_disk_type': self.worker_disk_type,
'boot_disk_size_gb': self.worker_disk_size,
},
'is_preemptible': True,
}
if self.storage_bucket:
cluster_data['config_bucket'] = self.storage_bucket
if self.image_version:
cluster_data['software_config']['image_version'] = self.image_version
elif self.custom_image:
project_id = self.custom_image_project_id or self.project_id
custom_image_url = (
f'https://www.googleapis.com/compute/beta/projects/{project_id}'
f'/global/images/{self.custom_image}'
)
cluster_data['master_config']['image_uri'] = custom_image_url
if not self.single_node:
cluster_data['worker_config']['image_uri'] = custom_image_url
elif self.custom_image_family:
project_id = self.custom_image_project_id or self.project_id
custom_image_url = (
'https://www.googleapis.com/compute/beta/projects/'
f'{project_id}/global/images/family/{self.custom_image_family}'
)
cluster_data['master_config']['image_uri'] = custom_image_url
if not self.single_node:
cluster_data['worker_config']['image_uri'] = custom_image_url
cluster_data = self._build_gce_cluster_config(cluster_data)
if self.single_node:
self.properties["dataproc:dataproc.allow.zero.workers"] = "true"
if self.properties:
cluster_data['software_config']['properties'] = self.properties
if self.optional_components:
cluster_data['software_config']['optional_components'] = self.optional_components
cluster_data = self._build_lifecycle_config(cluster_data)
if self.init_actions_uris:
init_actions_dict = [
{'executable_file': uri, 'execution_timeout': self._get_init_action_timeout()}
for uri in self.init_actions_uris
]
cluster_data['initialization_actions'] = init_actions_dict
if self.customer_managed_key:
cluster_data['encryption_config'] = {'gce_pd_kms_key_name': self.customer_managed_key}
if self.autoscaling_policy:
cluster_data['autoscaling_config'] = {'policy_uri': self.autoscaling_policy}
return cluster_data
def make(self):
"""
Helper method for easier migration.
:return: Dict representing Dataproc cluster.
"""
return self._build_cluster_data()
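# Illustrative sketch (an editorial addition, not upstream code): assuming a
# project, zone and bucket of your own, ClusterGenerator.make() builds the
# ``cluster_config`` dict that DataprocCreateClusterOperator expects, e.g.:
#
#     generator = ClusterGenerator(
#         project_id="my-project",
#         zone="europe-west1-b",
#         num_workers=2,
#         storage_bucket="my-bucket",
#     )
#     create_cluster = DataprocCreateClusterOperator(
#         task_id="create_cluster",
#         project_id="my-project",
#         region="europe-west1",
#         cluster_name="cluster-1",
#         cluster_config=generator.make(),
#     )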
class DataprocCreateClusterOperator(BaseOperator):
"""
Create a new cluster on Google Cloud Dataproc. The operator will wait until the
creation is successful or an error occurs in the creation process. If the cluster
already exists and ``use_if_exists`` is True then the operator will:
- if cluster state is ERROR then delete it if specified and raise error
- if cluster state is CREATING wait for it and then check for ERROR state
- if cluster state is DELETING wait for it and then create new cluster
Please refer to
https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters
for a detailed explanation on the different parameters. Most of the configuration
parameters detailed in the link are available as a parameter to this operator.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:DataprocCreateClusterOperator`
:param project_id: The ID of the google cloud project in which
to create the cluster. (templated)
:type project_id: str
:param cluster_name: Name of the cluster to create
:type cluster_name: str
:param labels: Labels that will be assigned to created cluster
:type labels: Dict[str, str]
:param cluster_config: Required. The cluster config to create.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.dataproc_v1.types.ClusterConfig`
:type cluster_config: Union[Dict, google.cloud.dataproc_v1.types.ClusterConfig]
:param region: The specified region where the dataproc cluster is created.
:type region: str
:param delete_on_error: If true the cluster will be deleted if created with ERROR state. Default
value is true.
:type delete_on_error: bool
:param use_if_exists: If true use existing cluster
:type use_if_exists: bool
:param request_id: Optional. A unique id used to identify the request. If the server receives two
``DeleteClusterRequest`` requests with the same id, then the second request will be ignored and the
first ``google.longrunning.Operation`` created and stored in the backend is returned.
:type request_id: str
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
retried.
:type retry: google.api_core.retry.Retry
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:type timeout: float
:param metadata: Additional metadata that is provided to the method.
:type metadata: Sequence[Tuple[str, str]]
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:type gcp_conn_id: str
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
:type impersonation_chain: Union[str, Sequence[str]]
"""
template_fields = (
'project_id',
'region',
'cluster_config',
'cluster_name',
'labels',
'impersonation_chain',
)
template_fields_renderers = {'cluster_config': 'json'}
operator_extra_links = (DataprocClusterLink(),)
def __init__(
self,
*,
cluster_name: str,
region: Optional[str] = None,
project_id: Optional[str] = None,
cluster_config: Optional[Dict] = None,
labels: Optional[Dict] = None,
request_id: Optional[str] = None,
delete_on_error: bool = True,
use_if_exists: bool = True,
retry: Optional[Retry] = None,
timeout: float = 1 * 60 * 60,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
**kwargs,
) -> None:
if region is None:
warnings.warn(
"Default region value `global` will be deprecated. Please, provide region value.",
DeprecationWarning,
stacklevel=2,
)
region = 'global'
# TODO: remove one day
if cluster_config is None:
warnings.warn(
f"Passing cluster parameters by keywords to `{type(self).__name__}` will be deprecated. "
"Please provide cluster_config object using `cluster_config` parameter. "
"You can use `airflow.dataproc.ClusterGenerator.generate_cluster` "
"method to obtain cluster object.",
DeprecationWarning,
stacklevel=1,
)
# Remove result of apply defaults
if 'params' in kwargs:
del kwargs['params']
# Create cluster object from kwargs
if project_id is None:
raise AirflowException(
"project_id argument is required when building cluster from keywords parameters"
)
kwargs["project_id"] = project_id
cluster_config = ClusterGenerator(**kwargs).make()
# Remove from kwargs cluster params passed for backward compatibility
cluster_params = inspect.signature(ClusterGenerator.__init__).parameters
for arg in cluster_params:
if arg in kwargs:
del kwargs[arg]
super().__init__(**kwargs)
self.cluster_config = cluster_config
self.cluster_name = cluster_name
self.labels = labels
self.project_id = project_id
self.region = region
self.request_id = request_id
self.retry = retry
self.timeout = timeout
self.metadata = metadata
self.gcp_conn_id = gcp_conn_id
self.delete_on_error = delete_on_error
self.use_if_exists = use_if_exists
self.impersonation_chain = impersonation_chain
def _create_cluster(self, hook: DataprocHook):
operation = hook.create_cluster(
project_id=self.project_id,
region=self.region,
cluster_name=self.cluster_name,
labels=self.labels,
cluster_config=self.cluster_config,
request_id=self.request_id,
retry=self.retry,
timeout=self.timeout,
metadata=self.metadata,
)
cluster = operation.result()
self.log.info("Cluster created.")
return cluster
def _delete_cluster(self, hook):
self.log.info("Deleting the cluster")
hook.delete_cluster(region=self.region, cluster_name=self.cluster_name, project_id=self.project_id)
def _get_cluster(self, hook: DataprocHook) -> Cluster:
return hook.get_cluster(
project_id=self.project_id,
region=self.region,
cluster_name=self.cluster_name,
retry=self.retry,
timeout=self.timeout,
metadata=self.metadata,
)
def _handle_error_state(self, hook: DataprocHook, cluster: Cluster) -> None:
if cluster.status.state != cluster.status.State.ERROR:
return
self.log.info("Cluster is in ERROR state")
gcs_uri = hook.diagnose_cluster(
region=self.region, cluster_name=self.cluster_name, project_id=self.project_id
)
self.log.info('Diagnostic information for cluster %s available at: %s', self.cluster_name, gcs_uri)
if self.delete_on_error:
self._delete_cluster(hook)
raise AirflowException("Cluster was created but was in ERROR state.")
raise AirflowException("Cluster was created but is in ERROR state")
def _wait_for_cluster_in_deleting_state(self, hook: DataprocHook) -> None:
time_left = self.timeout
for time_to_sleep in exponential_sleep_generator(initial=10, maximum=120):
if time_left < 0:
raise AirflowException(f"Cluster {self.cluster_name} is still DELETING state, aborting")
time.sleep(time_to_sleep)
time_left = time_left - time_to_sleep
try:
self._get_cluster(hook)
except NotFound:
break
def _wait_for_cluster_in_creating_state(self, hook: DataprocHook) -> Cluster:
time_left = self.timeout
cluster = self._get_cluster(hook)
for time_to_sleep in exponential_sleep_generator(initial=10, maximum=120):
if cluster.status.state != cluster.status.State.CREATING:
break
if time_left < 0:
raise AirflowException(f"Cluster {self.cluster_name} is still CREATING state, aborting")
time.sleep(time_to_sleep)
time_left = time_left - time_to_sleep
cluster = self._get_cluster(hook)
return cluster
def execute(self, context) -> dict:
self.log.info('Creating cluster: %s', self.cluster_name)
hook = DataprocHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
# Save data required to display extra link no matter what the cluster status will be
self.xcom_push(
context,
key="cluster_conf",
value={
"cluster_name": self.cluster_name,
"region": self.region,
"project_id": self.project_id,
},
)
try:
# First try to create a new cluster
cluster = self._create_cluster(hook)
except AlreadyExists:
if not self.use_if_exists:
raise
self.log.info("Cluster already exists.")
cluster = self._get_cluster(hook)
# Check if cluster is not in ERROR state
self._handle_error_state(hook, cluster)
if cluster.status.state == cluster.status.State.CREATING:
# Wait for cluster to be created
cluster = self._wait_for_cluster_in_creating_state(hook)
self._handle_error_state(hook, cluster)
elif cluster.status.state == cluster.status.State.DELETING:
# Wait for cluster to be deleted
self._wait_for_cluster_in_deleting_state(hook)
# Create new cluster
cluster = self._create_cluster(hook)
self._handle_error_state(hook, cluster)
return Cluster.to_dict(cluster)
class DataprocScaleClusterOperator(BaseOperator):
"""
Scale, up or down, a cluster on Google Cloud Dataproc.
The operator will wait until the cluster is re-scaled.
**Example**: ::
t1 = DataprocClusterScaleOperator(
task_id='dataproc_scale',
project_id='my-project',
cluster_name='cluster-1',
num_workers=10,
num_preemptible_workers=10,
graceful_decommission_timeout='1h',
dag=dag)
.. seealso::
        For more detail about scaling clusters, have a look at the reference:
https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/scaling-clusters
:param cluster_name: The name of the cluster to scale. (templated)
:type cluster_name: str
:param project_id: The ID of the google cloud project in which
the cluster runs. (templated)
:type project_id: str
:param region: The region for the dataproc cluster. (templated)
:type region: str
:param num_workers: The new number of workers
:type num_workers: int
:param num_preemptible_workers: The new number of preemptible workers
:type num_preemptible_workers: int
:param graceful_decommission_timeout: Timeout for graceful YARN decommissioning.
Maximum value is 1d
:type graceful_decommission_timeout: str
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:type gcp_conn_id: str
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
:type impersonation_chain: Union[str, Sequence[str]]
"""
template_fields = ['cluster_name', 'project_id', 'region', 'impersonation_chain']
operator_extra_links = (DataprocClusterLink(),)
def __init__(
self,
*,
cluster_name: str,
project_id: Optional[str] = None,
region: str = 'global',
num_workers: int = 2,
num_preemptible_workers: int = 0,
graceful_decommission_timeout: Optional[str] = None,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.project_id = project_id
self.region = region
self.cluster_name = cluster_name
self.num_workers = num_workers
self.num_preemptible_workers = num_preemptible_workers
self.graceful_decommission_timeout = graceful_decommission_timeout
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
# TODO: Remove one day
warnings.warn(
f"The `{type(self).__name__}` operator is deprecated, "
"please use `DataprocUpdateClusterOperator` instead.",
DeprecationWarning,
stacklevel=1,
)
def _build_scale_cluster_data(self) -> dict:
scale_data = {
'config': {
'worker_config': {'num_instances': self.num_workers},
'secondary_worker_config': {'num_instances': self.num_preemptible_workers},
}
}
return scale_data
@property
def _graceful_decommission_timeout_object(self) -> Optional[Dict[str, int]]:
if not self.graceful_decommission_timeout:
return None
timeout = None
match = re.match(r"^(\d+)([smdh])$", self.graceful_decommission_timeout)
if match:
if match.group(2) == "s":
timeout = int(match.group(1))
elif match.group(2) == "m":
val = float(match.group(1))
timeout = int(timedelta(minutes=val).total_seconds())
elif match.group(2) == "h":
val = float(match.group(1))
timeout = int(timedelta(hours=val).total_seconds())
elif match.group(2) == "d":
val = float(match.group(1))
timeout = int(timedelta(days=val).total_seconds())
if not timeout:
            raise AirflowException(
                "DataprocClusterScaleOperator graceful_decommission_timeout"
                " should be expressed in days, hours, minutes or seconds."
                " i.e. 1d, 4h, 10m, 30s"
            )
return {'seconds': timeout}
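    # For illustration (an editorial note, not upstream code): "30s" -> {'seconds': 30},
    # "10m" -> {'seconds': 600}, "1h" -> {'seconds': 3600}, "1d" -> {'seconds': 86400}.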
def execute(self, context) -> None:
"""Scale, up or down, a cluster on Google Cloud Dataproc."""
self.log.info("Scaling cluster: %s", self.cluster_name)
scaling_cluster_data = self._build_scale_cluster_data()
update_mask = ["config.worker_config.num_instances", "config.secondary_worker_config.num_instances"]
hook = DataprocHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
# Save data required to display extra link no matter what the cluster status will be
self.xcom_push(
context,
key="cluster_conf",
value={
"cluster_name": self.cluster_name,
"region": self.region,
"project_id": self.project_id,
},
)
operation = hook.update_cluster(
project_id=self.project_id,
region=self.region,
cluster_name=self.cluster_name,
cluster=scaling_cluster_data,
graceful_decommission_timeout=self._graceful_decommission_timeout_object,
update_mask={'paths': update_mask},
)
operation.result()
self.log.info("Cluster scaling finished")
class DataprocDeleteClusterOperator(BaseOperator):
"""
Deletes a cluster in a project.
:param project_id: Required. The ID of the Google Cloud project that the cluster belongs to (templated).
:type project_id: str
:param region: Required. The Cloud Dataproc region in which to handle the request (templated).
:type region: str
:param cluster_name: Required. The cluster name (templated).
:type cluster_name: str
:param cluster_uuid: Optional. Specifying the ``cluster_uuid`` means the RPC should fail
if cluster with specified UUID does not exist.
:type cluster_uuid: str
:param request_id: Optional. A unique id used to identify the request. If the server receives two
``DeleteClusterRequest`` requests with the same id, then the second request will be ignored and the
first ``google.longrunning.Operation`` created and stored in the backend is returned.
:type request_id: str
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
retried.
:type retry: google.api_core.retry.Retry
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:type timeout: float
:param metadata: Additional metadata that is provided to the method.
:type metadata: Sequence[Tuple[str, str]]
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:type gcp_conn_id: str
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
:type impersonation_chain: Union[str, Sequence[str]]
"""
template_fields = ('project_id', 'region', 'cluster_name', 'impersonation_chain')
def __init__(
self,
*,
project_id: str,
region: str,
cluster_name: str,
cluster_uuid: Optional[str] = None,
request_id: Optional[str] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
**kwargs,
):
super().__init__(**kwargs)
self.project_id = project_id
self.region = region
self.cluster_name = cluster_name
self.cluster_uuid = cluster_uuid
self.request_id = request_id
self.retry = retry
self.timeout = timeout
self.metadata = metadata
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
def execute(self, context: dict) -> None:
hook = DataprocHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
self.log.info("Deleting cluster: %s", self.cluster_name)
operation = hook.delete_cluster(
project_id=self.project_id,
region=self.region,
cluster_name=self.cluster_name,
cluster_uuid=self.cluster_uuid,
request_id=self.request_id,
retry=self.retry,
timeout=self.timeout,
metadata=self.metadata,
)
operation.result()
self.log.info("Cluster deleted.")
class DataprocJobBaseOperator(BaseOperator):
"""
The base class for operators that launch job on DataProc.
:param job_name: The job name used in the DataProc cluster. This name by default
is the task_id appended with the execution data, but can be templated. The
name will always be appended with a random number to avoid name clashes.
:type job_name: str
:param cluster_name: The name of the DataProc cluster.
:type cluster_name: str
:param project_id: The ID of the Google Cloud project the cluster belongs to,
if not specified the project will be inferred from the provided GCP connection.
:type project_id: str
:param dataproc_properties: Map for the Hive properties. Ideal to put in
default arguments (templated)
:type dataproc_properties: dict
:param dataproc_jars: HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop
MapReduce (MR) tasks. Can contain Hive SerDes and UDFs. (templated)
:type dataproc_jars: list
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:type gcp_conn_id: str
:param delegate_to: The account to impersonate using domain-wide delegation of authority,
if any. For this to work, the service account making the request must have
domain-wide delegation enabled.
:type delegate_to: str
:param labels: The labels to associate with this job. Label keys must contain 1 to 63 characters,
and must conform to RFC 1035. Label values may be empty, but, if present, must contain 1 to 63
characters, and must conform to RFC 1035. No more than 32 labels can be associated with a job.
:type labels: dict
:param region: The specified region where the dataproc cluster is created.
:type region: str
:param job_error_states: Job states that should be considered error states.
Any states in this set will result in an error being raised and failure of the
task. Eg, if the ``CANCELLED`` state should also be considered a task failure,
pass in ``{'ERROR', 'CANCELLED'}``. Possible values are currently only
``'ERROR'`` and ``'CANCELLED'``, but could change in the future. Defaults to
``{'ERROR'}``.
:type job_error_states: set
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
:type impersonation_chain: Union[str, Sequence[str]]
:param asynchronous: Flag to return after submitting the job to the Dataproc API.
This is useful for submitting long running jobs and
waiting on them asynchronously using the DataprocJobSensor
:type asynchronous: bool
:var dataproc_job_id: The actual "jobId" as submitted to the Dataproc API.
This is useful for identifying or linking to the job in the Google Cloud Console
Dataproc UI, as the actual "jobId" submitted to the Dataproc API is appended with
an 8 character random string.
:vartype dataproc_job_id: str
"""
job_type = ""
operator_extra_links = (DataprocJobLink(),)
def __init__(
self,
*,
job_name: str = '{{task.task_id}}_{{ds_nodash}}',
cluster_name: str = "cluster-1",
project_id: Optional[str] = None,
dataproc_properties: Optional[Dict] = None,
dataproc_jars: Optional[List[str]] = None,
gcp_conn_id: str = 'google_cloud_default',
delegate_to: Optional[str] = None,
labels: Optional[Dict] = None,
region: Optional[str] = None,
job_error_states: Optional[Set[str]] = None,
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
asynchronous: bool = False,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.gcp_conn_id = gcp_conn_id
self.delegate_to = delegate_to
self.labels = labels
self.job_name = job_name
self.cluster_name = cluster_name
self.dataproc_properties = dataproc_properties
self.dataproc_jars = dataproc_jars
if region is None:
warnings.warn(
"Default region value `global` will be deprecated. Please, provide region value.",
DeprecationWarning,
stacklevel=2,
)
region = 'global'
self.region = region
self.job_error_states = job_error_states if job_error_states is not None else {'ERROR'}
self.impersonation_chain = impersonation_chain
self.hook = DataprocHook(gcp_conn_id=gcp_conn_id, impersonation_chain=impersonation_chain)
self.project_id = self.hook.project_id if project_id is None else project_id
self.job_template = None
self.job = None
self.dataproc_job_id = None
self.asynchronous = asynchronous
def create_job_template(self):
"""Initialize `self.job_template` with default values"""
self.job_template = DataProcJobBuilder(
project_id=self.project_id,
task_id=self.task_id,
cluster_name=self.cluster_name,
job_type=self.job_type,
properties=self.dataproc_properties,
)
self.job_template.set_job_name(self.job_name)
self.job_template.add_jar_file_uris(self.dataproc_jars)
self.job_template.add_labels(self.labels)
def _generate_job_template(self) -> str:
if self.job_template:
job = self.job_template.build()
return job['job']
raise Exception("Create a job template before")
def execute(self, context):
if self.job_template:
self.job = self.job_template.build()
self.dataproc_job_id = self.job["job"]["reference"]["job_id"]
self.log.info('Submitting %s job %s', self.job_type, self.dataproc_job_id)
job_object = self.hook.submit_job(
project_id=self.project_id, job=self.job["job"], region=self.region
)
job_id = job_object.reference.job_id
self.log.info('Job %s submitted successfully.', job_id)
# Save data required for extra links no matter what the job status will be
self.xcom_push(
context,
key='job_conf',
value={'job_id': job_id, 'region': self.region, 'project_id': self.project_id},
)
if not self.asynchronous:
self.log.info('Waiting for job %s to complete', job_id)
self.hook.wait_for_job(job_id=job_id, region=self.region, project_id=self.project_id)
self.log.info('Job %s completed successfully.', job_id)
return job_id
else:
raise AirflowException("Create a job template before")
def on_kill(self) -> None:
"""
Callback called when the operator is killed.
Cancel any running job.
"""
if self.dataproc_job_id:
self.hook.cancel_job(project_id=self.project_id, job_id=self.dataproc_job_id, region=self.region)
class DataprocSubmitPigJobOperator(DataprocJobBaseOperator):
"""
Start a Pig query Job on a Cloud DataProc cluster. The parameters of the operation
will be passed to the cluster.
It's a good practice to define dataproc_* parameters in the default_args of the dag
like the cluster name and UDFs.
.. code-block:: python
default_args = {
"cluster_name": "cluster-1",
"dataproc_pig_jars": [
"gs://example/udf/jar/datafu/1.2.0/datafu.jar",
"gs://example/udf/jar/gpig/1.2/gpig.jar",
],
}
You can pass a pig script as string or file reference. Use variables to pass on
variables for the pig script to be resolved on the cluster or use the parameters to
be resolved in the script as template parameters.
**Example**: ::
t1 = DataProcPigOperator(
task_id='dataproc_pig',
query='a_pig_script.pig',
variables={'out': 'gs://example/output/{{ds}}'},
dag=dag)
.. seealso::
        For more detail about job submission, have a look at the reference:
https://cloud.google.com/dataproc/reference/rest/v1/projects.regions.jobs
:param query: The query or reference to the query
file (pg or pig extension). (templated)
:type query: str
:param query_uri: The HCFS URI of the script that contains the Pig queries.
:type query_uri: str
:param variables: Map of named parameters for the query. (templated)
:type variables: dict
"""
template_fields = [
'query',
'variables',
'job_name',
'cluster_name',
'region',
'dataproc_jars',
'dataproc_properties',
'impersonation_chain',
]
template_ext = ('.pg', '.pig')
ui_color = '#0273d4'
job_type = 'pig_job'
operator_extra_links = (DataprocJobLink(),)
def __init__(
self,
*,
query: Optional[str] = None,
query_uri: Optional[str] = None,
variables: Optional[Dict] = None,
**kwargs,
) -> None:
# TODO: Remove one day
warnings.warn(
"The `{cls}` operator is deprecated, please use `DataprocSubmitJobOperator` instead. You can use"
" `generate_job` method of `{cls}` to generate dictionary representing your job"
" and use it with the new operator.".format(cls=type(self).__name__),
DeprecationWarning,
stacklevel=1,
)
super().__init__(**kwargs)
self.query = query
self.query_uri = query_uri
self.variables = variables
def generate_job(self):
"""
Helper method for easier migration to `DataprocSubmitJobOperator`.
:return: Dict representing Dataproc job
"""
self.create_job_template()
if self.query is None:
self.job_template.add_query_uri(self.query_uri)
else:
self.job_template.add_query(self.query)
self.job_template.add_variables(self.variables)
return self._generate_job_template()
def execute(self, context):
self.create_job_template()
if self.query is None:
self.job_template.add_query_uri(self.query_uri)
else:
self.job_template.add_query(self.query)
self.job_template.add_variables(self.variables)
super().execute(context)
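# Migration sketch (an editorial assumption, not upstream code): the deprecated
# submit operators expose ``generate_job()`` so that the resulting dict can be
# handed to the newer DataprocSubmitJobOperator as its ``job`` argument, e.g.:
#
#     pig = DataprocSubmitPigJobOperator(
#         task_id="pig_template",
#         query="a_pig_script.pig",
#         cluster_name="cluster-1",
#         region="europe-west1",
#     )
#     job = pig.generate_job()  # dict describing the Pig job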
class DataprocSubmitHiveJobOperator(DataprocJobBaseOperator):
"""
Start a Hive query Job on a Cloud DataProc cluster.
:param query: The query or reference to the query file (q extension).
:type query: str
:param query_uri: The HCFS URI of the script that contains the Hive queries.
:type query_uri: str
:param variables: Map of named parameters for the query.
:type variables: dict
"""
template_fields = [
'query',
'variables',
'job_name',
'cluster_name',
'region',
'dataproc_jars',
'dataproc_properties',
'impersonation_chain',
]
template_ext = ('.q', '.hql')
ui_color = '#0273d4'
job_type = 'hive_job'
def __init__(
self,
*,
query: Optional[str] = None,
query_uri: Optional[str] = None,
variables: Optional[Dict] = None,
**kwargs,
) -> None:
# TODO: Remove one day
warnings.warn(
"The `{cls}` operator is deprecated, please use `DataprocSubmitJobOperator` instead. You can use"
" `generate_job` method of `{cls}` to generate dictionary representing your job"
" and use it with the new operator.".format(cls=type(self).__name__),
DeprecationWarning,
stacklevel=1,
)
super().__init__(**kwargs)
self.query = query
self.query_uri = query_uri
self.variables = variables
if self.query is not None and self.query_uri is not None:
raise AirflowException('Only one of `query` and `query_uri` can be passed.')
def generate_job(self):
"""
Helper method for easier migration to `DataprocSubmitJobOperator`.
:return: Dict representing Dataproc job
"""
self.create_job_template()
if self.query is None:
self.job_template.add_query_uri(self.query_uri)
else:
self.job_template.add_query(self.query)
self.job_template.add_variables(self.variables)
return self._generate_job_template()
def execute(self, context):
self.create_job_template()
if self.query is None:
self.job_template.add_query_uri(self.query_uri)
else:
self.job_template.add_query(self.query)
self.job_template.add_variables(self.variables)
super().execute(context)
class DataprocSubmitSparkSqlJobOperator(DataprocJobBaseOperator):
"""
Start a Spark SQL query Job on a Cloud DataProc cluster.
:param query: The query or reference to the query file (q extension). (templated)
:type query: str
:param query_uri: The HCFS URI of the script that contains the SQL queries.
:type query_uri: str
:param variables: Map of named parameters for the query. (templated)
:type variables: dict
"""
template_fields = [
'query',
'variables',
'job_name',
'cluster_name',
'region',
'dataproc_jars',
'dataproc_properties',
'impersonation_chain',
]
template_ext = ('.q',)
ui_color = '#0273d4'
job_type = 'spark_sql_job'
def __init__(
self,
*,
query: Optional[str] = None,
query_uri: Optional[str] = None,
variables: Optional[Dict] = None,
**kwargs,
) -> None:
# TODO: Remove one day
warnings.warn(
"The `{cls}` operator is deprecated, please use `DataprocSubmitJobOperator` instead. You can use"
" `generate_job` method of `{cls}` to generate dictionary representing your job"
" and use it with the new operator.".format(cls=type(self).__name__),
DeprecationWarning,
stacklevel=1,
)
super().__init__(**kwargs)
self.query = query
self.query_uri = query_uri
self.variables = variables
if self.query is not None and self.query_uri is not None:
raise AirflowException('Only one of `query` and `query_uri` can be passed.')
def generate_job(self):
"""
Helper method for easier migration to `DataprocSubmitJobOperator`.
:return: Dict representing Dataproc job
"""
self.create_job_template()
if self.query is None:
self.job_template.add_query_uri(self.query_uri)
else:
self.job_template.add_query(self.query)
self.job_template.add_variables(self.variables)
return self._generate_job_template()
def execute(self, context):
self.create_job_template()
if self.query is None:
self.job_template.add_query_uri(self.query_uri)
else:
self.job_template.add_query(self.query)
self.job_template.add_variables(self.variables)
super().execute(context)
class DataprocSubmitSparkJobOperator(DataprocJobBaseOperator):
"""
Start a Spark Job on a Cloud DataProc cluster.
:param main_jar: The HCFS URI of the jar file that contains the main class
(use this or the main_class, not both together).
:type main_jar: str
:param main_class: Name of the job class. (use this or the main_jar, not both
together).
:type main_class: str
:param arguments: Arguments for the job. (templated)
:type arguments: list
:param archives: List of archived files that will be unpacked in the work
directory. Should be stored in Cloud Storage.
:type archives: list
:param files: List of files to be copied to the working directory
:type files: list
"""
template_fields = [
'arguments',
'job_name',
'cluster_name',
'region',
'dataproc_jars',
'dataproc_properties',
'impersonation_chain',
]
ui_color = '#0273d4'
job_type = 'spark_job'
def __init__(
self,
*,
main_jar: Optional[str] = None,
main_class: Optional[str] = None,
arguments: Optional[List] = None,
archives: Optional[List] = None,
files: Optional[List] = None,
**kwargs,
) -> None:
# TODO: Remove one day
warnings.warn(
"The `{cls}` operator is deprecated, please use `DataprocSubmitJobOperator` instead. You can use"
" `generate_job` method of `{cls}` to generate dictionary representing your job"
" and use it with the new operator.".format(cls=type(self).__name__),
DeprecationWarning,
stacklevel=1,
)
super().__init__(**kwargs)
self.main_jar = main_jar
self.main_class = main_class
self.arguments = arguments
self.archives = archives
self.files = files
def generate_job(self):
"""
Helper method for easier migration to `DataprocSubmitJobOperator`.
:return: Dict representing Dataproc job
"""
self.create_job_template()
self.job_template.set_main(self.main_jar, self.main_class)
self.job_template.add_args(self.arguments)
self.job_template.add_archive_uris(self.archives)
self.job_template.add_file_uris(self.files)
return self._generate_job_template()
def execute(self, context):
self.create_job_template()
self.job_template.set_main(self.main_jar, self.main_class)
self.job_template.add_args(self.arguments)
self.job_template.add_archive_uris(self.archives)
self.job_template.add_file_uris(self.files)
super().execute(context)
class DataprocSubmitHadoopJobOperator(DataprocJobBaseOperator):
"""
Start a Hadoop Job on a Cloud DataProc cluster.
:param main_jar: The HCFS URI of the jar file containing the main class
(use this or the main_class, not both together).
:type main_jar: str
:param main_class: Name of the job class. (use this or the main_jar, not both
together).
:type main_class: str
:param arguments: Arguments for the job. (templated)
:type arguments: list
:param archives: List of archived files that will be unpacked in the work
directory. Should be stored in Cloud Storage.
:type archives: list
:param files: List of files to be copied to the working directory
:type files: list
"""
template_fields = [
'arguments',
'job_name',
'cluster_name',
'region',
'dataproc_jars',
'dataproc_properties',
'impersonation_chain',
]
ui_color = '#0273d4'
job_type = 'hadoop_job'
def __init__(
self,
*,
main_jar: Optional[str] = None,
main_class: Optional[str] = None,
arguments: Optional[List] = None,
archives: Optional[List] = None,
files: Optional[List] = None,
**kwargs,
) -> None:
# TODO: Remove one day
warnings.warn(
"The `{cls}` operator is deprecated, please use `DataprocSubmitJobOperator` instead. You can use"
" `generate_job` method of `{cls}` to generate dictionary representing your job"
" and use it with the new operator.".format(cls=type(self).__name__),
DeprecationWarning,
stacklevel=1,
)
super().__init__(**kwargs)
self.main_jar = main_jar
self.main_class = main_class
self.arguments = arguments
self.archives = archives
self.files = files
def generate_job(self):
"""
Helper method for easier migration to `DataprocSubmitJobOperator`.
:return: Dict representing Dataproc job
"""
self.create_job_template()
self.job_template.set_main(self.main_jar, self.main_class)
self.job_template.add_args(self.arguments)
self.job_template.add_archive_uris(self.archives)
self.job_template.add_file_uris(self.files)
return self._generate_job_template()
def execute(self, context):
self.create_job_template()
self.job_template.set_main(self.main_jar, self.main_class)
self.job_template.add_args(self.arguments)
self.job_template.add_archive_uris(self.archives)
self.job_template.add_file_uris(self.files)
super().execute(context)
class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator):
"""
Start a PySpark Job on a Cloud DataProc cluster.
:param main: [Required] The Hadoop Compatible Filesystem (HCFS) URI of the main
Python file to use as the driver. Must be a .py file. (templated)
:type main: str
:param arguments: Arguments for the job. (templated)
:type arguments: list
:param archives: List of archived files that will be unpacked in the work
directory. Should be stored in Cloud Storage.
:type archives: list
:param files: List of files to be copied to the working directory
:type files: list
:param pyfiles: List of Python files to pass to the PySpark framework.
Supported file types: .py, .egg, and .zip
:type pyfiles: list
"""
template_fields = [
'main',
'arguments',
'job_name',
'cluster_name',
'region',
'dataproc_jars',
'dataproc_properties',
'impersonation_chain',
]
ui_color = '#0273d4'
job_type = 'pyspark_job'
@staticmethod
def _generate_temp_filename(filename):
date = time.strftime('%Y%m%d%H%M%S')
return f"{date}_{str(uuid.uuid4())[:8]}_{ntpath.basename(filename)}"
def _upload_file_temp(self, bucket, local_file):
"""Upload a local file to a Google Cloud Storage bucket."""
temp_filename = self._generate_temp_filename(local_file)
if not bucket:
raise AirflowException(
"If you want Airflow to upload the local file to a temporary bucket, set "
"the 'temp_bucket' key in the connection string"
)
self.log.info("Uploading %s to %s", local_file, temp_filename)
GCSHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain).upload(
bucket_name=bucket,
object_name=temp_filename,
mime_type='application/x-python',
filename=local_file,
)
return f"gs://{bucket}/{temp_filename}"
def __init__(
self,
*,
main: str,
arguments: Optional[List] = None,
archives: Optional[List] = None,
pyfiles: Optional[List] = None,
files: Optional[List] = None,
**kwargs,
) -> None:
# TODO: Remove one day
warnings.warn(
"The `{cls}` operator is deprecated, please use `DataprocSubmitJobOperator` instead. You can use"
" `generate_job` method of `{cls}` to generate dictionary representing your job"
" and use it with the new operator.".format(cls=type(self).__name__),
DeprecationWarning,
stacklevel=1,
)
super().__init__(**kwargs)
self.main = main
self.arguments = arguments
self.archives = archives
self.files = files
self.pyfiles = pyfiles
def generate_job(self):
"""
Helper method for easier migration to `DataprocSubmitJobOperator`.
:return: Dict representing Dataproc job
"""
self.create_job_template()
# Check if the file is local, if that is the case, upload it to a bucket
if os.path.isfile(self.main):
cluster_info = self.hook.get_cluster(
project_id=self.project_id, region=self.region, cluster_name=self.cluster_name
)
bucket = cluster_info['config']['config_bucket']
self.main = f"gs://{bucket}/{self.main}"
self.job_template.set_python_main(self.main)
self.job_template.add_args(self.arguments)
self.job_template.add_archive_uris(self.archives)
self.job_template.add_file_uris(self.files)
self.job_template.add_python_file_uris(self.pyfiles)
return self._generate_job_template()
def execute(self, context):
self.create_job_template()
# Check if the file is local, if that is the case, upload it to a bucket
if os.path.isfile(self.main):
cluster_info = self.hook.get_cluster(
project_id=self.project_id, region=self.region, cluster_name=self.cluster_name
)
bucket = cluster_info['config']['config_bucket']
self.main = self._upload_file_temp(bucket, self.main)
self.job_template.set_python_main(self.main)
self.job_template.add_args(self.arguments)
self.job_template.add_archive_uris(self.archives)
self.job_template.add_file_uris(self.files)
self.job_template.add_python_file_uris(self.pyfiles)
super().execute(context)
class DataprocCreateWorkflowTemplateOperator(BaseOperator):
"""
Creates new workflow template.
:param project_id: Required. The ID of the Google Cloud project the cluster belongs to.
:type project_id: str
:param region: Required. The Cloud Dataproc region in which to handle the request.
:type region: str
:param location: (To be deprecated). The Cloud Dataproc region in which to handle the request.
:type location: str
:param template: The Dataproc workflow template to create. If a dict is provided,
it must be of the same form as the protobuf message WorkflowTemplate.
:type template: Union[dict, WorkflowTemplate]
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
retried.
:type retry: google.api_core.retry.Retry
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:type timeout: float
:param metadata: Additional metadata that is provided to the method.
:type metadata: Sequence[Tuple[str, str]]
"""
template_fields = ("region", "template")
template_fields_renderers = {"template": "json"}
def __init__(
self,
*,
template: Dict,
project_id: str,
region: str = None,
location: Optional[str] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
**kwargs,
):
if region is None:
if location is not None:
warnings.warn(
"Parameter `location` will be deprecated. "
"Please provide value through `region` parameter instead.",
DeprecationWarning,
stacklevel=2,
)
region = location
else:
raise TypeError("missing 1 required keyword argument: 'region'")
super().__init__(**kwargs)
self.region = region
self.template = template
self.project_id = project_id
self.retry = retry
self.timeout = timeout
self.metadata = metadata
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
def execute(self, context):
hook = DataprocHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
self.log.info("Creating template")
try:
workflow = hook.create_workflow_template(
region=self.region,
template=self.template,
project_id=self.project_id,
retry=self.retry,
timeout=self.timeout,
metadata=self.metadata,
)
self.log.info("Workflow %s created", workflow.name)
except AlreadyExists:
self.log.info("Workflow with given id already exists")
class DataprocInstantiateWorkflowTemplateOperator(BaseOperator):
"""
Instantiate a WorkflowTemplate on Google Cloud Dataproc. The operator will wait
until the WorkflowTemplate is finished executing.
.. seealso::
Please refer to:
https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.workflowTemplates/instantiate
:param template_id: The id of the template. (templated)
:type template_id: str
:param project_id: The ID of the google cloud project in which
the template runs
:type project_id: str
:param region: The specified region where the dataproc cluster is created.
:type region: str
:param parameters: a map of parameters for Dataproc Template in key-value format:
map (key: string, value: string)
Example: { "date_from": "2019-08-01", "date_to": "2019-08-02"}.
Values may not exceed 100 characters. Please refer to:
https://cloud.google.com/dataproc/docs/concepts/workflows/workflow-parameters
:type parameters: Dict[str, str]
:param request_id: Optional. A unique id used to identify the request. If the server receives two
``SubmitJobRequest`` requests with the same id, then the second request will be ignored and the first
``Job`` created and stored in the backend is returned.
It is recommended to always set this value to a UUID.
:type request_id: str
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
retried.
:type retry: google.api_core.retry.Retry
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:type timeout: float
:param metadata: Additional metadata that is provided to the method.
:type metadata: Sequence[Tuple[str, str]]
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:type gcp_conn_id: str
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
:type impersonation_chain: Union[str, Sequence[str]]
"""
template_fields = ['template_id', 'impersonation_chain', 'request_id', 'parameters']
template_fields_renderers = {"parameters": "json"}
def __init__(
self,
*,
template_id: str,
region: str,
project_id: Optional[str] = None,
version: Optional[int] = None,
request_id: Optional[str] = None,
parameters: Optional[Dict[str, str]] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.template_id = template_id
self.parameters = parameters
self.version = version
self.project_id = project_id
self.region = region
self.retry = retry
self.timeout = timeout
self.metadata = metadata
self.request_id = request_id
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
def execute(self, context):
hook = DataprocHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
self.log.info('Instantiating template %s', self.template_id)
operation = hook.instantiate_workflow_template(
project_id=self.project_id,
region=self.region,
template_name=self.template_id,
version=self.version,
request_id=self.request_id,
parameters=self.parameters,
retry=self.retry,
timeout=self.timeout,
metadata=self.metadata,
)
operation.result()
self.log.info('Template instantiated.')
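# --- Illustrative sketch, not part of the original module -------------------------------
# Hypothetical instantiation of an existing template by id. The names are invented, and the
# ``parameters`` map is only valid if the template itself declares matching parameters.
def _example_instantiate_workflow_template():
    return DataprocInstantiateWorkflowTemplateOperator(
        task_id="instantiate_workflow_template",
        template_id="example-workflow-template",
        project_id="example-project",
        region="europe-west1",
        parameters={"date_from": "2019-08-01", "date_to": "2019-08-02"},
    )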
class DataprocInstantiateInlineWorkflowTemplateOperator(BaseOperator):
"""
Instantiate a WorkflowTemplate Inline on Google Cloud Dataproc. The operator will
wait until the WorkflowTemplate is finished executing.
.. seealso::
Please refer to:
https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.workflowTemplates/instantiateInline
:param template: The template contents. (templated)
:type template: dict
:param project_id: The ID of the google cloud project in which
the template runs
:type project_id: str
:param region: The specified region where the dataproc cluster is created.
:type region: str
:param parameters: a map of parameters for Dataproc Template in key-value format:
map (key: string, value: string)
Example: { "date_from": "2019-08-01", "date_to": "2019-08-02"}.
Values may not exceed 100 characters. Please refer to:
https://cloud.google.com/dataproc/docs/concepts/workflows/workflow-parameters
:type parameters: Dict[str, str]
:param request_id: Optional. A unique id used to identify the request. If the server receives two
``SubmitJobRequest`` requests with the same id, then the second request will be ignored and the first
``Job`` created and stored in the backend is returned.
It is recommended to always set this value to a UUID.
:type request_id: str
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
retried.
:type retry: google.api_core.retry.Retry
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:type timeout: float
:param metadata: Additional metadata that is provided to the method.
:type metadata: Sequence[Tuple[str, str]]
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:type gcp_conn_id: str
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
:type impersonation_chain: Union[str, Sequence[str]]
"""
template_fields = ['template', 'impersonation_chain']
template_fields_renderers = {"template": "json"}
def __init__(
self,
*,
template: Dict,
region: str,
project_id: Optional[str] = None,
request_id: Optional[str] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.template = template
self.project_id = project_id
self.region = region
self.request_id = request_id
self.retry = retry
self.timeout = timeout
self.metadata = metadata
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
def execute(self, context):
self.log.info('Instantiating Inline Template')
hook = DataprocHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
operation = hook.instantiate_inline_workflow_template(
template=self.template,
project_id=self.project_id,
region=self.region,
request_id=self.request_id,
retry=self.retry,
timeout=self.timeout,
metadata=self.metadata,
)
operation.result()
self.log.info('Template instantiated.')
class DataprocSubmitJobOperator(BaseOperator):
"""
Submits a job to a cluster.
:param project_id: Required. The ID of the Google Cloud project that the job belongs to.
:type project_id: str
:param region: Required. The Cloud Dataproc region in which to handle the request.
:type region: str
:param location: (To be deprecated). The Cloud Dataproc region in which to handle the request.
:type location: str
:param job: Required. The job resource.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.dataproc_v1.types.Job`
:type job: Dict
:param request_id: Optional. A unique id used to identify the request. If the server receives two
``SubmitJobRequest`` requests with the same id, then the second request will be ignored and the first
``Job`` created and stored in the backend is returned.
It is recommended to always set this value to a UUID.
:type request_id: str
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
retried.
:type retry: google.api_core.retry.Retry
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:type timeout: float
:param metadata: Additional metadata that is provided to the method.
:type metadata: Sequence[Tuple[str, str]]
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:type gcp_conn_id: str
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
:type impersonation_chain: Union[str, Sequence[str]]
:param asynchronous: Flag to return after submitting the job to the Dataproc API.
This is useful for submitting long running jobs and
waiting on them asynchronously using the DataprocJobSensor
:type asynchronous: bool
    :param cancel_on_kill: Flag which indicates whether to cancel the hook's job when on_kill is called
:type cancel_on_kill: bool
    :param wait_timeout: How many seconds to wait for the job to be ready. Used only if ``asynchronous`` is False
:type wait_timeout: int
"""
template_fields = ('project_id', 'region', 'job', 'impersonation_chain', 'request_id')
template_fields_renderers = {"job": "json"}
operator_extra_links = (DataprocJobLink(),)
def __init__(
self,
*,
project_id: str,
job: Dict,
region: str = None,
location: Optional[str] = None,
request_id: Optional[str] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
asynchronous: bool = False,
cancel_on_kill: bool = True,
wait_timeout: Optional[int] = None,
**kwargs,
) -> None:
if region is None:
if location is not None:
warnings.warn(
"Parameter `location` will be deprecated. "
"Please provide value through `region` parameter instead.",
DeprecationWarning,
stacklevel=2,
)
region = location
else:
raise TypeError("missing 1 required keyword argument: 'region'")
super().__init__(**kwargs)
self.project_id = project_id
self.region = region
self.job = job
self.request_id = request_id
self.retry = retry
self.timeout = timeout
self.metadata = metadata
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
self.asynchronous = asynchronous
self.cancel_on_kill = cancel_on_kill
self.hook: Optional[DataprocHook] = None
self.job_id: Optional[str] = None
self.wait_timeout = wait_timeout
def execute(self, context: Dict):
self.log.info("Submitting job")
self.hook = DataprocHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
job_object = self.hook.submit_job(
project_id=self.project_id,
region=self.region,
job=self.job,
request_id=self.request_id,
retry=self.retry,
timeout=self.timeout,
metadata=self.metadata,
)
job_id = job_object.reference.job_id
self.log.info('Job %s submitted successfully.', job_id)
# Save data required by extra links no matter what the job status will be
self.xcom_push(
context,
key="job_conf",
value={
"job_id": job_id,
"region": self.region,
"project_id": self.project_id,
},
)
if not self.asynchronous:
self.log.info('Waiting for job %s to complete', job_id)
self.hook.wait_for_job(
job_id=job_id, region=self.region, project_id=self.project_id, timeout=self.wait_timeout
)
self.log.info('Job %s completed successfully.', job_id)
self.job_id = job_id
return self.job_id
def on_kill(self):
if self.job_id and self.cancel_on_kill:
self.hook.cancel_job(job_id=self.job_id, project_id=self.project_id, region=self.region)
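# --- Illustrative sketch, not part of the original module -------------------------------
# A hypothetical PySpark job dict in the google.cloud.dataproc_v1 ``Job`` form expected by
# this operator; the bucket, script path, cluster, region and project are invented.
def _example_submit_pyspark_job():
    pyspark_job = {
        "reference": {"project_id": "example-project"},
        "placement": {"cluster_name": "example-cluster"},
        "pyspark_job": {"main_python_file_uri": "gs://example-bucket/jobs/example_job.py"},
    }
    return DataprocSubmitJobOperator(
        task_id="submit_pyspark_job",
        job=pyspark_job,
        region="europe-west1",
        project_id="example-project",
    )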
class DataprocUpdateClusterOperator(BaseOperator):
"""
Updates a cluster in a project.
:param project_id: Required. The ID of the Google Cloud project the cluster belongs to.
:type project_id: str
:param region: Required. The Cloud Dataproc region in which to handle the request.
:type region: str
:param location: (To be deprecated). The Cloud Dataproc region in which to handle the request.
:type location: str
:param cluster_name: Required. The cluster name.
:type cluster_name: str
:param cluster: Required. The changes to the cluster.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.dataproc_v1.types.Cluster`
:type cluster: Union[Dict, google.cloud.dataproc_v1.types.Cluster]
:param update_mask: Required. Specifies the path, relative to ``Cluster``, of the field to update. For
example, to change the number of workers in a cluster to 5, the ``update_mask`` parameter would be
specified as ``config.worker_config.num_instances``, and the ``PATCH`` request body would specify the
new value. If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.protobuf.field_mask_pb2.FieldMask`
:type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask]
:param graceful_decommission_timeout: Optional. Timeout for graceful YARN decommissioning. Graceful
decommissioning allows removing nodes from the cluster without interrupting jobs in progress. Timeout
specifies how long to wait for jobs in progress to finish before forcefully removing nodes (and
potentially interrupting jobs). Default timeout is 0 (for forceful decommission), and the maximum
allowed timeout is 1 day.
:type graceful_decommission_timeout: Union[Dict, google.protobuf.duration_pb2.Duration]
:param request_id: Optional. A unique id used to identify the request. If the server receives two
``UpdateClusterRequest`` requests with the same id, then the second request will be ignored and the
first ``google.longrunning.Operation`` created and stored in the backend is returned.
:type request_id: str
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
retried.
:type retry: google.api_core.retry.Retry
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:type timeout: float
:param metadata: Additional metadata that is provided to the method.
:type metadata: Sequence[Tuple[str, str]]
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:type gcp_conn_id: str
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
:type impersonation_chain: Union[str, Sequence[str]]
"""
template_fields = ('impersonation_chain', 'cluster_name')
operator_extra_links = (DataprocClusterLink(),)
def __init__(
self,
*,
cluster_name: str,
cluster: Union[Dict, Cluster],
update_mask: Union[Dict, FieldMask],
graceful_decommission_timeout: Union[Dict, Duration],
region: str = None,
location: Optional[str] = None,
request_id: Optional[str] = None,
project_id: Optional[str] = None,
retry: Retry = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
**kwargs,
):
if region is None:
if location is not None:
warnings.warn(
"Parameter `location` will be deprecated. "
"Please provide value through `region` parameter instead.",
DeprecationWarning,
stacklevel=2,
)
region = location
else:
raise TypeError("missing 1 required keyword argument: 'region'")
super().__init__(**kwargs)
self.project_id = project_id
self.region = region
self.cluster_name = cluster_name
self.cluster = cluster
self.update_mask = update_mask
self.graceful_decommission_timeout = graceful_decommission_timeout
self.request_id = request_id
self.retry = retry
self.timeout = timeout
self.metadata = metadata
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
def execute(self, context: Dict):
hook = DataprocHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
# Save data required by extra links no matter what the cluster status will be
self.xcom_push(
context,
key="cluster_conf",
value={
"cluster_name": self.cluster_name,
"region": self.region,
"project_id": self.project_id,
},
)
self.log.info("Updating %s cluster.", self.cluster_name)
operation = hook.update_cluster(
project_id=self.project_id,
region=self.region,
cluster_name=self.cluster_name,
cluster=self.cluster,
update_mask=self.update_mask,
graceful_decommission_timeout=self.graceful_decommission_timeout,
request_id=self.request_id,
retry=self.retry,
timeout=self.timeout,
metadata=self.metadata,
)
operation.result()
self.log.info("Updated %s cluster.", self.cluster_name)
|
apache/incubator-airflow
|
airflow/providers/google/cloud/operators/dataproc.py
|
Python
|
apache-2.0
| 90,576
|
"""Collection of fixtures and functions for the HomeKit tests."""
from unittest.mock import patch
def patch_debounce():
"""Return patch for debounce method."""
return patch(
"homeassistant.components.homekit.accessories.debounce",
lambda f: lambda *args, **kwargs: f(*args, **kwargs),
)
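# Illustrative sketch, not part of the original fixture module: a hypothetical test would
# start this patch before the accessory under test is created, so methods wrapped by the
# real debounce decorator run immediately, and stop it afterwards.
def _example_use_patch_debounce():
    patcher = patch_debounce()
    patcher.start()
    try:
        pass  # create and exercise the HomeKit accessory under test here
    finally:
        patcher.stop()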
|
fbradyirl/home-assistant
|
tests/components/homekit/common.py
|
Python
|
apache-2.0
| 317
|
# -*- coding: utf-8 -*-
#
# Peony documentation build configuration file, created by
# sphinx-quickstart on Tue Aug 30 16:36:34 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
import inspect
import os
import pathlib
import re
import sys
conf_py = pathlib.Path(inspect.getfile(inspect.currentframe())).absolute()
docs = conf_py.parent
maindir = docs.parent
sys.path.insert(0, str(maindir))
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon'
]
rtd = "https://%s.readthedocs.io/en/stable"
python_docs = "https://docs.python.org/3"
intersphinx_mapping = {'python': (python_docs, None),
'aiohttp': (rtd % "aiohttp", None)}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Peony'
copyright = '2016-2017, Florian Badie'
author = 'odrling'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
init = maindir / "peony" / "__init__.py"
with init.open() as stream:
ex = r'__version__\s*=\s*?[\"\']([^\"\']*)'
match = re.search(ex, stream.read())
version = match.group(1)
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# on_rtd is whether we are on readthedocs.org, this line of code
# grabbed from docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need
# to specify it
# html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'Peony v0.2.2'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a
# favicon of the docs. This file should be a Windows icon file (.ico)
# being 16x16 or 32x32 pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Peonydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Peony.tex', 'Peony Documentation',
'odrling', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'peony', 'Peony Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Peony', 'Peony Documentation',
author, 'Peony', 'An asynchronous Twitter API client.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
|
odrling/peony-twitter
|
docs/conf.py
|
Python
|
mit
| 10,839
|
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from perfkitbenchmarker import sample
class SampleTestCase(unittest.TestCase):
def testMetadataOptional(self):
instance = sample.Sample(metric='Test', value=1.0, unit='Mbps')
self.assertDictEqual({}, instance.metadata)
def testProvidedMetadataSet(self):
metadata = {'origin': 'unit test'}
instance = sample.Sample(metric='Test', value=1.0, unit='Mbps',
metadata=metadata.copy())
self.assertDictEqual(metadata, instance.metadata)
|
tvansteenburgh/PerfKitBenchmarker
|
tests/sample_test.py
|
Python
|
apache-2.0
| 1,098
|
#!/usr/bin/env python
# Copyright 2015 Byhiras (Europe) Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import shutil
import tempfile
import re
import pytest
import pyavroc
import _testhelper
def test_write_union_obj():
schema = '''[{"name": "Rec1", "type": "record",
"fields": [ {"name": "attr1", "type": "int"} ] },
{"name": "Rec2", "type": "record",
"fields": [ {"name": "attr2", "type": "string"} ]}
]'''
dirname = tempfile.mkdtemp()
filename = os.path.join(dirname, 'test.avro')
avtypes = pyavroc.create_types(schema)
assert avtypes.Rec1._fieldtypes == {'attr1': int}
assert avtypes.Rec2._fieldtypes == {'attr2': str}
recs = [avtypes.Rec1(attr1=123), avtypes.Rec2(attr2='hello')]
with open(filename, 'w') as fp:
writer = pyavroc.AvroFileWriter(fp, schema)
for rec in recs:
writer.write(rec)
writer.close()
orig_rec1 = avtypes.Rec1
orig_rec2 = avtypes.Rec2
# read using existing types
with open(filename) as fp:
reader = pyavroc.AvroFileReader(fp, types=avtypes)
read_recs = list(reader)
assert reader.types.Rec1 is orig_rec1
assert reader.types.Rec2 is orig_rec2
assert read_recs == recs
# read and create new types
with open(filename) as fp:
reader = pyavroc.AvroFileReader(fp, types=True)
read_recs = list(reader)
assert reader.types.Rec1 is not orig_rec1
assert reader.types.Rec2 is not orig_rec2
assert read_recs != recs
assert _testhelper.objs_to_dicts(read_recs) == _testhelper.objs_to_dicts(recs)
shutil.rmtree(dirname)
def test_write_wrong_value():
schema = '''[{"name": "Rec1", "type": "record",
"fields": [ {"name": "attr1", "type": "int"} ] },
{"name": "Rec2", "type": "record",
"fields": [ {"name": "attr2", "type": "string"} ]}
]'''
dirname = tempfile.mkdtemp()
filename = os.path.join(dirname, 'test.avro')
avtypes = pyavroc.create_types(schema)
with pytest.raises(TypeError) as excinfo:
with open(filename, 'w') as fp:
writer = pyavroc.AvroFileWriter(fp, schema)
writer.write(avtypes.Rec1(attr1='x' * 120))
writer.close()
expected_error = "when writing to Rec1.attr1, invalid python object '" \
+ ('x' * 99) + ", an integer is required"
expected_error2 = "when writing to Rec1.attr1, invalid python object '" \
+ ('x' * 120) + "', an integer is required"
assert expected_error in str(excinfo.value) \
or expected_error2 in str(excinfo.value)
with pytest.raises(TypeError) as excinfo:
with open(filename, 'w') as fp:
writer = pyavroc.AvroFileWriter(fp, schema)
writer.write(avtypes.Rec2(attr2=123))
writer.close()
expected_error = "when writing to Rec2.attr2, invalid python object 123," \
" expected.*Unicode.*, int found"
assert re.search(expected_error, str(excinfo.value))
shutil.rmtree(dirname)
def test_write_closed():
schema = '''[{"name": "Rec1", "type": "record",
"fields": [ {"name": "attr1", "type": "int"} ] },
{"name": "Rec2", "type": "record",
"fields": [ {"name": "attr2", "type": "string"} ]}
]'''
dirname = tempfile.mkdtemp()
filename = os.path.join(dirname, 'test.avro')
avtypes = pyavroc.create_types(schema)
fp = open(filename, 'w')
writer = pyavroc.AvroFileWriter(fp, schema)
writer.write(avtypes.Rec1(attr1=123))
writer.close()
fp.close()
with pytest.raises(IOError):
writer.write(avtypes.Rec1(attr1=456))
shutil.rmtree(dirname)
def test_write_read_empty():
schema = '''[{"name": "Rec1", "type": "record",
"fields": [ {"name": "attr1", "type": "int"} ] },
{"name": "Rec2", "type": "record",
"fields": [ {"name": "attr2", "type": "string"} ]}
]'''
dirname = tempfile.mkdtemp()
filename = os.path.join(dirname, 'test.avro')
avtypes = pyavroc.create_types(schema)
with open(filename, 'w') as fp:
writer = pyavroc.AvroFileWriter(fp, schema)
writer.close()
# read using existing types
with open(filename) as fp:
reader = pyavroc.AvroFileReader(fp, types=avtypes)
read_recs = list(reader)
assert len(read_recs) == 0
shutil.rmtree(dirname)
def test_write_union_of_dicts():
schema = '''[{"name": "Rec1", "type": "record",
"fields": [ {"name": "attr1", "type": "int"} ] },
{"name": "Rec2", "type": "record",
"fields": [ {"name": "attr2", "type": "string"} ]}
]'''
dirname = tempfile.mkdtemp()
filename = os.path.join(dirname, 'test.avro')
recs = [{'attr1': 123}, {'attr2': 'hello'}]
with open(filename, 'w') as fp:
writer = pyavroc.AvroFileWriter(fp, schema)
for rec in recs:
writer.write(rec)
writer.close()
with open(filename) as fp:
reader = pyavroc.AvroFileReader(fp, types=False)
read_recs = list(reader)
assert read_recs == recs
shutil.rmtree(dirname)
def test_bad_file_argument():
try:
with tempfile.NamedTemporaryFile() as fp:
writer = pyavroc.AvroFileWriter(fp, '["null", "int"]')
writer.close()
except TypeError:
pass
def test_write_wrong_type_primitive():
schema = '''{
"type": "record",
"name": "Obj",
"fields": [
{"name": "string", "type": "string"},
{"name": "number", "type": "int"}
]
}'''
avtypes = pyavroc.create_types(schema)
serializer = pyavroc.AvroSerializer(schema)
# this shouldn't raise
serializer.serialize(avtypes.Obj(string="pippo", number=1))
# place an int in place of a str
u = avtypes.Obj(string=1, number=1)
with pytest.raises(TypeError):
serializer.serialize(u)
# string in place of int
u = avtypes.Obj(string="a", number="a")
with pytest.raises(TypeError):
serializer.serialize(u)
def test_coerce_int_long_in_unions():
schema = ''' [ "null", "long"] '''
with open('/dev/null', 'w') as fp:
writer = pyavroc.AvroFileWriter(fp, schema)
writer.write(33) # an integer. Should be coerced to long without an error
writer.close()
def test_coerce_int_long():
schema = '''{
"type": "record",
"name": "Rec",
"fields": [ {"name": "attr1", "type": "long"} ]
}'''
av_types = pyavroc.create_types(schema)
rec = av_types.Rec(attr1=33) # an integer. Should be coerced to long without an error
with open('/dev/null', 'w') as fp:
writer = pyavroc.AvroFileWriter(fp, schema)
writer.write(rec)
writer.close()
def test_union_with_bool():
schema = '''{
"type": "record",
"name": "Rec",
"fields": [ {"name": "attr1", "type": [ "null", "boolean" ]} ]
}'''
av_types = pyavroc.create_types(schema)
with tempfile.NamedTemporaryFile() as tmpfile:
writer = pyavroc.AvroFileWriter(tmpfile.file, schema)
# Try writing null
writer.write(av_types.Rec(attr1=None))
# Try writing a boolean value
writer.write(av_types.Rec(attr1=True))
# Try writing an integer. Should be coerced to boolean without an error
writer.write(av_types.Rec(attr1=33))
writer.write(av_types.Rec(attr1=0))
writer.close()
tmpfile.flush()
tmpfile.seek(0)
reader = pyavroc.AvroFileReader(tmpfile.file, types=av_types)
read_recs = list(reader)
attr_values = [ r.attr1 for r in read_recs ]
assert attr_values == [ None, True, True, False ]
|
crs4/pyavroc
|
tests/test_write.py
|
Python
|
apache-2.0
| 8,092
|
import RPi.GPIO as GPIO
import time
def move(angle=90):
"Move the servo to the specified angle"
pulse = int(angle) / 18.0 + 2.5
GPIO.setmode(GPIO.BOARD)
GPIO.setup(16,GPIO.OUT)
p = GPIO.PWM(16,50)
p.start(pulse)
time.sleep(1)
p.stop()
GPIO.cleanup()
if __name__ == "__main__":
import sys
angle = 90
if len(sys.argv) == 2:
angle = sys.argv[1]
move(angle)
#try:
# while True:
# p.ChangeDutyCycle(7.5)
# time.sleep(1)
# p.ChangeDutyCycle(12.5)
# time.sleep(1)
# p.ChangeDutyCycle(2.5)
# time.sleep(1)
#
#except KeyboardInterrupt:
# p.stop()
# GPIO.cleanup()
|
dmollaaliod/raspicamserver
|
servo.py
|
Python
|
gpl-2.0
| 583
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "production.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
effa/smartoo
|
manage.py
|
Python
|
gpl-2.0
| 253
|
# coding: utf-8
"""Setup lightgbm package."""
import logging
import struct
import subprocess
import sys
from os import chdir
from pathlib import Path
from platform import system
from shutil import copyfile, copytree, rmtree
from typing import List, Optional, Union
from setuptools import find_packages, setup
from setuptools.command.install import install
from setuptools.command.install_lib import install_lib
from setuptools.command.sdist import sdist
from wheel.bdist_wheel import bdist_wheel
LIGHTGBM_OPTIONS = [
('mingw', 'm', 'Compile with MinGW'),
('integrated-opencl', None, 'Compile integrated OpenCL version'),
('gpu', 'g', 'Compile GPU version'),
('cuda', None, 'Compile CUDA version'),
('mpi', None, 'Compile MPI version'),
('nomp', None, 'Compile version without OpenMP support'),
('hdfs', 'h', 'Compile HDFS version'),
('bit32', None, 'Compile 32-bit version'),
('precompile', 'p', 'Use precompiled library'),
('boost-root=', None, 'Boost preferred installation prefix'),
('boost-dir=', None, 'Directory with Boost package configuration file'),
('boost-include-dir=', None, 'Directory containing Boost headers'),
('boost-librarydir=', None, 'Preferred Boost library directory'),
('opencl-include-dir=', None, 'OpenCL include directory'),
('opencl-library=', None, 'Path to OpenCL library')
]
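# Illustrative note, not part of the original script: these flags are consumed by the custom
# install/bdist_wheel commands defined below and are passed on the setup.py command line,
# e.g. a GPU build might be invoked as
#     python setup.py install --gpu --opencl-include-dir=/usr/local/cuda/include
# (the include path shown is hypothetical and depends on the local OpenCL installation).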
def find_lib() -> List[str]:
libpath_py = CURRENT_DIR / 'lightgbm' / 'libpath.py'
libpath = {'__file__': libpath_py}
exec(compile(libpath_py.read_bytes(), libpath_py, 'exec'), libpath, libpath)
LIB_PATH = libpath['find_lib_path']() # type: ignore
logger.info(f"Installing lib_lightgbm from: {LIB_PATH}")
return LIB_PATH
def copy_files(integrated_opencl: bool = False, use_gpu: bool = False) -> None:
def copy_files_helper(folder_name: Union[str, Path]) -> None:
src = CURRENT_DIR.parent / folder_name
if src.is_dir():
dst = CURRENT_DIR / 'compile' / folder_name
if dst.is_dir():
rmtree(dst)
copytree(src, dst)
else:
raise Exception(f'Cannot copy {src} folder')
if not IS_SOURCE_FLAG_PATH.is_file():
copy_files_helper('include')
copy_files_helper('src')
for submodule in (CURRENT_DIR.parent / 'external_libs').iterdir():
submodule_stem = submodule.stem
if submodule_stem == 'compute' and not use_gpu:
continue
copy_files_helper(Path('external_libs') / submodule_stem)
(CURRENT_DIR / "compile" / "windows").mkdir(parents=True, exist_ok=True)
copyfile(CURRENT_DIR.parent / "windows" / "LightGBM.sln",
CURRENT_DIR / "compile" / "windows" / "LightGBM.sln")
copyfile(CURRENT_DIR.parent / "windows" / "LightGBM.vcxproj",
CURRENT_DIR / "compile" / "windows" / "LightGBM.vcxproj")
copyfile(CURRENT_DIR.parent / "LICENSE",
CURRENT_DIR / "LICENSE")
copyfile(CURRENT_DIR.parent / "CMakeLists.txt",
CURRENT_DIR / "compile" / "CMakeLists.txt")
if integrated_opencl:
(CURRENT_DIR / "compile" / "cmake").mkdir(parents=True, exist_ok=True)
copyfile(CURRENT_DIR.parent / "cmake" / "IntegratedOpenCL.cmake",
CURRENT_DIR / "compile" / "cmake" / "IntegratedOpenCL.cmake")
def clear_path(path: Path) -> None:
if path.is_dir():
for file_name in path.iterdir():
if file_name.is_dir():
rmtree(file_name)
else:
file_name.unlink()
def silent_call(cmd: List[str], raise_error: bool = False, error_msg: str = '') -> int:
try:
with open(LOG_PATH, "ab") as log:
subprocess.check_call(cmd, stderr=log, stdout=log)
return 0
except Exception as err:
if raise_error:
raise Exception("\n".join((error_msg, LOG_NOTICE)))
return 1
def compile_cpp(
use_mingw: bool = False,
use_gpu: bool = False,
use_cuda: bool = False,
use_mpi: bool = False,
use_hdfs: bool = False,
boost_root: Optional[str] = None,
boost_dir: Optional[str] = None,
boost_include_dir: Optional[str] = None,
boost_librarydir: Optional[str] = None,
opencl_include_dir: Optional[str] = None,
opencl_library: Optional[str] = None,
nomp: bool = False,
bit32: bool = False,
integrated_opencl: bool = False
) -> None:
build_dir = CURRENT_DIR / "build_cpp"
rmtree(build_dir, ignore_errors=True)
build_dir.mkdir(parents=True)
original_dir = Path.cwd()
chdir(build_dir)
logger.info("Starting to compile the library.")
cmake_cmd = ["cmake", str(CURRENT_DIR / "compile")]
if integrated_opencl:
use_gpu = False
cmake_cmd.append("-D__INTEGRATE_OPENCL=ON")
if use_gpu:
cmake_cmd.append("-DUSE_GPU=ON")
if boost_root:
cmake_cmd.append(f"-DBOOST_ROOT={boost_root}")
if boost_dir:
cmake_cmd.append(f"-DBoost_DIR={boost_dir}")
if boost_include_dir:
cmake_cmd.append(f"-DBoost_INCLUDE_DIR={boost_include_dir}")
if boost_librarydir:
cmake_cmd.append(f"-DBOOST_LIBRARYDIR={boost_librarydir}")
if opencl_include_dir:
cmake_cmd.append(f"-DOpenCL_INCLUDE_DIR={opencl_include_dir}")
if opencl_library:
cmake_cmd.append(f"-DOpenCL_LIBRARY={opencl_library}")
elif use_cuda:
cmake_cmd.append("-DUSE_CUDA=ON")
if use_mpi:
cmake_cmd.append("-DUSE_MPI=ON")
if nomp:
cmake_cmd.append("-DUSE_OPENMP=OFF")
if use_hdfs:
cmake_cmd.append("-DUSE_HDFS=ON")
if system() in {'Windows', 'Microsoft'}:
if use_mingw:
if use_mpi:
                raise Exception('MPI version cannot be compiled by MinGW because it lacks an MPI library')
logger.info("Starting to compile with CMake and MinGW.")
silent_call(cmake_cmd + ["-G", "MinGW Makefiles"], raise_error=True,
error_msg='Please install CMake and all required dependencies first')
silent_call(["mingw32-make.exe", "_lightgbm", f"-I{build_dir}", "-j4"], raise_error=True,
error_msg='Please install MinGW first')
else:
status = 1
lib_path = CURRENT_DIR / "compile" / "windows" / "x64" / "DLL" / "lib_lightgbm.dll"
if not any((use_gpu, use_cuda, use_mpi, use_hdfs, nomp, bit32, integrated_opencl)):
logger.info("Starting to compile with MSBuild from existing solution file.")
platform_toolsets = ("v143", "v142", "v141", "v140")
for pt in platform_toolsets:
status = silent_call(["MSBuild",
str(CURRENT_DIR / "compile" / "windows" / "LightGBM.sln"),
"/p:Configuration=DLL",
"/p:Platform=x64",
f"/p:PlatformToolset={pt}"])
if status == 0 and lib_path.is_file():
break
else:
clear_path(CURRENT_DIR / "compile" / "windows" / "x64")
if status != 0 or not lib_path.is_file():
logger.warning("Compilation with MSBuild from existing solution file failed.")
if status != 0 or not lib_path.is_file():
arch = "Win32" if bit32 else "x64"
vs_versions = (
"Visual Studio 17 2022",
"Visual Studio 16 2019",
"Visual Studio 15 2017",
"Visual Studio 14 2015"
)
for vs in vs_versions:
logger.info(f"Starting to compile with {vs} ({arch}).")
status = silent_call(cmake_cmd + ["-G", vs, "-A", arch])
if status == 0:
break
else:
clear_path(build_dir)
if status != 0:
raise Exception("\n".join(('Please install Visual Studio or MS Build and all required dependencies first',
LOG_NOTICE)))
silent_call(["cmake", "--build", str(build_dir), "--target", "_lightgbm", "--config", "Release"], raise_error=True,
error_msg='Please install CMake first')
else: # Linux, Darwin (macOS), etc.
logger.info("Starting to compile with CMake.")
silent_call(cmake_cmd, raise_error=True, error_msg='Please install CMake and all required dependencies first')
silent_call(["make", "_lightgbm", f"-I{build_dir}", "-j4"], raise_error=True,
error_msg='An error has occurred while building lightgbm library file')
chdir(original_dir)
class CustomInstallLib(install_lib):
def install(self) -> List[str]:
outfiles = install_lib.install(self)
src = find_lib()[0]
dst = Path(self.install_dir) / 'lightgbm'
dst, _ = self.copy_file(src, str(dst))
outfiles.append(dst)
return outfiles
class CustomInstall(install):
user_options = install.user_options + LIGHTGBM_OPTIONS
def initialize_options(self) -> None:
install.initialize_options(self)
self.mingw = False
self.integrated_opencl = False
self.gpu = False
self.cuda = False
self.boost_root = None
self.boost_dir = None
self.boost_include_dir = None
self.boost_librarydir = None
self.opencl_include_dir = None
self.opencl_library = None
self.mpi = False
self.hdfs = False
self.precompile = False
self.nomp = False
self.bit32 = False
def run(self) -> None:
if (8 * struct.calcsize("P")) != 64:
if self.bit32:
logger.warning("You're installing 32-bit version. "
"This version is slow and untested, so use it on your own risk.")
else:
raise Exception("Cannot install LightGBM in 32-bit Python, "
"please use 64-bit Python instead.")
LOG_PATH.touch()
if not self.precompile:
copy_files(integrated_opencl=self.integrated_opencl, use_gpu=self.gpu)
compile_cpp(use_mingw=self.mingw, use_gpu=self.gpu, use_cuda=self.cuda, use_mpi=self.mpi,
use_hdfs=self.hdfs, boost_root=self.boost_root, boost_dir=self.boost_dir,
boost_include_dir=self.boost_include_dir, boost_librarydir=self.boost_librarydir,
opencl_include_dir=self.opencl_include_dir, opencl_library=self.opencl_library,
nomp=self.nomp, bit32=self.bit32, integrated_opencl=self.integrated_opencl)
install.run(self)
if LOG_PATH.is_file():
LOG_PATH.unlink()
class CustomBdistWheel(bdist_wheel):
user_options = bdist_wheel.user_options + LIGHTGBM_OPTIONS
def initialize_options(self) -> None:
bdist_wheel.initialize_options(self)
self.mingw = False
self.integrated_opencl = False
self.gpu = False
self.cuda = False
self.boost_root = None
self.boost_dir = None
self.boost_include_dir = None
self.boost_librarydir = None
self.opencl_include_dir = None
self.opencl_library = None
self.mpi = False
self.hdfs = False
self.precompile = False
self.nomp = False
self.bit32 = False
def finalize_options(self) -> None:
bdist_wheel.finalize_options(self)
install = self.reinitialize_command('install')
install.mingw = self.mingw
install.integrated_opencl = self.integrated_opencl
install.gpu = self.gpu
install.cuda = self.cuda
install.boost_root = self.boost_root
install.boost_dir = self.boost_dir
install.boost_include_dir = self.boost_include_dir
install.boost_librarydir = self.boost_librarydir
install.opencl_include_dir = self.opencl_include_dir
install.opencl_library = self.opencl_library
install.mpi = self.mpi
install.hdfs = self.hdfs
install.precompile = self.precompile
install.nomp = self.nomp
install.bit32 = self.bit32
class CustomSdist(sdist):
def run(self) -> None:
copy_files(integrated_opencl=True, use_gpu=True)
IS_SOURCE_FLAG_PATH.touch()
rmtree(CURRENT_DIR / 'lightgbm' / 'Release', ignore_errors=True)
rmtree(CURRENT_DIR / 'lightgbm' / 'windows' / 'x64', ignore_errors=True)
lib_file = CURRENT_DIR / 'lightgbm' / 'lib_lightgbm.so'
if lib_file.is_file():
lib_file.unlink()
sdist.run(self)
if IS_SOURCE_FLAG_PATH.is_file():
IS_SOURCE_FLAG_PATH.unlink()
if __name__ == "__main__":
CURRENT_DIR = Path(__file__).absolute().parent
LOG_PATH = Path.home() / 'LightGBM_compilation.log'
LOG_NOTICE = f"The full version of error log was saved into {LOG_PATH}"
IS_SOURCE_FLAG_PATH = CURRENT_DIR / '_IS_SOURCE_PACKAGE.txt'
_version_src = CURRENT_DIR.parent / 'VERSION.txt'
_version_dst = CURRENT_DIR / 'lightgbm' / 'VERSION.txt'
if _version_src.is_file():
copyfile(_version_src, _version_dst)
version = _version_dst.read_text(encoding='utf-8').strip()
readme = (CURRENT_DIR / 'README.rst').read_text(encoding='utf-8')
sys.path.insert(0, str(CURRENT_DIR))
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('LightGBM')
setup(name='lightgbm',
version=version,
description='LightGBM Python Package',
long_description=readme,
install_requires=[
'wheel',
'numpy',
'scipy',
'scikit-learn!=0.22.0'
],
extras_require={
'dask': [
'dask[array]>=2.0.0',
'dask[dataframe]>=2.0.0',
'dask[distributed]>=2.0.0',
'pandas',
],
},
maintainer='Yu Shi',
maintainer_email='yushi2@microsoft.com',
zip_safe=False,
cmdclass={
'install': CustomInstall,
'install_lib': CustomInstallLib,
'bdist_wheel': CustomBdistWheel,
'sdist': CustomSdist,
},
packages=find_packages(),
include_package_data=True,
license='The MIT License (Microsoft)',
url='https://github.com/microsoft/LightGBM',
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Scientific/Engineering :: Artificial Intelligence'])
| henry0312/LightGBM | python-package/setup.py | Python | mit | 15,596 |
from pawt import swing
lst = swing.JList(['a', 'b', 'c'])
swing.test(lst)
| DarioGT/OMS-PluginXML | org.modelsphere.sms/lib/jython-2.2.1/Demo/swing/ListDemo.py | Python | gpl-3.0 | 83 |
import os
import shutil
from lutris import settings
from lutris.config import LutrisConfig
from lutris.gui.dialogs import QuestionDialog, FileDialog
from lutris.runners.runner import Runner
from lutris.util.system import find_executable
class pcsxr(Runner):
"""PlayStation emulator"""
human_name = "PCSX-Reloaded"
package = "pcsxr"
platform = "Playstation"
game_options = [
{
"option": "iso",
"type": "file",
"label": "Disk image",
"default_path": "game_path",
'help': ("An ISO file containing the game data.")
}
]
runner_options = [
{
"option": "bios",
"type": "file",
"label": "Bios file",
'help': ("The Playstation bios file.\n"
"This file contains code from the original hardware "
"necessary to the emulation.")
},
{
'option': 'nogui',
'type': 'bool',
'label': "No emulator interface on exit",
'default': False,
'help': ("With this option on, hitting the Escape key during "
"play will stop the game. Otherwise it pauses the "
"emulation and displays PCSX-Reloaded's user interface, "
"allowing you to configure the emulator.")
}
]
tarballs = {
'x64': 'pcsxr-x86_64-1.9.95.tar.gz',
}
def get_executable(self):
# Lutris provided emulator
pcsxr_path = os.path.join(settings.RUNNER_DIR, 'pcsxr/bin/pcsxr')
if os.path.exists(pcsxr_path):
return pcsxr_path
# System wide available emulator
candidates = ('pcsx', 'pcsxr')
for candidate in candidates:
executable = find_executable(candidate)
if executable:
return executable
def install(self):
success = super(pcsxr, self).install()
if not success:
return False
config_path = os.path.expanduser('~/.pcsxr')
if not os.path.exists(config_path):
os.makedirs(config_path)
# Bios
bios_path = os.path.expanduser('~/.pcsxr/bios')
if not os.path.exists(bios_path):
os.makedirs(bios_path)
dlg = QuestionDialog({
'question': ("Do you want to select a Playstation BIOS file?\n\n"
"The BIOS is the core code running the machine.\n"
"PCSX-Reloaded includes an emulated BIOS, but it is "
"still incomplete. \n"
"Using an original BIOS avoids some bugs and reduced "
"compatibility \n"
"with some games."),
'title': "Use BIOS file?",
})
if dlg.result == dlg.YES:
bios_dlg = FileDialog("Select a BIOS file")
bios_src = bios_dlg.filename
shutil.copy(bios_src, bios_path)
# Save bios in config
bios_path = os.path.join(bios_path, os.path.basename(bios_src))
config = LutrisConfig(runner_slug='pcsxr')
config.raw_runner_config.update({'bios': bios_path})
config.save()
return True
def play(self):
"""Run Playstation game"""
iso = self.game_config.get('iso')
command = [self.get_executable()]
# Options
if self.runner_config.get('nogui') \
and os.path.exists(os.path.expanduser("~/.pcsxr")):
command.append("-nogui")
command.append("-cdfile")
command.append(iso)
command.append("-runcd")
return {'command': command}
| GoeGaming/lutris | lutris/runners/pcsxr.py | Python | gpl-3.0 | 3,721 |
# Copyright (c) 2014, Matt Layman
import os
import tempfile
from handroll.site import Site
class Factory(object):
"""A factory to produce commonly needed objects"""
def make_site(self):
"""Make a valid site instance."""
site = tempfile.mkdtemp()
open(os.path.join(site, 'template.html'), 'w').close()
return Site(site)
| mblayman/handroll | handroll/tests/factory.py | Python | bsd-2-clause | 364 |
from decimal import Decimal as D
from django.utils.translation import ugettext_lazy as _
from oscar.apps.shipping.base import Base
class Free(Base):
code = 'free-shipping'
name = _('Free shipping')
is_tax_known = True
charge_incl_tax = charge_excl_tax = D('0.00')
class NoShippingRequired(Free):
"""
This is a special shipping method that indicates that no shipping is
actually required (eg for digital goods).
"""
code = 'no-shipping-required'
name = _('No shipping required')
class FixedPrice(Base):
code = 'fixed-price-shipping'
name = _('Fixed price shipping')
def __init__(self, charge_excl_tax, charge_incl_tax=None):
self.charge_excl_tax = charge_excl_tax
if charge_incl_tax is not None:
self.charge_incl_tax = charge_incl_tax
self.is_tax_known = True
class OfferDiscount(Base):
"""
Wrapper class that applies a discount to an existing shipping method's
charges
"""
def __init__(self, method, offer):
self.method = method
self.offer = offer
@property
def is_discounted(self):
# We check to see if the discount is non-zero. It is possible to have
# zero shipping already, in which case the offer does not lead to
# any further discount.
return self.discount > 0
@property
def discount(self):
return self.get_discount()['discount']
@property
def code(self):
return self.method.code
@property
def name(self):
return self.method.name
@property
def description(self):
return self.method.description
def get_discount(self):
# Return a 'discount' dictionary in the same form as that used by the
# OfferApplications class
return {
'offer': self.offer,
'result': None,
'name': self.offer.name,
'description': '',
'voucher': self.offer.get_voucher(),
'freq': 1,
'discount': self.effective_discount}
@property
def charge_incl_tax_before_discount(self):
return self.method.charge_incl_tax
@property
def charge_excl_tax_before_discount(self):
return self.method.charge_excl_tax
@property
def is_tax_known(self):
return self.method.is_tax_known
@property
def effective_discount(self):
"""
The discount value.
"""
raise NotImplementedError()
@property
def charge_excl_tax(self):
raise NotImplementedError()
@property
def charge_incl_tax(self):
raise NotImplementedError()
class TaxExclusiveOfferDiscount(OfferDiscount):
@property
def effective_discount(self):
parent_charge = self.method.charge_excl_tax
return self.offer.shipping_discount(parent_charge)
@property
def charge_excl_tax(self):
parent_charge = self.method.charge_excl_tax
discount = self.offer.shipping_discount(parent_charge)
return parent_charge - discount
@property
def charge_incl_tax(self):
"""
Tax needs to be assigned later on
"""
return self.charge_excl_tax + self.tax
class TaxInclusiveOfferDiscount(OfferDiscount):
@property
def effective_discount(self):
parent_charge = self.method.charge_incl_tax
return self.offer.shipping_discount(parent_charge)
@property
def charge_incl_tax(self):
parent_charge = self.method.charge_incl_tax
discount = self.offer.shipping_discount(parent_charge)
return parent_charge - discount
@property
def charge_excl_tax(self):
# Adjust tax exclusive rate using the ratio of the two tax inclusive
# charges.
parent_charge_excl_tax = self.method.charge_excl_tax
parent_charge_incl_tax = self.method.charge_incl_tax
charge_incl_tax = self.charge_incl_tax
if parent_charge_incl_tax == 0:
return D('0.00')
charge = parent_charge_excl_tax * (charge_incl_tax /
parent_charge_incl_tax)
return charge.quantize(D('0.01'))
| elliotthill/django-oscar | oscar/apps/shipping/methods.py | Python | bsd-3-clause | 4,173 |
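# A minimal usage sketch of the OfferDiscount wrappers from the
# oscar/apps/shipping/methods.py file above, not taken from that repository.
# FlatShippingDiscount is a hypothetical stand-in for an Oscar offer object;
# only its name, shipping_discount() and get_voucher() are exercised here.
from decimal import Decimal as D
from oscar.apps.shipping.methods import FixedPrice, TaxExclusiveOfferDiscount

class FlatShippingDiscount(object):
    name = 'Flat 1.00 off shipping'

    def shipping_discount(self, charge):
        # Never discount below zero.
        return min(D('1.00'), charge)

    def get_voucher(self):
        return None

method = FixedPrice(charge_excl_tax=D('4.99'), charge_incl_tax=D('5.99'))
discounted = TaxExclusiveOfferDiscount(method, FlatShippingDiscount())
print(discounted.effective_discount)  # 1.00
print(discounted.charge_excl_tax)     # 3.99
print(discounted.is_discounted)       # True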
from __future__ import absolute_import
from datetime import timedelta
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils import timezone
from allauth.exceptions import ImmediateHttpResponse
from allauth.utils import build_absolute_uri
from allauth.socialaccount.helpers import render_authentication_error
from allauth.socialaccount import providers
from allauth.socialaccount.providers.oauth2.client import (OAuth2Client,
OAuth2Error)
from allauth.socialaccount.helpers import complete_social_login
from allauth.socialaccount.models import SocialToken, SocialLogin
from allauth.utils import get_request_param
from ..base import AuthAction, AuthError
class OAuth2Adapter(object):
expires_in_key = 'expires_in'
supports_state = True
redirect_uri_protocol = None
access_token_method = 'POST'
login_cancelled_error = 'access_denied'
scope_delimiter = ' '
basic_auth = False
headers = None
def __init__(self, request):
self.request = request
def get_provider(self):
return providers.registry.by_id(self.provider_id, self.request)
def complete_login(self, request, app, access_token, **kwargs):
"""
Returns a SocialLogin instance
"""
raise NotImplementedError
def parse_token(self, data):
token = SocialToken(token=data['access_token'])
token.token_secret = data.get('refresh_token', '')
expires_in = data.get(self.expires_in_key, None)
if expires_in:
token.expires_at = timezone.now() + timedelta(
seconds=int(expires_in))
return token
class OAuth2View(object):
@classmethod
def adapter_view(cls, adapter):
def view(request, *args, **kwargs):
self = cls()
self.request = request
self.adapter = adapter(request)
try:
return self.dispatch(request, *args, **kwargs)
except ImmediateHttpResponse as e:
return e.response
return view
def get_client(self, request, app):
callback_url = reverse(self.adapter.provider_id + "_callback")
callback_url = build_absolute_uri(
request, callback_url,
protocol=self.adapter.redirect_uri_protocol)
provider = self.adapter.get_provider()
scope = provider.get_scope(request)
client = OAuth2Client(self.request, app.client_id, app.secret,
self.adapter.access_token_method,
self.adapter.access_token_url,
callback_url,
scope,
scope_delimiter=self.adapter.scope_delimiter,
headers=self.adapter.headers,
basic_auth=self.adapter.basic_auth)
return client
class OAuth2LoginView(OAuth2View):
def dispatch(self, request):
provider = self.adapter.get_provider()
app = provider.get_app(self.request)
client = self.get_client(request, app)
action = request.GET.get('action', AuthAction.AUTHENTICATE)
auth_url = self.adapter.authorize_url
auth_params = provider.get_auth_params(request, action)
client.state = SocialLogin.stash_state(request)
try:
return HttpResponseRedirect(client.get_redirect_url(
auth_url, auth_params))
except OAuth2Error as e:
return render_authentication_error(
request,
provider.id,
exception=e)
class OAuth2CallbackView(OAuth2View):
def dispatch(self, request):
if 'error' in request.GET or 'code' not in request.GET:
# Distinguish cancel from error
auth_error = request.GET.get('error', None)
if auth_error == self.adapter.login_cancelled_error:
error = AuthError.CANCELLED
else:
error = AuthError.UNKNOWN
return render_authentication_error(
request,
self.adapter.provider_id,
error=error)
app = self.adapter.get_provider().get_app(self.request)
client = self.get_client(request, app)
try:
access_token = client.get_access_token(request.GET['code'])
token = self.adapter.parse_token(access_token)
token.app = app
login = self.adapter.complete_login(request,
app,
token,
response=access_token)
login.token = token
if self.adapter.supports_state:
login.state = SocialLogin \
.verify_and_unstash_state(
request,
get_request_param(request, 'state'))
else:
login.state = SocialLogin.unstash_state(request)
return complete_social_login(request, login)
except (PermissionDenied, OAuth2Error) as e:
return render_authentication_error(
request,
self.adapter.provider_id,
exception=e)
| rajexp/stepMuzic | allauth/socialaccount/providers/oauth2/views.py | Python | unlicense | 5,413 |
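# A hedged sketch of how a concrete provider plugs into the generic views
# above: subclass OAuth2Adapter with the provider's endpoints and implement
# complete_login(). The 'example' provider id and every URL are placeholders,
# the requests library is assumed to be available, and
# sociallogin_from_response() is the provider helper allauth adapters
# typically delegate to. A provider's urls.py would route to the two
# adapter_view() callables.
import requests
from allauth.socialaccount.providers.oauth2.views import (
    OAuth2Adapter, OAuth2CallbackView, OAuth2LoginView)

class ExampleOAuth2Adapter(OAuth2Adapter):
    provider_id = 'example'
    access_token_url = 'https://provider.example.com/oauth/token'
    authorize_url = 'https://provider.example.com/oauth/authorize'
    profile_url = 'https://provider.example.com/api/me'

    def complete_login(self, request, app, token, **kwargs):
        # Fetch the user's profile with the freshly issued access token and
        # let the provider build a SocialLogin from the payload.
        resp = requests.get(
            self.profile_url,
            headers={'Authorization': 'Bearer ' + token.token})
        extra_data = resp.json()
        return self.get_provider().sociallogin_from_response(request, extra_data)

oauth2_login = OAuth2LoginView.adapter_view(ExampleOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(ExampleOAuth2Adapter)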
#!/usr/bin/env python
# Copyright (C) 2013-2014 Computer Sciences Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Register and locate services within ezDiscovery."""
import optparse
import sys
from kazoo import client
NAMESPACE = 'ezDiscovery'
COMMON_APP_NAME = 'common_services'
ENDPOINTS = 'endpoints'
SECURITY = 'security'
SECURITY_ID = 'security_id'
zk = None
class NotConnected(Exception):
"""Thrown when an api call is sent before the Zookeeper client is setup."""
pass
def connect(host):
"""Create a connection to Zookeeper for use in discovery calls."""
global zk
zk = client.KazooClient(hosts=host)
zk.start()
def disconnect():
"""Disconnect from Zookeeper if the module has a connection."""
if zk:
zk.stop()
def _verify_connected():
"""Shortcut method to validate Zookeeper connection."""
if not zk:
raise NotConnected('Active connection required for operation.')
def register_endpoint(app_name, service_name, host, port):
"""Register an endpoint with Zookeeper."""
_verify_connected()
zk.ensure_path('/'.join([
NAMESPACE,
app_name,
service_name,
ENDPOINTS,
host + ':' + str(port)
]))
def register_common_endpoint(service_name, host, port):
"""Register a common endpoint under the default application name."""
register_endpoint(COMMON_APP_NAME, service_name, host, port)
def _recurse_deletion(path_parts):
"""Moves up the tree of the given path parts deleting if empty.
NOTE: Will not delete a root node.
"""
if len(path_parts) > 1:
path = '/'.join(path_parts)
if zk.exists(path) and not len(zk.get_children(path)):
zk.delete(path)
_recurse_deletion(path_parts[:-1])
def unregister_endpoint(app_name, service_name, host, port):
"""Unregister and endpoint with Zookeeper."""
_verify_connected()
_recurse_deletion([
NAMESPACE,
app_name,
service_name,
ENDPOINTS,
host + ':' + str(port)
])
def unregister_common_endpoint(service_name, host, port):
"""Unregister a common endpoint under the default application name."""
unregister_endpoint(COMMON_APP_NAME, service_name, host, port)
def _get_children(path):
"""Shortcut method to return the children on the given path."""
_verify_connected()
if zk.exists(path):
return zk.get_children(path)
else:
return []
def get_applications():
"""Get a list of applications registered in Zookeeper."""
return _get_children(NAMESPACE)
def get_services(app_name):
"""Get a list services by the given application name."""
return _get_children('/'.join([NAMESPACE, app_name]))
def get_common_services():
"""Get a list services under the common application name."""
return get_services(COMMON_APP_NAME)
def get_endpoints(app_name, service_name):
"""Get a list of endpoints by the given application and service name."""
return _get_children(
'/'.join([NAMESPACE, app_name, service_name, ENDPOINTS])
)
def get_common_endpoints(service_name):
"""Get a list of endpoints from the common application name and given
service name.
"""
return get_endpoints(COMMON_APP_NAME, service_name)
def is_service_common(service_name):
"""Checks if the given service name is in the common services application.
NOTE: Returns false if the service does not exist.
"""
_verify_connected()
return bool(zk.exists('/'.join([
NAMESPACE,
COMMON_APP_NAME,
service_name
])))
def set_security_id_for_application(app_name, security_id):
"""Set the security id for the given application."""
# TODO(jmears): Should these be cleaned up when an application is
# unregistered?
_verify_connected()
path = '/'.join([
NAMESPACE,
app_name,
SECURITY,
SECURITY_ID
])
zk.ensure_path(path)
zk.set(path, security_id)
def set_security_id_for_common_service(service_name, security_id):
"""Set the security id for the given common service."""
# TODO(jmears): Should these be cleaned up when the service is
# unregistered?
_verify_connected()
set_security_id_for_application(
'/'.join([COMMON_APP_NAME, service_name]),
security_id
)
def get_security_id_for_application(app_name):
"""Fetch the security id for the given application."""
_verify_connected()
try:
return zk.get('/'.join([
NAMESPACE,
app_name,
SECURITY,
SECURITY_ID
]))[0]
except client.NoNodeError:
pass
def get_security_id_for_common_service(service_name):
"""Fetch the security id for the given common service."""
_verify_connected()
return get_security_id_for_application(
'/'.join([COMMON_APP_NAME, service_name])
)
def _arg_count(args, number, message='Invalid arguments.'):
"""Counts the arguments given and exits with failed status if needed.
Really just a convenience method for the main method, not part of the
discovery API.
"""
if len(args) < number:
print message
sys.exit(1)
def _invalid_action(action=''):
"""Prints an error message and exits."""
if action:
print 'Invalid action: %s' % action
else:
print 'Action not specified.'
sys.exit(1)
ACTIONS = {
'register': {
'args': [5, 'Must provide app name, service name, host and port.'],
'method': register_endpoint
},
'unregister': {
'args': [5, 'Must provide app name, service name, host and port.'],
'method': unregister_endpoint
},
'list-applications': {
'method': get_applications
},
'list-services': {
'args': [2, 'Must provide an app name.'],
'method': get_services
},
'list-common-services': {
'method': get_common_services
},
'list-endpoints': {
'args': [3, 'Must provide app name and service name.'],
'method': get_endpoints
},
'list-common-endpoints': {
'args': [2, 'Must provide a common service name.'],
'method': get_common_endpoints
},
'is-service-common': {
'args': [2, 'Must provide a service name.'],
'method': is_service_common
},
'application-set-security-id': {
'args': [3, 'Must provide an app name and security id.'],
'method': set_security_id_for_application
},
'application-get-security-id': {
'args': [2, 'Must provide an app name.'],
'method': get_security_id_for_application
},
'common-service-set-security-id': {
'args': [3, 'Must provide a service name and security id.'],
'method': set_security_id_for_common_service
},
'common-service-get-security-id': {
'args': [2, 'Must provide a service name.'],
'method': get_security_id_for_common_service
},
}
def main():
"""Module will act as a command line utility if not imported as a module in
another application.
"""
parser = optparse.OptionParser(
usage='usage: %prog [options] ACTION arg1 arg2 ...'
)
parser.add_option(
'-z',
'--zookeeper',
default='localhost:2181',
help='Zookeeper location (host:port).'
)
options, args = parser.parse_args()
if not args:
_invalid_action()
action = args[0]
if action in ACTIONS:
action = ACTIONS[action]
if 'args' in action:
_arg_count(args, action['args'][0], action['args'][1])
connect(options.zookeeper)
method_args = args[1:]
result = None
if method_args:
result = action['method'](*method_args)
else:
result = action['method']()
if result is not None: # Some commands return a boolean.
if isinstance(result, list):
for i in result:
print i
else:
print result
else:
print 'Invalid action: %s' % action
sys.exit(1)
disconnect()
if __name__ == '__main__':
main()
| ezbake/ezbake-discovery | servicediscovery/python/ezdiscovery.py | Python | apache-2.0 | 8,729 |
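# A short sketch of driving the ezdiscovery module above as a library rather
# than through its command-line interface. The application/service names, host
# and port are placeholders, and a Zookeeper reachable at localhost:2181 is
# assumed.
import ezdiscovery

ezdiscovery.connect('localhost:2181')
try:
    # Register an endpoint, list it back, then remove it again.
    ezdiscovery.register_endpoint('my_app', 'my_service', '10.0.0.5', 8080)
    print ezdiscovery.get_endpoints('my_app', 'my_service')  # ['10.0.0.5:8080']
    ezdiscovery.unregister_endpoint('my_app', 'my_service', '10.0.0.5', 8080)
finally:
    ezdiscovery.disconnect()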
#
# jython examples for jas.
# $Id$
#
import sys
from java.lang import System
from jas import PolyRing, QQ, AN, RF
from jas import terminate, startLog
# polynomial examples: prime/primary decomposition in Q(sqrt(2))(x)(sqrt(x))[y,z]
Q = PolyRing(QQ(),"w2",PolyRing.lex);
print "Q = " + str(Q);
[e,a] = Q.gens();
#print "e = " + str(e);
print "a = " + str(a);
root = a**2 - 2;
print "root = " + str(root);
Q2 = AN(root,field=True);
print "Q2 = " + str(Q2.factory());
[one,w2] = Q2.gens();
#print "one = " + str(one);
#print "w2 = " + str(w2);
print;
Qp = PolyRing(Q2,"x",PolyRing.lex);
print "Qp = " + str(Qp);
[ep,wp,ap] = Qp.gens();
#print "ep = " + str(ep);
#print "wp = " + str(wp);
#print "ap = " + str(ap);
print;
Qr = RF(Qp);
print "Qr = " + str(Qr.factory());
[er,wr,ar] = Qr.gens();
#print "er = " + str(er);
#print "wr = " + str(wr);
#print "ar = " + str(ar);
print;
Qwx = PolyRing(Qr,"wx",PolyRing.lex);
print "Qwx = " + str(Qwx);
[ewx,wwx,ax,wx] = Qwx.gens();
#print "ewx = " + str(ewx);
print "ax = " + str(ax);
#print "wwx = " + str(wwx);
print "wx = " + str(wx);
print;
rootx = wx**2 - ax;
print "rootx = " + str(rootx);
Q2x = AN(rootx,field=True);
print "Q2x = " + str(Q2x.factory());
[ex2,w2x2,ax2,wx] = Q2x.gens();
#print "ex2 = " + str(ex2);
#print "w2x2 = " + str(w2x2);
#print "ax2 = " + str(ax2);
#print "wx = " + str(wx);
print;
Yr = PolyRing(Q2x,"y,z",PolyRing.lex)
print "Yr = " + str(Yr);
[e,w2,x,wx,y,z] = Yr.gens();
print "e = " + str(e);
print "w2 = " + str(w2);
print "x = " + str(x);
print "wx = " + str(wx);
print "y = " + str(y);
print "z = " + str(z);
print;
f1 = ( y**2 - x ) * ( y**2 - 2 );
#f1 = ( y**2 - x )**3 * ( y**2 - 2 )**2;
f2 = ( z**2 - y**2 );
print "f1 = ", f1;
print "f2 = ", f2;
print;
F = Yr.ideal( list=[f1,f2] );
print "F = ", F;
print;
#sys.exit();
startLog();
t = System.currentTimeMillis();
P = F.primeDecomp();
#P = F.primaryDecomp();
t1 = System.currentTimeMillis() - t;
print "P = ", P;
print;
print "prime/primary decomp time =", t1, "milliseconds";
print;
print "F = ", F;
print;
#startLog();
terminate();
| breandan/java-algebra-system | examples/prime-decomp_algeb_trans.py | Python | gpl-2.0 | 2,178 |
from flask import Flask, Response
import json
from rdflib import Namespace
app = Flask(__name__)
vocabsNS = Namespace("http://memesearch.herokuapp.com/static/vocabs/")
###===================================================================
### Data access functions
###===================================================================
MEME_DB = {
"bn0x9np": {
"@id": "/meme/bn0x9np",
"@type": ["hydra:Link", "meme:MemeDetailPage"],
"name": "Its an unbearably small world",
"image": "http://i.imgur.com/ej3mFrZb.jpg",
"url": "/meme/bn0x9np",
},
"DiaODFK": {
"@id": "/meme/DiaODFK",
"@type": ["hydra:Link", "meme:MemeDetailPage"],
"name": "I shall not pass.",
"image": "http://i.imgur.com/DiaODFKb.jpg",
"url": "/meme/DiaODFK"
}
}
class DB(object):
def search(self, query):
"""
Returns a list of meme:Meme objects
"""
return [MEME_DB['bn0x9np'], MEME_DB['DiaODFK']]
def recent(self):
"""
Returns a list of recent meme:Meme objects
"""
return [MEME_DB['bn0x9np'], MEME_DB['DiaODFK']]
def meme(self, index):
return MEME_DB.get(index)
###===================================================================
### Utilities
###===================================================================
def _json_response(data):
return Response(
json.dumps(
_merge_dict(_web_page(), data)
),
content_type = "application/json",
)
def _merge_dict(base, extension):
return dict(base, **extension)
###===================================================================
### Base objects
###===================================================================
def _web_page():
return {
"@context": vocabsNS['meme.jsonld'].toPython(),
"homepage": "/",
"searchLink": {
"@type": "IriTemplate",
"template": "/meme-search{?q}",
"mappings": [
{
"@type": "IriTemplateMapping",
"variable": "q",
"property": "hydra:freetextQuery",
"required": True,
}
]
},
}
###===================================================================
### Resources
###===================================================================
###-------------------------------------------------------------------
### meme.jsonld resource
###-------------------------------------------------------------------
@app.route("/meme.jsonld")
def meme_jsonld():
return Response(
open("./meme.jsonld").read(),
content_type = "application/json"
)
###-------------------------------------------------------------------
### meme:SearchResultsPage
###-------------------------------------------------------------------
@app.route("/meme-search")
def meme_search():
query = ""
return _json_response(
_search_collection(query)
)
def _search_collection(query):
"""
Returns a meme:SearchResultPage
"""
return {
"@type": "meme:SearchResultPage",
"member": _search_member(query)
}
def _search_member(query):
"""
Returns a list of meme:Meme objects
"""
# TODO: do the full text search
return DB().search(query)
###-------------------------------------------------------------------
### Meme Detail Page
###-------------------------------------------------------------------
@app.route("/meme/<index>")
def meme(index):
# TODO: do lookup
return _json_response(_meme_resource(index))
def _meme_resource(index):
"""
Returns a meme:MemeDetailPage
"""
meme = DB().meme(index)
if meme is not None:
return meme
###-------------------------------------------------------------------
### meme:IndexPage
###-------------------------------------------------------------------
@app.route("/")
def index():
return _json_response(_index_resource())
def _index_resource():
"""
Returns a meme:IndexPage
"""
return {
"@type": "meme:IndexPage",
"recent": _recent_collection(),
}
def _recent_collection():
return {
"@type": "meme:RecentMemeCollection",
"member": DB().recent()
}
if __name__ == '__main__':
app.run(debug=True)
| ericmoritz/memesearch-rdf | memesearch_www.py | Python | bsd-3-clause | 4,433 |
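# A brief sketch exercising the Flask app above with Flask's built-in test
# client. The module name memesearch_www is taken from the file path, and the
# meme id comes from the in-memory MEME_DB defined in that file.
from memesearch_www import app

client = app.test_client()

resp = client.get('/')                  # meme:IndexPage listing recent memes
print(resp.status_code)                 # 200
print(resp.content_type)                # application/json

resp = client.get('/meme/bn0x9np')      # meme:MemeDetailPage for a known id
print(resp.data)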
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import re
import sys
import os
import glob
import helper
from mockable_test_result import MockableTestResult
from runner import path_to_enlightenment
from libs.colorama import init, Fore, Style
init() # init colorama
class Sensei(MockableTestResult):
def __init__(self, stream):
unittest.TestResult.__init__(self)
self.stream = stream
self.prevTestClassName = None
self.tests = path_to_enlightenment.koans()
self.pass_count = 0
self.lesson_pass_count = 0
self.all_lessons = None
def startTest(self, test):
MockableTestResult.startTest(self, test)
if helper.cls_name(test) != self.prevTestClassName:
self.prevTestClassName = helper.cls_name(test)
if not self.failures:
self.stream.writeln()
self.stream.writeln("{0}{1}Thinking {2}".format(
Fore.RESET, Style.NORMAL, helper.cls_name(test)))
if helper.cls_name(test) != 'AboutAsserts':
self.lesson_pass_count += 1
def addSuccess(self, test):
if self.passesCount():
MockableTestResult.addSuccess(self, test)
self.stream.writeln( \
" {0}{1}{2} has expanded your awareness.{3}{4}" \
.format(Fore.GREEN, Style.BRIGHT, test._testMethodName, \
Fore.RESET, Style.NORMAL))
self.pass_count += 1
def addError(self, test, err):
# Having 1 list for errors and 1 list for failures would mess with
# the error sequence
self.addFailure(test, err)
def passesCount(self):
return not (self.failures and helper.cls_name(self.failures[0][0]) !=
self.prevTestClassName)
def addFailure(self, test, err):
MockableTestResult.addFailure(self, test, err)
def sortFailures(self, testClassName):
table = list()
for test, err in self.failures:
if helper.cls_name(test) == testClassName:
m = re.search("(?<= line )\d+" ,err)
if m:
tup = (int(m.group(0)), test, err)
table.append(tup)
if table:
return sorted(table)
else:
return None
def firstFailure(self):
if not self.failures: return None
table = self.sortFailures(helper.cls_name(self.failures[0][0]))
if table:
return (table[0][1], table[0][2])
else:
return None
def learn(self):
self.errorReport()
self.stream.writeln("")
self.stream.writeln("")
self.stream.writeln(self.report_progress())
if self.failures:
self.stream.writeln(self.report_remaining())
self.stream.writeln("")
self.stream.writeln(self.say_something_zenlike())
if self.failures: sys.exit(-1)
self.stream.writeln(
"\n{0}**************************************************" \
.format(Fore.RESET))
self.stream.writeln("\n{0}That was the last one, well done!" \
.format(Fore.MAGENTA))
self.stream.writeln(
"\nIf you want more, take a look at about_extra_credit_task.py")
def errorReport(self):
problem = self.firstFailure()
if not problem: return
test, err = problem
self.stream.writeln(" {0}{1}{2} has damaged your "
"karma.".format(Fore.RED, Style.BRIGHT, test._testMethodName))
self.stream.writeln("\n{0}{1}You have not yet reached enlightenment ..." \
.format(Fore.RESET, Style.NORMAL))
self.stream.writeln("{0}{1}{2}".format(Fore.RED, \
Style.BRIGHT, self.scrapeAssertionError(err)))
self.stream.writeln("")
self.stream.writeln("{0}{1}Please meditate on the following code:" \
.format(Fore.RESET, Style.NORMAL))
self.stream.writeln("{0}{1}{2}{3}{4}".format(Fore.YELLOW, Style.BRIGHT, \
self.scrapeInterestingStackDump(err), Fore.RESET, Style.NORMAL))
def scrapeAssertionError(self, err):
if not err: return ""
error_text = ""
count = 0
for line in err.splitlines():
m = re.search("^[^^ ].*$",line)
if m and m.group(0):
count+=1
if count>1:
error_text += (" " + line.strip()).rstrip() + '\n'
return error_text.strip('\n')
def scrapeInterestingStackDump(self, err):
if not err:
return ""
lines = err.splitlines()
sep = '@@@@@SEP@@@@@'
stack_text = ""
for line in lines:
m = re.search("^ File .*$",line)
if m and m.group(0):
stack_text += '\n' + line
m = re.search("^ \w(\w)+.*$",line)
if m and m.group(0):
stack_text += sep + line
lines = stack_text.splitlines()
stack_text = ""
for line in lines:
m = re.search("^.*[/\\\\]koans[/\\\\].*$",line)
if m and m.group(0):
stack_text += line + '\n'
stack_text = stack_text.replace(sep, '\n').strip('\n')
stack_text = re.sub(r'(about_\w+.py)',
r"{0}\1{1}".format(Fore.BLUE, Fore.YELLOW), stack_text)
stack_text = re.sub(r'(line \d+)',
r"{0}\1{1}".format(Fore.BLUE, Fore.YELLOW), stack_text)
return stack_text
def report_progress(self):
return "You have completed {0} koans and " \
"{1} lessons.".format(
self.pass_count,
self.lesson_pass_count)
def report_remaining(self):
koans_remaining = self.total_koans() - self.pass_count
lessons_remaining = self.total_lessons() - self.lesson_pass_count
return "You are now {0} koans and {1} lessons away from " \
"reaching enlightenment.".format(
koans_remaining,
lessons_remaining)
# Hat's tip to Tim Peters for the zen statements from The 'Zen
# of Python' (http://www.python.org/dev/peps/pep-0020/)
#
# Also a hat's tip to Ara T. Howard for the zen statements from his
# metakoans Ruby Quiz (http://rubyquiz.com/quiz67.html) and
# Edgecase's later permutation in the Ruby Koans
def say_something_zenlike(self):
if self.failures:
turn = self.pass_count % 37
zenness = "";
if turn == 0:
zenness = "Beautiful is better than ugly."
elif turn == 1 or turn == 2:
zenness = "Explicit is better than implicit."
elif turn == 3 or turn == 4:
zenness = "Simple is better than complex."
elif turn == 5 or turn == 6:
zenness = "Complex is better than complicated."
elif turn == 7 or turn == 8:
zenness = "Flat is better than nested."
elif turn == 9 or turn == 10:
zenness = "Sparse is better than dense."
elif turn == 11 or turn == 12:
zenness = "Readability counts."
elif turn == 13 or turn == 14:
zenness = "Special cases aren't special enough to " \
"break the rules."
elif turn == 15 or turn == 16:
zenness = "Although practicality beats purity."
elif turn == 17 or turn == 18:
zenness = "Errors should never pass silently."
elif turn == 19 or turn == 20:
zenness = "Unless explicitly silenced."
elif turn == 21 or turn == 22:
zenness = "In the face of ambiguity, refuse the " \
"temptation to guess."
elif turn == 23 or turn == 24:
zenness = "There should be one-- and preferably only " \
"one --obvious way to do it."
elif turn == 25 or turn == 26:
zenness = "Although that way may not be obvious at " \
"first unless you're Dutch."
elif turn == 27 or turn == 28:
zenness = "Now is better than never."
elif turn == 29 or turn == 30:
zenness = "Although never is often better than right " \
"now."
elif turn == 31 or turn == 32:
zenness = "If the implementation is hard to explain, " \
"it's a bad idea."
elif turn == 33 or turn == 34:
zenness = "If the implementation is easy to explain, " \
"it may be a good idea."
else:
zenness = "Namespaces are one honking great idea -- " \
"let's do more of those!"
return "{0}{1}{2}{3}".format(Fore.CYAN, zenness, Fore.RESET, Style.NORMAL);
else:
return "{0}Nobody ever expects the Spanish Inquisition." \
.format(Fore.CYAN)
# Hopefully this will never ever happen!
return "The temple is collapsing! Run!!!"
def total_lessons(self):
all_lessons = self.filter_all_lessons()
if all_lessons:
return len(all_lessons)
else:
return 0
def total_koans(self):
return self.tests.countTestCases()
def filter_all_lessons(self):
cur_dir = os.path.split(os.path.realpath(__file__))[0]
if not self.all_lessons:
self.all_lessons = glob.glob('{0}/../koans/about*.py'.format(cur_dir))
self.all_lessons = filter(lambda filename:
"about_extra_credit" not in filename,
self.all_lessons)
return self.all_lessons
| ammiranda/python_koans | python2/runner/sensei.py | Python | mit | 9,856 |
from django.contrib import admin
from django.contrib.contenttypes import generic
from models import *
class TaxonomyItemInline(admin.StackedInline):
model = TaxonomyItem
extra = 1
class TaxonomyMapInline(generic.GenericStackedInline):
model = TaxonomyMap
extra = 1
class TaxonomyGroupAdmin(admin.ModelAdmin):
inlines = [TaxonomyItemInline]
class TaxonomyItemAdmin(admin.ModelAdmin):
list_display = ('name', 'taxonomy_group')
list_filter = ('taxonomy_group',)
admin.site.register(TaxonomyGroup, TaxonomyGroupAdmin)
admin.site.register(TaxonomyItem, TaxonomyItemAdmin)
admin.site.register(TaxonomyMap)
| jmichalicek/django-taxonomies | taxonomy/admin.py | Python | bsd-2-clause | 633 |
import os
import textwrap
import unittest
from mock import Mock
from conans.client.generators import PremakeGenerator
from conans.model.build_info import CppInfo
from conans.model.conan_file import ConanFile
from conans.model.env_info import EnvValues
from conans.model.ref import ConanFileReference
from conans.model.settings import Settings
from conans.test.utils.test_files import temp_folder
from conans.util.files import save
class PremakeGeneratorTest(unittest.TestCase):
content_template = textwrap.dedent("""\
#!lua
conan_build_type = "None"
conan_arch = "None"
conan_includedirs = {{"{include1}",
"{include2}"}}
conan_libdirs = {{"{lib1}",
"{lib2}"}}
conan_bindirs = {{"{bin1}",
"{bin2}"}}
conan_libs = {{"libfoo", "libbar"}}
conan_system_libs = {{"syslib1", "syslib2"}}
conan_defines = {{"MYDEFINE2", "MYDEFINE1"}}
conan_cxxflags = {{"-march=native", "-fPIE"}}
conan_cflags = {{"-mtune=native", "-fPIC"}}
conan_sharedlinkflags = {{"-framework AudioFoundation", "-framework \\\"Some Spaced Framework\\\"", "-framework Cocoa"}}
conan_exelinkflags = {{"-framework VideoToolbox", "-framework \\\"Other Spaced Framework\\\"", "-framework QuartzCore"}}
conan_frameworks = {{"AudioUnit.framework"}}
conan_includedirs_MyPkg1 = {{"{include1}"}}
conan_libdirs_MyPkg1 = {{"{lib1}"}}
conan_bindirs_MyPkg1 = {{"{bin1}"}}
conan_libs_MyPkg1 = {{"libfoo"}}
conan_system_libs_MyPkg1 = {{"syslib1"}}
conan_defines_MyPkg1 = {{"MYDEFINE1"}}
conan_cxxflags_MyPkg1 = {{"-fPIE"}}
conan_cflags_MyPkg1 = {{"-fPIC"}}
conan_sharedlinkflags_MyPkg1 = {{"-framework Cocoa"}}
conan_exelinkflags_MyPkg1 = {{"-framework QuartzCore"}}
conan_frameworks_MyPkg1 = {{"AudioUnit.framework"}}
conan_rootpath_MyPkg1 = "{root1}"
conan_includedirs_MyPkg2 = {{"{include2}"}}
conan_libdirs_MyPkg2 = {{"{lib2}"}}
conan_bindirs_MyPkg2 = {{"{bin2}"}}
conan_libs_MyPkg2 = {{"libbar"}}
conan_system_libs_MyPkg2 = {{"syslib2"}}
conan_defines_MyPkg2 = {{"MYDEFINE2"}}
conan_cxxflags_MyPkg2 = {{"-march=native"}}
conan_cflags_MyPkg2 = {{"-mtune=native"}}
conan_sharedlinkflags_MyPkg2 = {{"-framework AudioFoundation", "-framework \\\"Some Spaced Framework\\\""}}
conan_exelinkflags_MyPkg2 = {{"-framework VideoToolbox", "-framework \\\"Other Spaced Framework\\\""}}
conan_frameworks_MyPkg2 = {{}}
conan_rootpath_MyPkg2 = "{root2}"
function conan_basic_setup()
configurations{{conan_build_type}}
architecture(conan_arch)
includedirs{{conan_includedirs}}
libdirs{{conan_libdirs}}
links{{conan_libs}}
links{{conan_system_libs}}
links{{conan_frameworks}}
defines{{conan_defines}}
bindirs{{conan_bindirs}}
end
""")
def setUp(self):
self.tmp_folder1 = temp_folder()
self.tmp_folder2 = temp_folder()
save(os.path.join(self.tmp_folder1, "include1", "file.h"), "")
save(os.path.join(self.tmp_folder2, "include2", "file.h"), "")
save(os.path.join(self.tmp_folder1, "lib1", "file.a"), "")
save(os.path.join(self.tmp_folder2, "lib2", "file.a"), "")
save(os.path.join(self.tmp_folder1, "bin1", "file.bin"), "")
save(os.path.join(self.tmp_folder2, "bin2", "file.bin"), "")
self.conanfile = ConanFile(Mock(), None)
self.conanfile.initialize(Settings({}), EnvValues())
ref = ConanFileReference.loads("MyPkg1/0.1@lasote/stables")
cpp_info = CppInfo(ref.name, self.tmp_folder1)
cpp_info.defines = ["MYDEFINE1"]
cpp_info.includedirs = ['include1']
cpp_info.libdirs = ['lib1']
cpp_info.libs = ['libfoo']
cpp_info.system_libs = ['syslib1']
cpp_info.bindirs = ['bin1']
cpp_info.version = "0.1"
cpp_info.cflags = ['-fPIC']
cpp_info.cxxflags = ['-fPIE']
cpp_info.sharedlinkflags = ['-framework Cocoa']
cpp_info.exelinkflags = ['-framework QuartzCore']
cpp_info.frameworks = ['AudioUnit']
self.conanfile.deps_cpp_info.add(ref.name, cpp_info)
ref = ConanFileReference.loads("MyPkg2/3.2.3@lasote/stables")
cpp_info = CppInfo(ref.name, self.tmp_folder2)
cpp_info.defines = ["MYDEFINE2"]
cpp_info.includedirs = ['include2']
cpp_info.libdirs = ['lib2']
cpp_info.libs = ['libbar']
cpp_info.system_libs = ['syslib2']
cpp_info.bindirs = ['bin2']
cpp_info.version = "3.2.3"
cpp_info.cflags = ['-mtune=native']
cpp_info.cxxflags = ['-march=native']
cpp_info.sharedlinkflags = ['-framework AudioFoundation', '-framework "Some Spaced Framework"']
cpp_info.exelinkflags = ['-framework VideoToolbox', '-framework "Other Spaced Framework"']
self.conanfile.deps_cpp_info.add(ref.name, cpp_info)
def test_variables_content(self):
generator = PremakeGenerator(self.conanfile)
content = generator.content
inc1 = os.path.join(self.tmp_folder1, 'include1').replace('\\', '/')
inc2 = os.path.join(self.tmp_folder2, 'include2').replace('\\', '/')
lib1 = os.path.join(self.tmp_folder1, 'lib1').replace('\\', '/')
lib2 = os.path.join(self.tmp_folder2, 'lib2').replace('\\', '/')
bin1 = os.path.join(self.tmp_folder1, 'bin1').replace('\\', '/')
bin2 = os.path.join(self.tmp_folder2, 'bin2').replace('\\', '/')
root1 = self.tmp_folder1.replace('\\', '/')
root2 = self.tmp_folder2.replace('\\', '/')
expected_content = self.content_template.format(include1=inc1, include2=inc2,
lib1=lib1, lib2=lib2,
bin1=bin1, bin2=bin2,
root1=root1, root2=root2)
self.assertEqual(expected_content, content)
| conan-io/conan | conans/test/unittests/client/generators/premake_test.py | Python | mit | 5,948 |