code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
import sublime, sublime_plugin
class RunOnSave(sublime_plugin.EventListener):
    """Runs a project-configured build command every time a file is saved."""

    def on_post_save(self, view):
        """Execute the project's `command` through the `exec` build target
        after a save, when the project settings enable `run_on_save`.

        Settings read from the view:
          run_on_save            -- must equal 1 for anything to happen
          command                -- the command list handed to `exec` as `cmd`
          path                   -- optional PATH override for the build
          environment_variables  -- optional env dict for the build
        """
        settings = view.settings()
        # Only act when the project explicitly opts in.
        if settings.get('run_on_save') != 1:
            return
        command = settings.get('command')
        if command is None:
            return
        option_dict = {'cmd': command}
        # Run from the first project folder, if any (idiomatic truthiness
        # replaces the old `is not None and len(...) > 0` check).
        folders = view.window().folders()
        if folders:
            option_dict['working_dir'] = folders[0]
        path = settings.get('path')
        if path is not None:
            option_dict['path'] = path
        environment_dict = settings.get('environment_variables')
        if environment_dict:
            option_dict['env'] = environment_dict
        view.window().run_command('exec', option_dict)
| chrishadi/SublimeRunOnSave | runonsave.py | Python | mit | 830 |
from django.test import SimpleTestCase
class MaintenanceModeTestCase(SimpleTestCase):
    """Covers routing and content for both states of MAINTENANCE_MODE."""

    def test_maintenance_mode_enabled_home_page(self):
        """Flag on: the home page redirects to /maintenance with a 503."""
        with self.settings(MAINTENANCE_MODE=True):
            resp = self.client.get("/", follow=True)
            self.assertEqual(resp.status_code, 503)
            self.assertIn("This service is down for maintenance", resp.content)
            self.assertEqual(resp.redirect_chain, [("http://testserver/maintenance", 302)])

    def test_maintenance_mode_enabled_maintenance_page(self):
        """Flag on: the maintenance page itself answers with a 503."""
        with self.settings(MAINTENANCE_MODE=True):
            resp = self.client.get("/maintenance", follow=False)
            self.assertEqual(resp.status_code, 503)
            self.assertIn("This service is down for maintenance", resp.content)

    def test_maintenance_mode_disabled_home_page(self):
        """Flag off: the home page is served normally."""
        with self.settings(MAINTENANCE_MODE=False):
            resp = self.client.get("/", follow=True)
            self.assertEqual(resp.status_code, 200)
            self.assertNotIn("This service is down for maintenance", resp.content)

    def test_maintenance_mode_disabled_maintenance_page(self):
        """Flag off: /maintenance bounces straight back to the home page."""
        with self.settings(MAINTENANCE_MODE=False):
            resp = self.client.get("/maintenance", follow=True)
            self.assertEqual(resp.status_code, 200)
            self.assertEqual(resp.redirect_chain[0], ("http://testserver/", 302))
            self.assertNotIn("This service is down for maintenance", resp.content)
| ministryofjustice/cla_frontend | cla_frontend/apps/core/testing/test_views.py | Python | mit | 1,532 |
from django.conf.urls import url
from django.views.generic import RedirectView
from .views import wordcloud_demoview
# Route for the wordcloud demo view. The trailing slash is optional
# ("wordcloud" or "wordcloud/"); note the pattern is not anchored with ^,
# so it matches any URL ending in "wordcloud" -- presumably intentional
# for inclusion under a prefix, TODO confirm.
urlpatterns = [
    url(r"wordcloud/?$", wordcloud_demoview, name="wordcloud")
]
| grapeshot/django-wordcloud-demo | wordcloud_demo/urls.py | Python | mit | 200 |
#!python
#-*- encoding=utf-8 -*-
import sys, sqlite3, logging, os, os.path
import wx, time, re, copy, webbrowser
import wx.grid, wx.html
import json, math
from cfg import config
from util import *
from model import *
from view import *
class App(wx.App):
    """Application object: owns the configuration, the models and the
    wx main loop, and wires authentication to the main window."""

    # Process-wide singleton, assigned right after construction at module level.
    instance = None

    def __init__(self, conf, *args):
        wx.App.__init__(self, *args) # do not redirect for now
        self.user = None
        self.cfg = conf
        self.printer = None

    def OnInit(self):
        # wx hook; returning True lets startup continue.
        return True

    @staticmethod
    def GetInstance():
        """Return the process-wide App singleton."""
        return App.instance

    def Quit(self):
        """Tear down wx and terminate the process."""
        wx.Exit()
        sys.exit()

    def Run(self):
        """First-run setup (or normal bootstrap), expiration check, login UI."""
        if not os.path.isfile(self.cfg.datapath):
            self.setup()
        else:
            self.bootstrap()
        self.checkExpiration()
        AuthFrame(self).Show()

    def bootstrap(self):
        """Open the database connection and construct models and logger."""
        self.dbconn = DB.getInstance().conn
        self.modelUser = ModelUser(self.dbconn)
        self.modelSheet = ModelSheet(self.dbconn)
        self.logger = XLog.getDefaultLogger()

    def setup(self):
        """First run: create the data directories and initialize tables."""
        try:
            os.makedirs(os.path.join(self.cfg.rootdir, r'data'))
            os.makedirs(os.path.join(self.cfg.rootdir, r'log'))
            os.makedirs(os.path.join(self.cfg.rootdir, r'cache'))
        except OSError:  # was a bare except; only makedirs can fail here
            alert(u'程序初始化失败, 即将退出')
            self.Quit()
        self.bootstrap()
        self.modelUser.initTable()
        self.modelSheet.initTable()

    def checkExpiration(self):
        """Trial-period check. Currently disabled: returns immediately."""
        self.expirationTip = ''
        return True # skip expiration check
        # --- disabled trial logic kept for reference (unreachable) ---
        self.appExpireInDays = self.cfg.expiration
        time0 = self.modelUser.getEarliestDate()
        daysElapsed = 0
        if time0 > 0:
            daysElapsed = int((time.time() - time0) / 86400)
        if daysElapsed > -1:
            if self.appExpireInDays < daysElapsed:
                self.expire()
            self.appExpireInDays -= daysElapsed
            # (a stray no-op `daysElapsed` expression was removed here)
            self.expirationTip = u'试用版,{}天后过期'.format(self.appExpireInDays)
        else:
            # Clock moved backwards: treat as tampering.
            self.expire(u'×系统时间混乱×\n程序退出')

    def expire(self, message=u'本软件已过期,\n不能继续使用'):
        """Show an expiration alert and quit.

        Fix: this method previously accepted no argument although the
        clock-tampering branch calls it with a custom message; `message`
        now overrides the default alert text, and `expire()` behaves as before.
        """
        alert(message, u'试用过期啦~')
        self.Quit()

    def authOk(self, user):
        """Called after a successful login: open the main window."""
        self.user = user
        self.cfg.user = user
        mf = MainFrame(parent=None, title=self.user['name'] + u' 你好,欢迎使用本软件') #( {} )'.format(self.expirationTip) )
        mf.app = self
        if self.user.isAdmin():
            ManagerPanel(mf, self)
        else:
            OperatorPanel(mf)
        mf.maxWindow()

    def getPrinter(self):
        """Lazily create and cache the wx HTML printing helper."""
        if not self.printer:
            self.printer = wx.html.HtmlEasyPrinting()
        return self.printer

    def printSheet(self, sheet):
        """Print a sheet through the wx HTML printing facility."""
        # self.printViaHtml( sheet )
        self.getPrinter().GetPrintData().SetPaperSize(wx.Size(-1, 400))
        self.getPrinter().GetPrintData().PaperSize = wx.Size(-1, 400)
        self.getPrinter().PrintText(self.getSheetHtml(sheet))

    def getSheetHtml(self, sheet):
        """Render a sheet through the HTML print template."""
        data = sheet.getDict()
        data['bigamount'] = cnNumber(data['amount'])
        return getPrintTpl().format(**data)

    def printViaHtml(self, sheet):
        """Write the sheet HTML to the cache directory and open a browser."""
        filepath = os.path.join(self.cfg.cachepath, "{}.html".format(sheet['id']))
        # Context manager guarantees the file is closed even on write errors
        # (previously open/write/close with no error handling).
        with open(filepath, 'wb') as fh:
            fh.write(self.getSheetHtml(sheet).encode('utf-8'))
        webbrowser.open(filepath)
# if '__main__'==__name__:
# NOTE(review): the __main__ guard above is commented out, so the app starts
# as a side effect of importing this module -- confirm that is intentional.
app = App(config, False) # True, os.path.join(ctx.dir, 'run.dat') )
# Publish the singleton before running so App.GetInstance() works everywhere.
App.instance = app
config.app = app
app.Run()
app.MainLoop()
| dlutxx/cement | main.py | Python | mit | 3,137 |
import scipy as sp
import matplotlib
import matplotlib.pyplot as plt
def simple_log_qqplot(quantiles_list, png_file=None, pdf_file=None, quantile_labels=None, line_colors=None,
                    max_val=5, title=None, text=None, plot_label=None, ax=None, **kwargs):
    """Draw a -log10 QQ-plot for one or more series of observed quantiles.

    quantiles_list : list of equally long sequences of observed -log10 quantiles.
    png_file/pdf_file : optional output paths; only used when the figure is
        created here (i.e. when `ax` is not supplied).
    ax : optional matplotlib Axes to draw into. When omitted a figure is
        created, annotated and optionally saved.
    """
    f = None
    storeFig = False
    if ax is None:
        f = plt.figure(figsize=(5.4, 5))
        ax = f.add_axes([0.1, 0.09, 0.88, 0.86])
        storeFig = True
    # Diagonal reference line for a uniform p-value distribution.
    ax.plot([0, max_val], [0, max_val], 'k--', alpha=0.5, linewidth=2.0)
    num_dots = len(quantiles_list[0])
    exp_quantiles = sp.arange(1, num_dots + 1, dtype='single') / (num_dots + 1) * max_val
    for i, quantiles in enumerate(quantiles_list):
        c = line_colors[i] if line_colors else 'b'
        if quantile_labels:
            ax.plot(exp_quantiles, quantiles, label=quantile_labels[i], c=c, alpha=0.5, linewidth=2.2)
        else:
            ax.plot(exp_quantiles, quantiles, c=c, alpha=0.5, linewidth=2.2)
    ax.set_ylabel("Observed $-log_{10}(p$-value$)$")
    ax.set_xlabel("Expected $-log_{10}(p$-value$)$")
    if title:
        # Bug fix: Axes.title is a Text attribute, not callable; use set_title.
        ax.set_title(title)
    max_x = max_val
    max_y = max(map(max, quantiles_list))
    ax.axis([-0.025 * max_x, 1.025 * max_x, -0.025 * max_y, 1.025 * max_y])
    if quantile_labels:
        fontProp = matplotlib.font_manager.FontProperties(size=10)
        ax.legend(loc=2, numpoints=2, handlelength=0.05, markerscale=1, prop=fontProp, borderaxespad=0.018)
    if storeFig == False:
        return
    # Figure-level annotations/saving only apply to a figure created here;
    # previously `f` was referenced even when `ax` was passed in (NameError).
    y_min, y_max = plt.ylim()
    if text:
        f.text(0.05 * max_val, y_max * 0.9, text)
    if plot_label:
        f.text(-0.138 * max_val, y_max * 1.01, plot_label, fontsize=14)
    if png_file is not None:
        f.savefig(png_file)
    if pdf_file is not None:
        f.savefig(pdf_file, format='pdf')
def simple_qqplot(quantiles_list, png_file=None, pdf_file=None, quantile_labels=None, line_colors=None,
                title=None, text=None, ax=None, plot_label=None, **kwargs):
    """Draw a p-value QQ-plot (linear scale) for one or more quantile series.

    quantiles_list : list of equally long sequences of observed quantiles in [0, 1].
    png_file/pdf_file : optional output paths; only used when the figure is
        created here (i.e. when `ax` is not supplied).
    ax : optional matplotlib Axes to draw into.
    """
    f = None
    storeFig = False
    if ax is None:
        f = plt.figure(figsize=(5.4, 5))
        ax = f.add_axes([0.11, 0.09, 0.87, 0.86])
        storeFig = True
    # Diagonal reference line for a uniform p-value distribution.
    ax.plot([0, 1], [0, 1], 'k--', alpha=0.5, linewidth=2.0)
    num_dots = len(quantiles_list[0])
    exp_quantiles = sp.arange(1, num_dots + 1, dtype='single') / (num_dots + 1)
    for i, quantiles in enumerate(quantiles_list):
        c = line_colors[i] if line_colors else 'b'
        if quantile_labels:
            ax.plot(exp_quantiles, quantiles, label=quantile_labels[i], c=c, alpha=0.5, linewidth=2.2)
        else:
            ax.plot(exp_quantiles, quantiles, c=c, alpha=0.5, linewidth=2.2)
    ax.set_ylabel("Observed $p$-value")
    ax.set_xlabel("Expected $p$-value")
    if title:
        # Bug fix: Axes.title is a Text attribute, not callable; use set_title.
        ax.set_title(title)
    ax.axis([-0.025, 1.025, -0.025, 1.025])
    if quantile_labels:
        fontProp = matplotlib.font_manager.FontProperties(size=10)
        ax.legend(loc=2, numpoints=2, handlelength=0.05, markerscale=1, prop=fontProp, borderaxespad=0.018)
    if storeFig == False:
        return
    # Figure-level annotations/saving only apply to a figure created here;
    # previously `f` was referenced even when `ax` was passed in (NameError).
    if text:
        f.text(0.05, 0.9, text)
    if plot_label:
        f.text(-0.151, 1.04, plot_label, fontsize=14)
    if png_file is not None:
        f.savefig(png_file)
    if pdf_file is not None:
        f.savefig(pdf_file, format='pdf')
def plot_simple_qqplots(png_file_prefix, results, result_labels=None, line_colors=None,
                        num_dots=1000, title=None, max_neg_log_val=5):
    """
    Plots both log QQ-plots and normal QQ plots.

    Extracts the p-values from each result object and writes
    <prefix>_qq.png and <prefix>_log_qq.png.
    """
    pvals_list = [res.snp_results['scores'][:] for res in results]
    qs = [get_quantiles(pvals, num_dots) for pvals in pvals_list]
    log_qs = [get_log_quantiles(pvals, num_dots, max_neg_log_val) for pvals in pvals_list]
    simple_qqplot(qs, png_file_prefix + '_qq.png', quantile_labels=result_labels,
                line_colors=line_colors, num_dots=num_dots, title=title)
    simple_log_qqplot(log_qs, png_file_prefix + '_log_qq.png', quantile_labels=result_labels,
                line_colors=line_colors, num_dots=num_dots, title=title, max_val=max_neg_log_val)
def plot_simple_qqplots_pvals(png_file_prefix, pvals_list, result_labels=None, line_colors=None,
                        num_dots=1000, title=None, max_neg_log_val=5):
    """
    Plots both log QQ-plots and normal QQ plots.

    Same as plot_simple_qqplots, but takes raw p-value lists directly.
    """
    qs = [get_quantiles(pvals, num_dots) for pvals in pvals_list]
    log_qs = [get_log_quantiles(pvals, num_dots, max_neg_log_val) for pvals in pvals_list]
    simple_qqplot(qs, png_file_prefix + '_qq.png', quantile_labels=result_labels,
                line_colors=line_colors, num_dots=num_dots, title=title)
    simple_log_qqplot(log_qs, png_file_prefix + '_log_qq.png', quantile_labels=result_labels,
                line_colors=line_colors, num_dots=num_dots, title=title, max_val=max_neg_log_val)
| timeu/PyGWAS | pygwas/core/plot.py | Python | mit | 4,855 |
import forecastio
class ForecastAPI:
    """Small convenience wrapper around the forecastio (Dark Sky) client."""

    # Class-level defaults; overwritten per instance in __init__.
    _API_KEY = "8eefab4d187a39b993ca9c875fef6159"
    _LAZY = False
    _LAT = 0
    _LNG = 0
    _forecast = ()

    def __init__(self, key, lat, lng, lazy=False):
        self._LAT = lat
        self._LNG = lng
        self._API_KEY = key
        self._LAZY = lazy
        self._forecast = forecastio.load_forecast(self._API_KEY, self._LAT, self._LNG, lazy=lazy)

    def get_7day_forecast_detailed(self):
        """Return the raw daily data points for the coming week."""
        return self._forecast.daily().data

    def get_7_day_cloudCover(self):
        """Help getting cloud data from the future.

        Returns a dict mapping ISO date strings to cloud-cover values.
        """
        c_data = self._forecast.daily().data
        cloud_results = {}
        for day in c_data:
            cloud_results[day.time.isoformat()] = day.cloudCover
        return cloud_results

    def get_n_day_minimal_solar(self, n_days):
        """Helper on getting cloud, sunrise and sunset data.

        Returns sunrise/sunset/status/cloud-cover info for the first
        `n_days` entries of the daily forecast, keyed by ISO date.
        """
        c_data = self._forecast.daily().data
        sun_results = {}
        count = 0
        for day in c_data:
            if count < n_days:
                sun_results[day.time.isoformat()] = {"sunrise":day.sunriseTime,"sunset":day.sunsetTime,"stat":day.icon,"cloudcover":day.cloudCover}
            count = count + 1
        return sun_results

    def get_historical_day_minimal_solar(self, days):
        """Helper on getting cloud, sunrise and sunset data from the past.

        Re-loads the forecast once per requested date (replacing
        self._forecast) and collects the same fields as
        get_n_day_minimal_solar, keyed by each date's isoformat().
        """
        #TODO get temp just for reference
        sun_results = {}
        for day in days:
            # Fix: print statements converted to function-call form so the
            # module also parses under Python 3 (unchanged output on Python 2).
            print("getting date for %s" % day)
            self._forecast = forecastio.load_forecast(self._API_KEY, self._LAT, self._LNG, lazy=self._LAZY, time=day)
            c_data = self._forecast.daily().data
            for f_day in c_data:
                print("adding date for %s" % f_day)
                sun_results[day.isoformat()] = {"sunrise":f_day.sunriseTime,"sunset":f_day.sunsetTime,"stat":f_day.icon,"cloudcover":f_day.cloudCover}
        return sun_results
| GreatLakesEnergy/sesh-dash-beta | seshdash/api/forecast.py | Python | mit | 1,952 |
import copy
import os
from visitor import *
from stringstream import *
class Rewriter(NodeVisitor):
""" Class for rewriting of the original AST. Includes:
1. the initial small rewritings,
2. transformation into our representation,
3. transforming from our representation to C-executable code,
4. creating our representation of the device kernel code,
5. creating a C-executable kernel code,
6. Creating the host code (boilerplate code)
"""
    def __init__(self):
        """Initialize all bookkeeping structures used by the rewriter.

        The attributes are populated by initOriginal / initNewRepr and
        consumed by the rewrite* and code-generation passes.
        """
        # List of loop indices
        self.index = list()
        # dict of the upper limit of the loop indices
        self.UpperLimit = dict()
        # dict of the lower limit of the loop indices
        self.LowerLimit = dict()
        # The local work group size
        self.Local = dict()
        self.Local['name'] = 'LSIZE'
        self.Local['size'] = ['64']
        # The number of dimensions of each array
        self.NumDims = dict()
        # The Ids of arrays, or pointers
        self.ArrayIds = set()
        # The indices that appear in the subscript of each array
        self.IndexInSubscript = dict()
        # All Ids that are not arrays, or pointers
        self.NonArrayIds = set()
        # Ids that we remove due to parallelization of loops
        self.RemovedIds = set()
        # The mapping from the indices that we parallelize
        # to their function returning their thread id in the kernel
        self.IndexToThreadId = dict()
        self.IndexToLocalId = dict()
        self.IndexToLocalVar = dict()
        # The indices that we parallelize
        self.GridIndices = list()
        # The OpenCl kernel before anything
        self.Kernel = None
        # The "inside" of the OpenCl kernel after parallelization
        self.InsideKernel = None
        # The name of the kernel, i.e. the FuncName + Kernel
        self.KernelName = None
        # The mapping from the array ids to a list of
        # the names of their dimensions
        self.ArrayIdToDimName = dict()
        # ArrayRef inside a loop in the kernel
        # Mapping from Id to AST ArrayRef node
        self.LoopArray = dict()
        # The argument list in our IR
        self.DevArgList = list()
        # The name and type of the kernel function.
        self.DevFuncTypeId = None
        # The name of the kernel function.
        self.DevFuncId = None
        # The device names of the pointers in the boilerplate code
        self.DevId = dict()
        # The host names of the pointers in the boilerplate code
        self.HstId = dict()
        # The types of the arguments for the kernel
        self.Type = dict()
        # The name of the variable denoting the memory size
        self.Mem = dict()
        # Dimension of the parallelization
        self.ParDim = None
        # VarName of the global/local worksize array.
        self.Worksize = dict()
        # The dimension that the index indexes
        self.IdxToDim = dict()
        # Whether an array is read, write or both
        self.ReadWrite = dict()
        # List of arrays that are write only
        self.WriteOnly = list()
        # List of arrays that are read only
        self.ReadOnly = list()
        # dict of indices to loops in the kernel
        self.Loops = dict()
        # Contains the loop indices for each subscript
        self.SubIdx = dict()
        # Contains a list for each arrayref of what loop indices
        # appear in the subscript.
        self.Subscript = dict()
        # The same as above but names are saved as strings instead
        # of Id(strings)
        self.SubscriptNoId = dict()
        # Decides whether we read back data from GPU
        self.NoReadBack = False
        # A list of calls to the transpose function which we perform
        # after data was read back from the GPU.
        self.WriteTranspose = list()
        # A mapping from array references to the loops they appear in.
        self.RefToLoop = dict()
        # List of arguments for the kernel
        ## self.KernelArgs = list()
        ########################################################
        # Datastructures used when performing transformations #
        ########################################################
        # Holds the sub-AST in AllocateBuffers
        # that we add transposition to.
        self.Transposition = None
        # Holds the sub-AST in AllocateBuffers
        # that we add constant memory pointer initializations to.
        self.ConstantMemory = None
        # Holds the sub-AST in AllocateBuffers
        # where we set the defines for the kernel.
        self.Define = None
        # Dict containing the name and type for each kernel argument
        # set in SetArguments
        self.KernelArgs = dict()
        # Holds information about which names have been swapped
        # in a transposition
        self.NameSwap = dict()
        # Holds information about which subscripts have been swapped
        # in a transposition
        self.SubSwap = dict()
        # Holds information about which indices have been swapped
        # in a transposition
        self.IdxSwap = dict()
        # Holds information about which dimensions have been swapped
        # in a transposition
        self.DimSwap = dict()
        # Holds additional global variables such as pointers that we add
        # when we perform transformations
        self.GlobalVars = dict()
        # Holds additional cl_mem variables that we add
        # when we perform the Constant Memory transformation
        self.ConstantMem = dict()
        # Name swap in relation to [local memory]
        self.LocalSwap = dict()
        # Extra things that we add to ids [local memory]
        self.Add = dict()
        # Holds includes for the kernel
        self.Includes = list()
        # Holds the ast for a function that returns the kernelstring
        self.KernelStringStream = list()
        # Holds a list of which loops we will unroll
        self.UnrollLoops = list()
        # True if SetDefines was called.
        self.DefinesAreMade = False
        # List of what kernel arguments changes
        self.Change = list()
        # AST of the if/then/else that selects between kernel variants.
        self.IfThenElse = None
def initOriginal(self, ast):
loops = ForLoops()
loops.visit(ast)
forLoopAst = loops.ast
loopIndices = LoopIndices()
loopIndices.visit(forLoopAst)
self.index = loopIndices.index
self.UpperLimit = loopIndices.end
self.LowerLimit = loopIndices.start
norm = Norm(self.index)
norm.visit(forLoopAst)
arrays = Arrays(self.index)
arrays.visit(ast)
for n in arrays.numIndices:
if arrays.numIndices[n] == 2:
arrays.numSubscripts[n] = 2
elif arrays.numIndices[n] > 2:
arrays.numSubscripts[n] = 1
self.NumDims = arrays.numSubscripts
self.IndexInSubscript = arrays.indexIds
typeIds = TypeIds()
typeIds.visit(loops.ast)
typeIds2 = TypeIds()
typeIds2.visit(ast)
outsideTypeIds = typeIds2.ids - typeIds.ids
for n in typeIds.ids:
typeIds2.dictIds.pop(n)
self.Type = typeIds2.dictIds
ids = Ids()
ids.visit(ast)
## print "typeIds.ids ", typeIds.ids
## print "arrays.ids ", arrays.ids
## print "ids.ids ", ids.ids
otherIds = ids.ids - arrays.ids - typeIds.ids
self.ArrayIds = arrays.ids - typeIds.ids
self.NonArrayIds = otherIds
    def initNewRepr(self, ast, dev = 'GPU'):
        """Build the parallel (OpenCL) representation from the AST.

        Chooses the parallelization dimension and work-group size, maps the
        parallelized loop indices to thread/local id lookups, extracts the
        kernel body, and fills in the argument/type/worksize bookkeeping
        used by later passes. `dev` selects CPU/GPU work-group defaults.
        """
        ## findIncludes = FindIncludes()
        ## findIncludes.visit(ast)
        ## self.Includes = findIncludes.includes
        perfectForLoop = PerfectForLoop()
        perfectForLoop.visit(ast)
        if self.ParDim is None:
            self.ParDim = perfectForLoop.depth
        # Work-group size defaults depend on dimensionality and device.
        if self.ParDim == 1:
            self.Local['size'] = ['256']
            if dev == 'CPU':
                self.Local['size'] = ['16']
        else:
            self.Local['size'] = ['16','16']
            if dev == 'CPU':
                self.Local['size'] = ['4','4']
        innerbody = perfectForLoop.inner
        if perfectForLoop.depth == 2 and self.ParDim == 1:
            innerbody = perfectForLoop.outer
        firstLoop = ForLoops()
        firstLoop.visit(innerbody.compound)
        loopIndices = LoopIndices()
        if firstLoop.ast is not None:
            loopIndices.visit(innerbody.compound)
            self.Loops = loopIndices.Loops
            self.InsideKernel = firstLoop.ast
        arrays = Arrays(self.index)
        arrays.visit(innerbody.compound)
        self.NumDims = arrays.numSubscripts
        self.LoopArrays = arrays.LoopArrays
        initIds = InitIds()
        initIds.visit(perfectForLoop.ast.init)
        gridIds = list()
        idMap = dict()
        localMap = dict()
        localVarMap = dict()
        # First parallelized index maps to dimension 0 of the NDRange.
        firstIdx = initIds.index[0]
        idMap[firstIdx] = 'get_global_id(0)'
        localMap[firstIdx] = 'get_local_id(0)'
        localVarMap[firstIdx] = 'l' + firstIdx
        self.ReverseIdx = dict()
        self.ReverseIdx[0] = 1
        gridIds.extend(initIds.index)
        kernel = perfectForLoop.ast.compound
        self.ReverseIdx[1] = 0
        if self.ParDim == 2:
            # Second loop level becomes NDRange dimension 1; note the maps
            # for the two grid indices are then swapped.
            initIds = InitIds()
            initIds.visit(kernel.statements[0].init)
            kernel = kernel.statements[0].compound
            secondIdx = initIds.index[0]
            idMap[secondIdx] = 'get_global_id(1)'
            localMap[secondIdx] = 'get_local_id(1)'
            localVarMap[secondIdx] = 'l' + secondIdx
            gridIds.extend(initIds.index)
            (idMap[gridIds[0]], idMap[gridIds[1]]) = (idMap[gridIds[1]], idMap[gridIds[0]])
            (localMap[gridIds[0]], localMap[gridIds[1]]) = (localMap[gridIds[1]], localMap[gridIds[0]])
            ## (localVarMap[gridIds[0]], localVarMap[gridIds[1]]) = (localVarMap[gridIds[1]], localVarMap[gridIds[0]])
        self.IndexToLocalId = localMap
        self.IndexToLocalVar = localVarMap
        self.IndexToThreadId = idMap
        self.GridIndices = gridIds
        self.Kernel = kernel
        for i, n in enumerate(reversed(self.GridIndices)):
            self.IdxToDim[i] = n
        findDim = FindDim(self.NumDims)
        findDim.visit(ast)
        self.ArrayIdToDimName = findDim.dimNames
        # Loop bounds of parallelized loops disappear from the kernel,
        # unless they are still referenced inside it.
        self.RemovedIds = set(self.UpperLimit[i] for i in self.GridIndices)
        idsStillInKernel = Ids()
        idsStillInKernel.visit(self.Kernel)
        self.RemovedIds = self.RemovedIds - idsStillInKernel.ids
        otherIds = self.ArrayIds.union(self.NonArrayIds)
        findDeviceArgs = FindDeviceArgs(otherIds)
        findDeviceArgs.visit(ast)
        self.DevArgList = findDeviceArgs.arglist
        findFunction = FindFunction()
        findFunction.visit(ast)
        self.DevFuncTypeId = findFunction.typeid
        self.DevFuncId = self.DevFuncTypeId.name.name
        # Device/host pointer and size names for each array.
        for n in self.ArrayIds:
            self.DevId[n] = 'dev_ptr' + n
            self.HstId[n] = 'hst_ptr' + n
            self.Mem[n] = 'hst_ptr' + n + '_mem_size'
        ## for n in self.DevArgList:
        ##     name = n.name.name
        ##     type = n.type[-2:]
        ##     self.Type[name] = type
        for n in self.ArrayIdToDimName:
            for m in self.ArrayIdToDimName[n]:
                self.Type[m] = ['size_t']
        kernelName = self.DevFuncTypeId.name.name
        self.KernelName = kernelName + 'Kernel'
        self.Worksize['local'] = kernelName + '_local_worksize'
        self.Worksize['global'] = kernelName + '_global_worksize'
        self.Worksize['offset'] = kernelName + '_global_offset'
        findReadWrite = FindReadWrite(self.ArrayIds)
        findReadWrite.visit(ast)
        self.ReadWrite = findReadWrite.ReadWrite
        for n in self.ReadWrite:
            pset = self.ReadWrite[n]
            if len(pset) == 1:
                if 'write' in pset:
                    self.WriteOnly.append(n)
                else:
                    self.ReadOnly.append(n)
        # Every surviving id becomes a kernel argument; 2-D arrays also
        # pass their leading dimension.
        argIds = self.NonArrayIds.union(self.ArrayIds) - self.RemovedIds
        for n in argIds:
            tmplist = [n]
            try:
                if self.NumDims[n] == 2:
                    tmplist.append(self.ArrayIdToDimName[n][0])
            except KeyError:
                pass
            for m in tmplist:
                self.KernelArgs[m] = self.Type[m]
        self.Transposition = GroupCompound([Comment('// Transposition')])
        self.ConstantMemory = GroupCompound([Comment('// Constant Memory')])
        self.Define = GroupCompound([Comment('// Defines for the kernel')])
        arrays = Arrays(self.index)
        arrays.visit(ast)
        self.Subscript = arrays.Subscript
        self.SubIdx = arrays.SubIdx
        # Mirror the subscripts with plain strings instead of Id nodes.
        self.SubscriptNoId = copy.deepcopy(self.Subscript)
        for n in self.SubscriptNoId.values():
            for m in n:
                for i,k in enumerate(m):
                    try:
                        m[i] = k.name
                    except AttributeError:
                        try:
                            m[i] = k.value
                        except AttributeError:
                            m[i] = 'unknown'
        refToLoop = RefToLoop(self.GridIndices)
        refToLoop.visit(ast)
        self.RefToLoop = refToLoop.RefToLoop
    def DataStructures(self):
        """Debug helper: dump every bookkeeping structure to stdout (Python 2)."""
        print "self.index " , self.index
        print "self.UpperLimit " , self.UpperLimit
        print "self.LowerLimit " , self.LowerLimit
        print "self.NumDims " , self.NumDims
        print "self.ArrayIds " , self.ArrayIds
        print "self.IndexInSubscript " , self.IndexInSubscript
        print "self.NonArrayIds " , self.NonArrayIds
        print "self.RemovedIds " , self.RemovedIds
        print "self.IndexToThreadId " , self.IndexToThreadId
        print "self.IndexToLocalId " , self.IndexToLocalId
        print "self.IndexToLocalVar " , self.IndexToLocalVar
        print "self.ReverseIdx ", self.ReverseIdx
        print "self.GridIndices " , self.GridIndices
        ## print "self.Kernel " , self.Kernel
        print "self.ArrayIdToDimName " , self.ArrayIdToDimName
        print "self.DevArgList " , self.DevArgList
        print "self.DevFuncTypeId " , self.DevFuncTypeId
        print "self.DevId " , self.DevId
        print "self.HstId " , self.HstId
        print "self.Type " , self.Type
        print "self.Mem " , self.Mem
        print "self.ParDim " , self.ParDim
        print "self.Worksize " , self.Worksize
        print "self.IdxToDim " , self.IdxToDim
        print "self.WriteOnly " , self.WriteOnly
        print "self.ReadOnly " , self.ReadOnly
        print "self.Subscript " , self.Subscript
        print "self.SubscriptNoId " , self.SubscriptNoId
        print "TRANSFORMATIONS"
        print "self.Transposition " , self.Transposition
        print "self.ConstantMemory " , self.ConstantMemory
        print "self.KernelArgs " , self.KernelArgs
        print "self.NameSwap " , self.NameSwap
        print "self.LocalSwap " , self.LocalSwap
        print "self.LoopArrays " , self.LoopArrays
        print "self.Add ", self.Add
        print "self.GlobalVars ", self.GlobalVars
        print "self.ConstantMem " , self.ConstantMem
        print "self.Loops " , self.Loops
        print "self.RefToLoop ", self.RefToLoop
    def rewrite(self, ast, functionname = 'FunctionName', changeAST = True):
        """ Rewrites a few things in the AST to increase the
        abstraction level.

        Wraps the top-level loop nest into a void function named
        `functionname` whose arguments are the arrays (plus one size_t
        'hst_ptr<name>_dim<i>' argument per array dimension) and the
        scalar ids. Includes are collected into self.Includes; when
        `changeAST` is True the module body is replaced by the FuncDecl.
        """
        typeid = TypeId(['void'], Id(functionname),ast.coord)
        arraysArg = list()
        for arrayid in self.ArrayIds:
            arraysArg.append(TypeId(self.Type[arrayid], Id(arrayid,ast.coord),ast.coord))
            for iarg in xrange(self.NumDims[arrayid]):
                arraysArg.append(TypeId(['size_t'], Id('hst_ptr'+arrayid+'_dim'+str(iarg+1),ast.coord),ast.coord))
        for arrayid in self.NonArrayIds:
            arraysArg.append(TypeId(self.Type[arrayid], Id(arrayid,ast.coord),ast.coord))
        arglist = ArgList([] + arraysArg)
        # Pull the include directives out of the module body.
        while isinstance(ast.ext[0], Include):
            include = ast.ext.pop(0)
            self.Includes.append(include)
        # Drop anything else preceding the first for-loop.
        while not isinstance(ast.ext[0], ForLoop):
            ast.ext.pop(0)
        compound = Compound(ast.ext)
        if changeAST:
            ast.ext = list()
            ast.ext.append(FuncDecl(typeid,arglist,compound))
def rewriteToSequentialC(self, ast):
loops = ForLoops()
loops.visit(ast)
forLoopAst = loops.ast
loopIndices = LoopIndices()
loopIndices.visit(forLoopAst)
self.index = loopIndices.index
arrays2 = Arrays(self.index)
arrays2.visit(ast)
findDim = FindDim(arrays2.numIndices)
findDim.visit(ast)
rewriteArrayRef = RewriteArrayRef(self.NumDims,
self.ArrayIdToDimName,
self)
rewriteArrayRef.visit(ast)
def rewriteToDeviceCTemp(self, ast, changeAST = True):
findDeviceArgs = FindDeviceArgs(self.NonArrayIds)
findDeviceArgs.visit(ast)
findFunction = FindFunction()
findFunction.visit(ast)
# add OpenCL keywords to indicate the kernel function.
findFunction.typeid.type.insert(0, '__kernel')
exchangeIndices = ExchangeIndices(self.IndexToThreadId)
exchangeIndices.visit(self.Kernel)
newast = FuncDecl(findFunction.typeid, ArgList(findDeviceArgs.arglist,ast.coord), self.Kernel, ast.coord)
if changeAST:
ast.ext = list()
ast.ext.append(newast)
def InSourceKernel(self, ast, cond, filename, kernelstringname):
self.rewriteToDeviceCRelease(ast)
ssprint = SSGenerator()
newast = FileAST([])
ssprint.createKernelStringStream(ast, newast, self.UnrollLoops, kernelstringname, filename = filename)
self.KernelStringStream.append({'name' : kernelstringname, \
'ast' : newast,
'cond' : cond})
    def rewriteToDeviceCRelease(self, ast):
        """Produce the release version of the OpenCL kernel in `ast`.

        Builds the kernel argument list from self.KernelArgs, rewrites a
        deep copy of the kernel body (array-ref flattening, thread-id and
        type exchanges), prepends the fp64 pragma when doubles are used,
        and replaces the module body with the resulting kernel FileAST.
        """
        arglist = list()
        argIds = self.NonArrayIds.union(self.ArrayIds) - self.RemovedIds
        # The list of arguments for the kernel
        dictTypeHostPtrs = copy.deepcopy(self.Type)
        for n in self.ArrayIds:
            dictTypeHostPtrs[self.ArrayIdToDimName[n][0]] = ['size_t']
        for n in self.KernelArgs:
            type = copy.deepcopy(self.KernelArgs[n])
            # size_t is not an OpenCL kernel-argument type; pass unsigned.
            if type[0] == 'size_t':
                type[0] = 'unsigned'
            # Pointer arguments live in __global memory.
            if len(type) == 2:
                type.insert(0, '__global')
            arglist.append(TypeId(type, Id(n)))
        exchangeArrayId = ExchangeArrayId(self.LocalSwap)
        for n in self.LoopArrays.values():
            for m in n:
                exchangeArrayId.visit(m)
        ## for n in self.Add:
        ##     addToIds = AddToId(n, self.Add[n])
        ##     addToIds.visit(self.InsideKernel.compound)
        # Work on a copy so self.Kernel keeps the abstract representation.
        MyKernel = copy.deepcopy(self.Kernel)
        rewriteArrayRef = RewriteArrayRef(self.NumDims, self.ArrayIdToDimName, self)
        rewriteArrayRef.visit(MyKernel)
        arrays = self.ArrayIds
        exchangeIndices = ExchangeId(self.IndexToThreadId)
        exchangeIndices.visit(MyKernel)
        exchangeTypes = ExchangeTypes()
        exchangeTypes.visit(MyKernel)
        typeid = copy.deepcopy(self.DevFuncTypeId)
        typeid.type.insert(0, '__kernel')
        ext = copy.deepcopy(self.Includes)
        newast = FileAST(ext)
        # Enable the fp64 extension as soon as any argument is a double.
        for n in arglist:
            if len(n.type) == 3:
                if n.type[1] == 'double':
                    ext.insert(0, Compound([Id("#pragma OPENCL EXTENSION cl_khr_fp64: enable")]))
                    break
            else:
                if n.type[0] == 'double':
                    ext.insert(0,Compound([Id("#pragma OPENCL EXTENSION cl_khr_fp64: enable")]))
                    break
        ext.append(FuncDecl(typeid, ArgList(arglist), MyKernel))
        ast.ext = list()
        ## ast.ext.append(Id('#define LSIZE ' + str(self.Local['size'])))
        ast.ext.append(newast)
    def constantMemory2(self, arrDict):
        """Move selected array references into OpenCL __constant memory.

        arrDict maps an array name to the list of indices (into
        self.LoopArrays[name]) of the references to place in constant
        memory. Creates one combined __constant pointer, emits host code
        into self.ConstantMemory that packs the referenced values into it,
        and redirects the chosen kernel references to the new pointer.
        Aborts (with a printed message) on mixed element types or when the
        subscripts span more than one loop.
        """
        arrNames = arrDict.keys()
        # find out if we need to split into global and constant memory space
        split = dict()
        for name in arrNames:
            if len(arrDict[name]) != len(self.Subscript[name]):
                # Every aref to name is not put in constant memory
                # so we split.
                split[name] = True
            else:
                split[name] = False
        # Add new constant pointer
        ptrname = 'Constant' + ''.join(arrNames)
        hst_ptrname = 'hst_ptr' + ptrname
        dev_ptrname = 'dev_ptr' + ptrname
        typeset = set()
        for name in arrNames:
            typeset.add(self.Type[name][0])
        if len(typeset) > 1:
            print "Conflicting types in constant memory transformation... Aborting"
            return
        ptrtype = [typeset.pop(), '*']
        # Add the ptr to central data structures
        self.Type[ptrname] = ptrtype
        self.DevId[ptrname] = dev_ptrname
        self.HstId[ptrname] = hst_ptrname
        self.Mem[ptrname] = self.HstId[ptrname]+'_mem_size'
        # Add the ptr to be a kernel argument
        self.KernelArgs[ptrname] = ['__constant'] + ptrtype
        self.GlobalVars[ptrname] = ''
        self.ConstantMem[ptrname] = arrNames
        # Delete original arguments if we split
        for n in split:
            if not split[n]:
                self.KernelArgs.pop(n)
                self.DevId.pop(n)
        # Add pointer allocation to AllocateBuffers
        lval = Id(self.HstId[ptrname])
        rval = Id('new ' + self.Type[ptrname][0] + '['\
                  + self.Mem[ptrname] + ']')
        self.ConstantMemory.statements.append(Assignment(lval, rval))
        # find the loop that we need to add to the allocation section
        # Do it by looking at the loop indices in the subscripts
        ids = []
        for s in arrDict:
            # Just look at only the first subscript at the moment
            array = arrDict[s]
            subs = self.LoopArrays[s]
            try:
                sub = subs[array[0]]
            except IndexError:
                print array[0]
                print subs
                print "ConstantMemory: Wrong index... Are you using zero indexing for the beginning of the loop?"
                return
            arrays = Arrays(self.Loops.keys())
            arrays.visit(sub)
            ids = set(arrays.SubIdx[s][0]) - set([None]) - set(self.GridIndices)
            break
        if len(ids) > 1:
            print "Constant memory only supported for one loop at the moment"
            return
        # Add the loop to the allocation code
        forloop = copy.deepcopy(self.Loops[iter(ids).next()])
        newcomp = []
        forcomp = []
        groupComp = GroupCompound(newcomp)
        forloop.compound = Compound(forcomp)
        loopcount = forloop.init.lval.name.name
        # Add the for loop from the kernel
        newcomp.append(forloop)
        # find dimension of the constant ptr
        constantdim = sum([ (len(arrDict[m])) for m in arrDict])
        # add constant writes: slot k of iteration i is constantdim*i + k
        writes = []
        for i in xrange(constantdim):
            writes.append((
                [BinOp(BinOp(Id(str(constantdim)), '*', \
                             Id(loopcount)), '+', Id(str(i)))]))
        # for rewriting the ARefs that we copy
        rewriteArrayRef = RewriteArrayRef(self.NumDims, self.ArrayIdToDimName, self)
        # add global loadings
        count = 0
        for n in arrDict:
            for i in arrDict[n]:
                aref = copy.deepcopy(self.LoopArrays[n][i])
                name = aref.name.name
                rewriteArrayRef.visit(aref)
                aref.name.name = self.HstId[name]
                lval = ArrayRef(Id(self.HstId[ptrname]), writes[count])
                assign = Assignment(lval, aref)
                forcomp.append(assign)
                count += 1
        # Must now replace global arefs with constant arefs
        count = 0
        for n in arrDict:
            for i in (arrDict[n]):
                aref_new = writes[count]
                aref_old = self.LoopArrays[n][i]
                # Copying the internal data of the two arefs
                aref_old.name.name = ptrname
                aref_old.subscript = aref_new
                count += 1
        self.ConstantMemory.statements.append(groupComp)
    def generateBoilerplateCode(self, ast):
        """Assemble the host-side boilerplate AST for the generated OpenCL kernel.

        Emits, in order: the utility include and globals, cl_mem device-buffer
        declarations, host pointer declarations, buffer/dimension size
        variables, GetKernelCode(), AllocateBuffers(), SetArguments<kernel>(),
        Exec<kernel>() and finally the RunOCL<kernel>() entry point, then
        returns the assembled FileAST.

        NOTE(review): the `ast` parameter is never referenced in this body --
        confirm whether callers rely on the signature only.
        """
        # Aliases for collected kernel metadata.
        # NOTE(review): several of these locals (dictNToNumScripts, idMap,
        # gridIds, listMemSizeCalcTemp, dictMemSizeCalc) appear unused below.
        dictNToNumScripts = self.NumDims
        dictNToDimNames = self.ArrayIdToDimName
        idMap = self.IndexToThreadId
        gridIds = self.GridIndices
        NonArrayIds = copy.deepcopy(self.NonArrayIds)
        otherIds = self.ArrayIds.union(self.NonArrayIds) - self.RemovedIds
        # File prolog: utility include, namespace, and the cl_kernel handle.
        fileAST = FileAST([])
        fileAST.ext.append(Id('#include \"../../../utils/StartUtil.cpp\"'))
        fileAST.ext.append(Id('using namespace std;'))
        ## fileAST.ext.append(Id('#define LSIZE ' + str(self.Local['size'][0])))
        kernelId = Id(self.KernelName)
        kernelTypeid = TypeId(['cl_kernel'], kernelId, 0)
        fileAST.ext.append(kernelTypeid)
        ## fileAST.show()
        # cl_mem device-buffer declarations, one per device-side array.
        listDevBuffers = []
        for n in self.ArrayIds:
            try:
                name = self.DevId[n]
                listDevBuffers.append(TypeId(['cl_mem'], Id(name)))
            except KeyError:
                pass
        for n in self.ConstantMem:
            name = self.DevId[n]
            listDevBuffers.append(TypeId(['cl_mem'], Id(name)))
        dictNToDevPtr = self.DevId
        listDevBuffers = GroupCompound(listDevBuffers)
        fileAST.ext.append(listDevBuffers)
        # Host-side pointer declarations for kernel arguments and globals.
        listHostPtrs = []
        dictTypeHostPtrs = dict()
        dictNToHstPtr = dict()
        for n in self.DevArgList:
            name = n.name.name
            type = self.Type[name]
            try:
                name = self.HstId[name]
            except KeyError:
                pass
            listHostPtrs.append(TypeId(type, Id(name), 0))
        for n in self.GlobalVars:
            type = self.Type[n]
            name = self.HstId[n]
            listHostPtrs.append(TypeId(type, Id(name), 0))
        dictNToHstPtr = self.HstId
        dictTypeHostPtrs = copy.deepcopy(self.Type)
        listHostPtrs = GroupCompound(listHostPtrs)
        fileAST.ext.append(listHostPtrs)
        # size_t variables for buffer byte-sizes and array dimensions.
        listMemSize = []
        listDimSize = []
        listMemSizeCalcTemp = []
        dictMemSizeCalc = dict()
        dictNToSize = self.Mem
        for n in self.Mem:
            sizeName = self.Mem[n]
            listMemSize.append(TypeId(['size_t'], Id(sizeName)))
        for n in self.ArrayIds:
            for dimName in self.ArrayIdToDimName[n]:
                listDimSize.append(\
                TypeId(['size_t'], Id(dimName)))
        fileAST.ext.append(GroupCompound(listMemSize))
        fileAST.ext.append(GroupCompound(listDimSize))
        # Misc globals: first-run flag, kernel-defines string, and a timer.
        misc = []
        lval = TypeId(['size_t'], Id('isFirstTime'))
        rval = Constant(1)
        misc.append(Assignment(lval,rval))
        lval = TypeId(['std::string'], Id('KernelDefines'))
        rval = Constant('""')
        misc.append(Assignment(lval,rval))
        lval = TypeId(['Stopwatch'], Id('timer'))
        misc.append(lval)
        fileAST.ext.append(GroupCompound(misc))
        # Generate the GetKernelCode function
        for optim in self.KernelStringStream:
            fileAST.ext.append(optim['ast'])
        getKernelCode = EmptyFuncDecl('GetKernelCode', type = ['std::string'])
        getKernelStats = []
        getKernelCode.compound.statements = getKernelStats
        getKernelStats.append(self.IfThenElse)
        ## getKernelStats.append(Id('return str.str();'))
        fileAST.ext.append(getKernelCode)
        # AllocateBuffers(): compute buffer sizes, then create cl_mem objects.
        allocateBuffer = EmptyFuncDecl('AllocateBuffers')
        fileAST.ext.append(allocateBuffer)
        listSetMemSize = []
        for entry in self.ArrayIds:
            n = self.ArrayIdToDimName[entry]
            lval = Id(self.Mem[entry])
            # byte size = dim0 [* dim1] * sizeof(element type)
            rval = BinOp(Id(n[0]),'*', Id('sizeof('+\
                         self.Type[entry][0]+')'))
            if len(n) == 2:
                rval = BinOp(Id(n[1]),'*', rval)
            listSetMemSize.append(Assignment(lval,rval))
        for n in self.ConstantMem:
            # constant-memory size is the sum of its member buffer sizes
            terms = self.ConstantMem[n]
            rval = Id(self.Mem[terms[0]])
            for s in terms[1:]:
                rval = BinOp(rval, '+', Id(self.Mem[s]))
            lval = Id(self.Mem[n])
            listSetMemSize.append(Assignment(lval,rval))
        allocateBuffer.compound.statements.append(\
            GroupCompound(listSetMemSize))
        allocateBuffer.compound.statements.append(\
            self.Transposition)
        allocateBuffer.compound.statements.append(\
            self.ConstantMemory)
        allocateBuffer.compound.statements.append(\
            self.Define)
        # OpenCL error-status variable, initialised to CL_SUCCESS.
        ErrName = 'oclErrNum'
        lval = TypeId(['cl_int'], Id(ErrName))
        rval = Id('CL_SUCCESS')
        clSuc = Assignment(lval,rval)
        allocateBuffer.compound.statements.extend(\
            [GroupCompound([clSuc])])
        # One clCreateBuffer call (plus error check) per device pointer;
        # write-only buffers get no host pointer, others reuse host memory.
        for n in dictNToDevPtr:
            lval = Id(dictNToDevPtr[n])
            op = '='
            arrayn = dictNToHstPtr[n]
            try:
                arrayn = self.NameSwap[arrayn]
            except KeyError:
                pass
            if n in self.WriteOnly:
                flag = Id('CL_MEM_WRITE_ONLY')
                arraynId = Id('NULL')
            elif n in self.ReadOnly:
                flag = Id('CL_MEM_USE_HOST_PTR | CL_MEM_READ_ONLY')
                arraynId = Id(arrayn)
            else:
                flag = Id('CL_MEM_USE_HOST_PTR')
                arraynId = Id(arrayn)
            arglist = ArgList([Id('context'),\
                               flag,\
                               Id(dictNToSize[n]),\
                               arraynId,\
                               Id('&'+ErrName)])
            rval = FuncDecl(Id('clCreateBuffer'), arglist, Compound([]))
            allocateBuffer.compound.statements.append(\
                Assignment(lval,rval))
            arglist = ArgList([Id(ErrName), Constant("clCreateBuffer " + lval.name)])
            ErrCheck = FuncDecl(Id('oclCheckErr'),arglist, Compound([]))
            allocateBuffer.compound.statements.append(ErrCheck)
        # SetArguments<kernel>(): one clSetKernelArg per kernel argument.
        setArgumentsKernel = EmptyFuncDecl('SetArguments'+self.DevFuncId)
        fileAST.ext.append(setArgumentsKernel)
        ArgBody = setArgumentsKernel.compound.statements
        ArgBody.append(clSuc)
        cntName = Id('counter')
        lval = TypeId(['int'], cntName)
        rval = Constant(0)
        ArgBody.append(Assignment(lval,rval))
        for n in dictNToDimNames:
            ## add dim arguments to set of ids
            NonArrayIds.add(dictNToDimNames[n][0])
            # Add types of dimensions for size arguments
            dictTypeHostPtrs[dictNToDimNames[n][0]] = ['size_t']
        for n in self.RemovedIds:
            dictTypeHostPtrs.pop(n,None)
        ## clSetKernelArg for Arrays
        for n in self.KernelArgs:
            lval = Id(ErrName)
            op = '|='
            type = self.Type[n]
            if len(type) == 2:
                # pointer types are passed as cl_mem handles
                arglist = ArgList([kernelId,\
                                   Increment(cntName,'++'),\
                                   Id('sizeof(cl_mem)'),\
                                   Id('(void *) &' + dictNToDevPtr[n])])
                rval = FuncDecl(Id('clSetKernelArg'),arglist, Compound([]))
            else:
                try:
                    n = self.NameSwap[n]
                except KeyError:
                    pass
                cl_type = type[0]
                if cl_type == 'size_t':
                    cl_type = 'unsigned'
                arglist = ArgList([kernelId,\
                                   Increment(cntName,'++'),\
                                   Id('sizeof('+cl_type+')'),\
                                   Id('(void *) &' + n)])
                rval = FuncDecl(Id('clSetKernelArg'),arglist, Compound([]))
            ArgBody.append(Assignment(lval,rval,op))
        arglist = ArgList([Id(ErrName), Constant('clSetKernelArg')])
        ErrId = Id('oclCheckErr')
        ErrCheck = FuncDecl(ErrId, arglist, Compound([]))
        ArgBody.append(ErrCheck)
        # Exec<kernel>(): set worksizes, enqueue the kernel, wait, read back.
        execKernel = EmptyFuncDecl('Exec' + self.DevFuncTypeId.name.name)
        fileAST.ext.append(execKernel)
        execBody = execKernel.compound.statements
        execBody.append(clSuc)
        eventName = Id('GPUExecution')
        event = TypeId(['cl_event'], eventName)
        execBody.append(event)
        for n in self.Worksize:
            lval = TypeId(['size_t'], Id(self.Worksize[n] + '[]'))
            if n == 'local':
                local_worksize = [Id(i) for i in self.Local['size']]
                rval = ArrayInit(local_worksize)
            elif n == 'global':
                initlist = []
                for m in reversed(self.GridIndices):
                    initlist.append(Id(self.UpperLimit[m]\
                                       +' - '+ self.LowerLimit[m]))
                rval = ArrayInit(initlist)
            else:
                # the remaining entry is the global work offset
                initlist = []
                for m in reversed(self.GridIndices):
                    initlist.append(Id(self.LowerLimit[m]))
                rval = ArrayInit(initlist)
            execBody.append(Assignment(lval,rval))
        lval = Id(ErrName)
        arglist = ArgList([Id('command_queue'),\
                           Id(self.KernelName),\
                           Constant(self.ParDim),\
                           Id(self.Worksize['offset']),\
                           Id(self.Worksize['global']),\
                           Id(self.Worksize['local']),\
                           Constant(0), Id('NULL'), \
                           Id('&' + eventName.name)])
        rval = FuncDecl(Id('clEnqueueNDRangeKernel'),arglist, Compound([]))
        execBody.append(Assignment(lval,rval))
        arglist = ArgList([Id(ErrName), Constant('clEnqueueNDRangeKernel')])
        ErrCheck = FuncDecl(ErrId, arglist, Compound([]))
        execBody.append(ErrCheck)
        arglist = ArgList([Id('command_queue')])
        finish = FuncDecl(Id('clFinish'), arglist, Compound([]))
        execBody.append(Assignment(Id(ErrName), finish))
        arglist = ArgList([Id(ErrName), Constant('clFinish')])
        ErrCheck = FuncDecl(ErrId, arglist, Compound([]))
        execBody.append(ErrCheck)
        # Optionally read every write-only buffer back to the host.
        if not self.NoReadBack:
            for n in self.WriteOnly:
                lval = Id(ErrName)
                Hstn = self.HstId[n]
                try:
                    Hstn = self.NameSwap[Hstn]
                except KeyError:
                    pass
                arglist = ArgList([Id('command_queue'),\
                                   Id(self.DevId[n]),\
                                   Id('CL_TRUE'),\
                                   Constant(0),\
                                   Id(self.Mem[n]),\
                                   Id(Hstn),\
                                   Constant(1),
                                   Id('&' + eventName.name),Id('NULL')])
                rval = FuncDecl(Id('clEnqueueReadBuffer'),arglist, Compound([]))
                execBody.append(Assignment(lval,rval))
                arglist = ArgList([Id(ErrName), Constant('clEnqueueReadBuffer')])
                ErrCheck = FuncDecl(ErrId, arglist, Compound([]))
                execBody.append(ErrCheck)
            # add clFinish statement
            arglist = ArgList([Id('command_queue')])
            finish = FuncDecl(Id('clFinish'), arglist, Compound([]))
            execBody.append(Assignment(Id(ErrName), finish))
            arglist = ArgList([Id(ErrName), Constant('clFinish')])
            ErrCheck = FuncDecl(ErrId, arglist, Compound([]))
            execBody.append(ErrCheck)
            for n in self.WriteTranspose:
                execBody.append(n)
        # RunOCL<kernel>(): public entry point; the first call bootstraps the
        # GPU, allocates buffers, compiles the kernel and sets its arguments.
        runOCL = EmptyFuncDecl('RunOCL' + self.KernelName)
        fileAST.ext.append(runOCL)
        runOCLBody = runOCL.compound.statements
        argIds = self.NonArrayIds.union(self.ArrayIds) #
        typeIdList = []
        ifThenList = []
        for n in argIds:
            type = self.Type[n]
            argn = Id('arg_'+n)
            typeIdList.append(TypeId(type,argn))
            try:
                newn = self.HstId[n]
            except KeyError:
                newn = n
            lval = Id(newn)
            rval = argn
            ifThenList.append(Assignment(lval,rval))
            try:
                # arrays also pass their dimension sizes as arg_<dim> params
                for m in self.ArrayIdToDimName[n]:
                    type = ['size_t']
                    argm = Id('arg_'+m)
                    lval = Id(m)
                    rval = argm
                    ifThenList.append(Assignment(lval,rval))
                    typeIdList.append(TypeId(type, argm))
            except KeyError:
                pass
        arglist = ArgList(typeIdList)
        runOCL.arglist = arglist
        arglist = ArgList([])
        ifThenList.append(FuncDecl(Id('StartUpGPU'), arglist, Compound([])))
        ifThenList.append(FuncDecl(Id('AllocateBuffers'), arglist, Compound([])))
        # compile from GetKernelCode() when an optimised kernel string exists,
        # otherwise from the .cl file on disk
        useFile = 'true'
        if self.KernelStringStream:
            useFile = 'false'
        ifThenList.append(Id('cout << "$Defines " << KernelDefines << endl;'))
        arglist = ArgList([Constant(self.DevFuncId),
                           Constant(self.DevFuncId+'.cl'),
                           Id('GetKernelCode()'),
                           Id(useFile),
                           Id('&' + self.KernelName),
                           Id('KernelDefines')])
        ifThenList.append(FuncDecl(Id('compileKernel'), arglist, Compound([])))
        ifThenList.append(FuncDecl(Id('SetArguments'+self.DevFuncId), ArgList([]), Compound([])))
        runOCLBody.append(IfThen(Id('isFirstTime'), Compound(ifThenList)))
        arglist = ArgList([])
        # Insert timing
        runOCLBody.append(Id('timer.start();'))
        runOCLBody.append(FuncDecl(Id('Exec' + self.DevFuncId), arglist, Compound([])))
        runOCLBody.append(Id('cout << "$Time " << timer.stop() << endl;'))
        return fileAST
| dikujepsen/OpenTran | v1.0/src/framework/rewriter.py | Python | mit | 39,294 |
from re import compile
# ----------------- Local variables ----------------- #
__reCompiles = []
# ----------------- Global methods ----------------- #
def compileTitleRe():
    """Compile the title-normalisation patterns into __reCompiles.

    Order matters: regexify() indexes this list positionally.
    """
    patterns = (
        r'[\{\(\[].*?[\)\]\}/\\]',  # anything between (nested) brackets
        r'^.*?\(',                  # everything up to the first '('
        r'[\)\]\}\-\'\"\,:]',       # stray punctuation characters
        r'\s+',                     # runs of whitespace
    )
    for pattern in patterns:
        __reCompiles.append(compile(pattern))
def regexify(title):
    """Normalise *title* for fuzzy matching.

    Lower-cases the title, removes bracketed segments and anything before the
    first '(', strips stray punctuation, collapses whitespace runs into a
    single space and trims both ends.

    Compiles the patterns on first use, so calling this before
    compileTitleRe() no longer raises IndexError.

    title: the string to be regexified
    """
    if not __reCompiles:  # lazy init: previously IndexError if not compiled yet
        compileTitleRe()
    text = title.lower()                   # convert to lower case first
    text = __reCompiles[0].sub('', text)   # remove everything between brackets
    text = __reCompiles[1].sub('', text)   # remove everything before '('
    text = __reCompiles[2].sub('', text)   # remove excess punctuation
    text = __reCompiles[3].sub(' ', text)  # collapse multiple \s into one
    return text.strip()                    # equivalent to rstrip().lstrip()
| kug3lblitz/Heat-Replay | src/code/settings/regexify.py | Python | mit | 1,045 |
RegObj.dll is an ActiveX server and therefore exposes an automation interface. The DLL, together with its
documentation, is distributed in the file known as RegObji.exe, available from the following page:
http://msdn.microsoft.com/vbasic/downloads/addins.asp
To provide early binding for RegObj use
>>> from win32com.client import gencache
>>> gencache.EnsureModule('{DE10C540-810E-11CF-BBE7-444553540000}', 0, 1, 0)
or the MakePy utility within PythonWin, referring to "Regstration Manipulation Classes (1.0)" (Please notice
the spelling error.)
Sample use, to determine what command is associated with a Python file:
>>> from win32com.client import Dispatch, gencache
>>> from win32con import HKEY_CLASSES_ROOT
>>> gencache.EnsureModule('{DE10C540-810E-11CF-BBE7-444553540000}', 0, 1, 0)
>>> regobj = Dispatch ( 'RegObj.Registry' )
>>> HKCR = regobj.RegKeyFromHKey ( HKEY_CLASSES_ROOT )
>>> PythonFileKey = HKCR.ParseKeyName(r'Python.File\Shell\Open\command')
>>> PythonFileKey.Value
u'J:\\Python22\\pythonw.exe "%1" %*'
| ActiveState/code | recipes/Python/137551_Using_RegObj_Automatiaccess_MSW/recipe-137551.py | Python | mit | 1,024 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import uuid
from db.common import Base
from db.specific_event import SpecificEvent
from db.event import Event
from db.player import Player
from db.team import Team
class Takeaway(Base, SpecificEvent):
    """Database model for a single takeaway event."""

    __tablename__ = 'takeaways'
    __autoload__ = True

    HUMAN_READABLE = 'takeaway'

    STANDARD_ATTRS = [
        "team_id", "player_id", "zone", "taken_from_team_id"
    ]

    def __init__(self, event_id, data_dict):
        """Create a takeaway, defaulting absent standard attributes to None."""
        self.takeaway_id = uuid.uuid4().urn
        self.event_id = event_id
        for attr in self.STANDARD_ATTRS:
            value = data_dict[attr] if attr in data_dict else None
            setattr(self, attr, value)

    def __str__(self):
        player = Player.find_by_id(self.player_id)
        team = Team.find_by_id(self.team_id)
        event = Event.find_by_id(self.event_id)
        return "Takeaway: %s (%s) - %s" % (player.name, team.abbr, event)
| leaffan/pynhldb | db/takeaway.py | Python | mit | 991 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import sys
from RPLCD_i2c import CharLCD
from RPLCD_i2c import Alignment, CursorMode, ShiftMode
from RPLCD_i2c import cursor, cleared
# Py2/Py3 compatibility shims: use raw_input()/unichr() where they exist.
try:
    input = raw_input
except NameError:
    pass
try:
    unichr = unichr
except NameError:
    unichr = chr
# NOTE(review): assumes an I2C backpack at address 0x38 on bus 1 driving a
# 20x4 character display -- confirm against the actual wiring.
lcd = CharLCD(address=0x38, port=1, cols=20, rows=4, dotsize=8)
# Each step performs one LCD operation and waits for the tester to visually
# confirm the result before continuing.
input('Display should be blank. ')
# Cursor shapes
lcd.cursor_mode = CursorMode.blink
input('The cursor should now blink. ')
lcd.cursor_mode = CursorMode.line
input('The cursor should now be a line. ')
# Basic writing and explicit cursor positioning
lcd.write_string('Hello world!')
input('"Hello world!" should be on the LCD. ')
assert lcd.cursor_pos == (0, 12), 'cursor_pos should now be (0, 12)'
lcd.cursor_pos = (1, 0)
lcd.write_string('2')
lcd.cursor_pos = (2, 0)
lcd.write_string('3')
lcd.cursor_pos = (3, 0)
lcd.write_string('4')
assert lcd.cursor_pos == (3, 1), 'cursor_pos should now be (3, 1)'
input('Lines 2, 3 and 4 should now be labelled with the right numbers. ')
lcd.clear()
input('Display should now be clear, cursor should be at initial position. ')
# Write-shift modes: shifting the whole display vs. moving only the cursor
lcd.cursor_pos = (0, 5)
lcd.write_string('12345')
input('The string should have a left offset of 5 characters. ')
lcd.write_shift_mode = ShiftMode.display
lcd.cursor_pos = (1, 5)
lcd.write_string('12345')
input('Both strings should now be at column 0. ')
lcd.write_shift_mode = ShiftMode.cursor
lcd.cursor_pos = (2, 5)
lcd.write_string(lcd.write_shift_mode.name)
input('The string "cursor" should now be on the third row, column 0. ')
lcd.home()
input('Cursor should now be at initial position. Everything should be shifted to the right by 5 characters. ')
# Context managers: temporary cursor placement and a temporarily cleared display
with cursor(lcd, 3, 19):
    lcd.write_string('X')
input('The last character on the LCD should now be an "X"')
lcd.display_enabled = False
input('Display should now be blank. ')
with cleared(lcd):
    lcd.write_string('Eggs, Ham, Bacon\n\rand Spam')
lcd.display_enabled = True
input('Display should now show "Eggs, Ham, Bacon and Spam". ')
# Manual display shifting
lcd.shift_display(4)
input('Text should now be shifted to the right by 4 characters. ')
lcd.shift_display(-4)
input('Shift should now be undone. ')
# Text alignment (right-to-left vs. left-to-right writing)
lcd.text_align_mode = Alignment.right
lcd.cursor_mode = CursorMode.hide
lcd.write_string(' Spam')
input('The word "Spam" should now be inverted. ')
lcd.text_align_mode = Alignment.left
lcd.cursor_mode = CursorMode.hide
lcd.write_string(' Wurscht')
input('The word "mapS" should now be replaced with "Wurscht". ')
# Newline handling
lcd.clear()
lcd.write_string('1\n')
lcd.write_string('2\n')
lcd.write_string('3\n')
lcd.write_string('4')
input('The numbers 1-4 should now be displayed, each line shifted to the right by 1 char more than the previous. ')
# Automatic line wrapping
lcd.clear()
lcd.write_string('This is a long string that will wrap across multiple lines!')
input('Text should nicely wrap around lines. ')
lcd.cursor_mode = CursorMode.hide
# Test custom chars
lcd.clear()
happy = (0b00000, 0b01010, 0b01010, 0b00000, 0b10001, 0b10001, 0b01110, 0b00000)
sad = (0b00000, 0b01010, 0b01010, 0b00000, 0b01110, 0b10001, 0b10001, 0b00000)
lcd.create_char(0, sad)
lcd.write_string(unichr(0))
lcd.create_char(1, happy)
lcd.write_string(unichr(1))
input('You should now see a sad and a happy face next to each other. ')
# Redefining a glyph slot also changes characters already on screen.
lcd.create_char(0, happy)
lcd.home()
lcd.write_string(unichr(0))
input('Now both faces should be happy. ')
# Backlight control (only visible if the backlight pin is wired)
lcd.clear()
lcd.set_backlight(False)
lcd.home()
lcd.write_string('No backlight')
input('Display backlight should be off (if wired). ')
lcd.clear()
lcd.set_backlight(True)
lcd.home()
lcd.write_string('Backlight')
input('Display backlight should be back on (if wired). ')
lcd.clear()
print('Test done.')
| zador-blood-stained/RPLCD-i2c | test_20x4.py | Python | mit | 3,707 |
from distutils.core import setup
# Minimal distutils packaging for the `spendfrom` coin-control helper script.
# NOTE(review): distutils is deprecated and removed in Python 3.12; a move to
# setuptools would be needed for modern interpreters.
setup(name='jgcspendfrom',
      version='1.0',
      description='Command-line utility for jgcoin "coin control"',
      author='Gavin Andresen',
      author_email='gavin@jgcoinfoundation.org',
      requires=['jsonrpc'],
      scripts=['spendfrom.py'],
)
| cptecdfi/jgcoins | contrib/spendfrom/setup.py | Python | mit | 297 |
#!/usr/bin/env python3.8
import os
import sys
import unittest
# Make the project root importable when this test module is run directly:
# append the directory two levels above this file to sys.path.
SCRIPT_DIR = os.path.dirname(
    os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__))))
sys.path.append(
    os.path.normpath(os.path.join(SCRIPT_DIR, os.path.pardir, os.path.pardir)))
# pylint: disable=wrong-import-position
from nonogram.raster import BLACK
from nonogram.raster import UNKNOWN
from nonogram.raster import WHITE
from nonogram.raster.block import Block
from nonogram import rules
class TestRules(unittest.TestCase):
    """Unit tests for the private helpers of nonogram.rules."""
    # pylint: disable=protected-access
    def test_covering_blocks(self):
        """_covering_blocks returns blocks whose [start, end] covers the query range."""
        blocks = [
            Block(start=1, end=10, length=4),
            Block(start=6, end=9, length=4),
            Block(start=3, end=7, length=4),
            Block(start=2, end=4, length=4)
        ]
        covering_blocks = rules._covering_blocks(blocks, start=2)
        self.assertEqual([
            Block(start=1, end=10, length=4),
            Block(start=2, end=4, length=4)
        ], covering_blocks)
        covering_blocks = rules._covering_blocks(blocks, start=3, end=5)
        self.assertEqual([
            Block(start=1, end=10, length=4),
            Block(start=3, end=7, length=4)
        ], covering_blocks)
        # no block starts at or before position 0
        covering_blocks = rules._covering_blocks(blocks, start=0)
        self.assertEqual([], covering_blocks)
    def test_get_black_runs(self):
        """_get_black_runs finds maximal runs of BLACK cells (WHITE/UNKNOWN break runs)."""
        # mask = bytearray(map(ord, 'X.X ..X..X. .X'))
        mask = bytearray([
            BLACK, UNKNOWN, BLACK, WHITE, WHITE, UNKNOWN, UNKNOWN, BLACK,
            UNKNOWN, UNKNOWN, BLACK, UNKNOWN, WHITE, UNKNOWN, BLACK
        ])
        expected = [
            Block(start=0, end=0, length=1),
            Block(start=2, end=2, length=1),
            Block(start=7, end=7, length=1),
            Block(start=10, end=10, length=1),
            Block(start=14, end=14, length=1)
        ]
        self.assertEqual(expected, rules._get_black_runs(mask))
        mask = bytearray([
            UNKNOWN, BLACK, BLACK, WHITE, UNKNOWN, WHITE, UNKNOWN, UNKNOWN,
            BLACK, BLACK
        ])
        expected = [
            Block(start=1, end=2, length=2),
            Block(start=8, end=9, length=2)
        ]
        self.assertEqual(expected, rules._get_black_runs(mask))
        # a fully black mask yields one single run
        mask = bytearray([BLACK, BLACK, BLACK, BLACK])
        expected = [Block(start=0, end=3, length=4)]
        self.assertEqual(expected, rules._get_black_runs(mask))
        # no black cell at all -> no runs
        mask = bytearray([WHITE, UNKNOWN, UNKNOWN, WHITE])
        self.assertEqual([], rules._get_black_runs(mask))
        mask = bytearray([BLACK, WHITE, BLACK, WHITE] + [BLACK] * 4 +
                         [UNKNOWN, BLACK])
        expected = [
            Block(start=0, end=0, length=1),
            Block(start=2, end=2, length=1),
            Block(start=4, end=7, length=4),
            Block(start=9, end=9, length=1)
        ]
        self.assertEqual(expected, rules._get_black_runs(mask))
    def test_get_non_white_runs(self):
        """_get_non_white_runs finds maximal runs of non-space (non-WHITE) cells."""
        mask = bytearray(b'  X. .....')
        expected = [
            Block(start=2, end=3, length=2),
            Block(start=5, end=9, length=5)
        ]
        self.assertEqual(expected, rules._get_non_white_runs(mask))
        mask = bytearray(b'..X  .X  .')
        expected = [
            Block(start=0, end=2, length=3),
            Block(start=5, end=6, length=2),
            Block(start=9, end=9, length=1)
        ]
        self.assertEqual(expected, rules._get_non_white_runs(mask))
        mask = bytearray(b'.    .X.X ')
        expected = [
            Block(start=0, end=0, length=1),
            Block(start=5, end=8, length=4)
        ]
        self.assertEqual(expected, rules._get_non_white_runs(mask))
        mask = bytearray(b'.    .X.  ')
        expected = [
            Block(start=0, end=0, length=1),
            Block(start=5, end=7, length=3)
        ]
        self.assertEqual(expected, rules._get_non_white_runs(mask))
if __name__ == '__main__':
    # Allow running this test module directly: python test_init.py
    unittest.main()
| durante987/nonogram_solver | tests/rules/test_init.py | Python | mit | 3,997 |
import json
import os.path
from hashlib import md5
class TargetFile:
    """Tracks one target file's MD5 hash and the hashes of its dependencies."""

    def __init__(self, path, data = None):
        """Create a record for *path*, optionally restoring serialised *data*
        of the shape produced by raw(): {'hash': ..., 'dependencies': {...}}.
        """
        self.path = path
        self.hash = None
        self.dependencies = {}

        if data is not None:
            self.hash = data['hash']
            self.dependencies = data['dependencies']

    @staticmethod
    def _hash_file(path):
        """Return the hex MD5 digest of *path*, closing the handle.

        The original used `md5(open(path, 'rb').read())` which leaked the
        file handle on non-refcounting interpreters.
        """
        with open(path, 'rb') as fp:
            return md5(fp.read()).hexdigest()

    def raw(self):
        """Return a plain-dict snapshot suitable for JSON serialisation."""
        return {
            'hash': self.hash,
            'dependencies': self.dependencies
        }

    def clean(self):
        """Forget all stored hashes (forces changed() to report True next time)."""
        self.hash = None
        for dependency in self.dependencies:
            self.dependencies[dependency] = None

    def changed(self):
        """Recompute hashes; return True if the file or any dependency changed.

        Updates the stored hashes as a side effect, so a second call without
        intervening modifications returns False.
        """
        changed = False

        # File Hash
        computed_hash = self._hash_file(self.path)
        if self.hash is None or self.hash != computed_hash:
            changed = True
            self.hash = computed_hash

        # File Dependencies (value updates during iteration are safe: no resize)
        for dependency, stored_hash in self.dependencies.items():
            computed_hash = self._hash_file(dependency)
            if stored_hash is None or stored_hash != computed_hash:
                changed = True
                self.dependencies[dependency] = computed_hash

        return changed

    def set_dependencies(self, dependencies):
        """Sync tracked dependencies with *dependencies*: new entries start
        unhashed (None), entries no longer present are dropped."""
        for dependency in dependencies:
            self.dependencies.setdefault(dependency, None)
        for stale in [d for d in self.dependencies if d not in dependencies]:
            del self.dependencies[stale]
class Target(dict):
    """A build target: maps file paths to their TargetFile records."""

    def __init__(self, fp, data = {}):
        dict.__init__(self)
        self._file = fp
        for path, raw_data in data.items():
            self[path] = TargetFile(path, raw_data)

    def raw(self):
        """Return a plain-dict snapshot suitable for JSON serialisation."""
        return {path: record.raw() for path, record in self.items()}

    def tidyup(self, files):
        """Drop records for paths that are no longer part of *files*."""
        for stale in [path for path in self if path not in files]:
            del self[stale]

    def clean(self):
        """Forget the stored hashes of every tracked file."""
        for record in self.values():
            record.clean()

    def __getitem__(self, key):
        # Lazily create a fresh record for unknown paths.
        if key not in self:
            self[key] = TargetFile(key)
        return dict.__getitem__(self, key)
class File(dict):
    """On-disk JSON store mapping target names to Target records."""

    def __init__(self, path):
        """Load targets from the JSON file at *path* if it exists."""
        dict.__init__(self)
        self._path = path

        if os.path.exists(self._path):
            # was json.load(open(...)): leaked the file handle
            with open(self._path) as fp:
                data = json.load(fp)
            if isinstance(data, dict):
                for t in data:
                    self[t] = Target(self, data[t])

    def write(self):
        """Serialise all targets back to the backing JSON file."""
        data = {name: target.raw() for name, target in self.items()}
        # was json.dump(..., open(..., 'w')): handle never explicitly closed
        with open(self._path, 'w') as fp:
            json.dump(data, fp, indent = 4)

    def __getitem__(self, key):
        # Lazily create an empty Target for unknown names.
        if key not in self:
            self[key] = Target(self)
        return dict.__getitem__(self, key)
| mlowen/Pyke | pyke/meta.py | Python | mit | 2,310 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2018-01-07 16:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Replaces the old `Akcije` model with the new `Akcija` model: the new
    # model is created first, the old model's relational fields are removed,
    # `Vod.rod` gets an explicit related_name, and `Akcije` is deleted last.
    dependencies = [
        ('taborniki', '0013_remove_oseba_rojstvo2'),
    ]
    operations = [
        migrations.CreateModel(
            name='Akcija',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('imeAkcija', models.CharField(max_length=50)),
                ('porocilo', models.TextField(max_length=10000)),
                ('organizator', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='taborniki.Oseba')),
                ('udelezenci', models.ManyToManyField(null=True, related_name='akcija_clan', to='taborniki.Oseba')),
            ],
        ),
        migrations.RemoveField(
            model_name='akcije',
            name='organizator',
        ),
        migrations.RemoveField(
            model_name='akcije',
            name='udelezenci',
        ),
        migrations.AlterField(
            model_name='vod',
            name='rod',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='rodov_vod', to='taborniki.Rod'),
        ),
        migrations.DeleteModel(
            name='Akcije',
        ),
    ]
| markbaltic/TaborniskaBaza | taborniki/migrations/0014_auto_20180107_1716.py | Python | mit | 1,457 |
import _plotly_utils.basevalidators
class HoverlabelValidator(_plotly_utils.basevalidators.CompoundValidator):
    # Validator for the compound `hoverlabel` property of scattermapbox traces.
    # NOTE(review): this module follows plotly's machine-generated validator
    # layout -- manual edits here are likely to be overwritten by codegen.
    def __init__(self, plotly_name="hoverlabel", parent_name="scattermapbox", **kwargs):
        super(HoverlabelValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str=kwargs.pop("data_class_str", "Hoverlabel"),
            data_docs=kwargs.pop(
                "data_docs",
                """
            align
                Sets the horizontal alignment of the text
                content within hover label box. Has an effect
                only if the hover label text spans more two or
                more lines
            alignsrc
                Sets the source reference on Chart Studio Cloud
                for  align .
            bgcolor
                Sets the background color of the hover labels
                for this trace
            bgcolorsrc
                Sets the source reference on Chart Studio Cloud
                for  bgcolor .
            bordercolor
                Sets the border color of the hover labels for
                this trace.
            bordercolorsrc
                Sets the source reference on Chart Studio Cloud
                for  bordercolor .
            font
                Sets the font used in hover labels.
            namelength
                Sets the default length (in number of
                characters) of the trace name in the hover
                labels for all traces. -1 shows the whole name
                regardless of length. 0-3 shows the first 0-3
                characters, and an integer >3 will show the
                whole name if it is less than that many
                characters, but if it is longer, will truncate
                to `namelength - 3` characters and add an
                ellipsis.
            namelengthsrc
                Sets the source reference on Chart Studio Cloud
                for  namelength .
""",
            ),
            **kwargs
        )
| plotly/python-api | packages/python/plotly/plotly/validators/scattermapbox/_hoverlabel.py | Python | mit | 2,062 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    # Gives the three datetime fields an explicit default of timezone.now.
    dependencies = [
        ('bookmarks', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='bookmark',
            name='added',
            field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='added'),
        ),
        migrations.AlterField(
            model_name='bookmark',
            name='favicon_checked',
            field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='favicon checked'),
        ),
        migrations.AlterField(
            model_name='bookmarkinstance',
            name='saved',
            field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='saved'),
        ),
    ]
| incuna/incuna-bookmarks | bookmarks/migrations/0002_auto_20160301_1154.py | Python | mit | 897 |
# DO NOT EDIT THIS FILE!
#
# Python module CosCollection__POA generated by omniidl
import omniORB
# Register this package with omniORB's module bookkeeping before loading stubs.
omniORB.updateModule("CosCollection__POA")
# ** 1. Stub files contributing to this module
import CosCollection_idl
# ** 2. Sub-modules
# ** 3. End
| amonmoce/corba_examples | omniORBpy-4.2.1/build/python/COS/CosCollection__POA/__init__.py | Python | mit | 249 |
import json
import os
from AppKit import NSApplication, NSStatusBar, NSMenu, NSMenuItem, NSVariableStatusItemLength, NSImage
from PyObjCTools import AppHelper
from project_cron.models import Schedule
from threading import Timer
from project_cron.utils import logutil
class App(NSApplication):
    """Menu-bar application that runs JSON-defined schedules once a minute."""

    def finishLaunching(self):
        """Build the status-bar item and menu, load schedules, start the timer."""
        # Make statusbar item
        statusbar = NSStatusBar.systemStatusBar()
        self.statusitem = statusbar.statusItemWithLength_(NSVariableStatusItemLength)
        self.icon = NSImage.alloc().initByReferencingFile_('icon.png')
        self.icon.setScalesWhenResized_(True)
        self.icon.setSize_((20, 20))
        self.statusitem.setImage_(self.icon)

        self._schedules = []
        self._menu_items = []
        self._initialize_schedules()
        self._initialize_menu()

        self._timer = Timer(60, self.timer_callback)
        self._timer.start()

    def _initialize_schedules(self):
        """Load Schedule objects from ~/Documents/schedules.json."""
        USER_ROOT = os.path.expanduser('~')
        DOCUMENTS = os.path.join(USER_ROOT, 'Documents')
        SCHEDULES = os.path.join(DOCUMENTS, 'schedules.json')
        # was json.load(open(...)): leaked the file handle
        with open(SCHEDULES, encoding='utf8') as fp:
            schedules = json.load(fp)
        for raw_info in schedules:
            self._schedules.append(Schedule(raw_info))

    def _initialize_menu(self):
        """Create one menu item per schedule, a separator, and a Quit item."""
        self.menubarMenu = NSMenu.alloc().init()
        for schedule in self._schedules:
            menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(schedule.name, 'execute:', '')
            self._menu_items.append(menu_item)
            self.menubarMenu.addItem_(menu_item)
        menu_item = NSMenuItem.separatorItem()
        self.menubarMenu.addItem_(menu_item)
        self.quit = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_('Quit', 'terminate:', '')
        self.menubarMenu.addItem_(self.quit)
        self.statusitem.setMenu_(self.menubarMenu)
        self.statusitem.setToolTip_('Crow')

    def timer_callback(self):
        """Run every schedule, then re-arm the one-minute timer."""
        self._timer = None
        for schedule in self._schedules:
            try:
                schedule.execute()
            except Exception:  # was a bare except: let SystemExit/KeyboardInterrupt propagate
                import traceback
                logutil.error(schedule.name, traceback.format_exc())
        interval = 60
        self._timer = Timer(interval, self.timer_callback)
        self._timer.start()

    def execute_(self, notification):
        """Menu action: run the schedule whose name matches the clicked item."""
        for schedule in self._schedules:
            if schedule.name == notification.title():
                try:
                    schedule.execute_actions()
                except Exception:  # was a bare except: let SystemExit/KeyboardInterrupt propagate
                    import traceback
                    logutil.error(schedule.name, traceback.format_exc())
                schedule._reset()
if __name__ == "__main__":
    # Obtain the shared NSApplication instance and enter the Cocoa event loop.
    app = App.sharedApplication()
    AppHelper.runEventLoop()
| ecleya/project_cron | main.py | Python | mit | 2,772 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-02 09:52
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Wires up Experiment's relational fields (language, owner, pylint,
    # requirements, template, travis) and links ChosenExperimentSteps to
    # Experiment and ExperimentStep.
    initial = True
    dependencies = [
        ('cookiecutter_manager', '0002_auto_20170502_0952'),
        ('requirements_manager', '0001_initial'),
        ('marketplace', '0002_auto_20170502_0952'),
        ('pylint_manager', '0001_initial'),
        ('experiments_manager', '0001_initial'),
        ('build_manager', '0001_initial'),
        ('user_manager', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='experiment',
            name='language',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='marketplace.Language'),
        ),
        migrations.AddField(
            model_name='experiment',
            name='owner',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='user_manager.WorkbenchUser'),
        ),
        migrations.AddField(
            model_name='experiment',
            name='pylint',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='pylint_manager.PylintScan'),
        ),
        migrations.AddField(
            model_name='experiment',
            name='requirements',
            field=models.ManyToManyField(to='requirements_manager.Requirement'),
        ),
        migrations.AddField(
            model_name='experiment',
            name='template',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cookiecutter_manager.CookieCutterTemplate'),
        ),
        migrations.AddField(
            model_name='experiment',
            name='travis',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='build_manager.TravisInstance'),
        ),
        migrations.AddField(
            model_name='chosenexperimentsteps',
            name='experiment',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='experiments_manager.Experiment'),
        ),
        migrations.AddField(
            model_name='chosenexperimentsteps',
            name='step',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='experiments_manager.ExperimentStep'),
        ),
    ]
| MOOCworkbench/MOOCworkbench | experiments_manager/migrations/0002_auto_20170502_0952.py | Python | mit | 2,437 |
from django.conf import settings
from django.db import models
from django_dropimages import settings as di_settings
# If no custom gallery model is configured, fall back to the bundled one.
if not di_settings.CONFIG['DROPIMAGEGALLERY_MODEL']:
    class DropimagesGallery(models.Model):
        # 36 characters -- presumably a canonical UUID string; confirm callers
        gallery_identifier = models.CharField(max_length=36)
        creation_timestamp = models.DateTimeField(auto_now_add=True)
        owner = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True)

# If no custom image model is configured, fall back to the bundled one.
if not di_settings.CONFIG['DROPIMAGE_MODEL']:
    class DropimagesImage(models.Model):
        dropimages_gallery = models.ForeignKey('django_dropimages.DropimagesGallery', related_name='images')
        dropimages_original_filename = models.CharField(max_length=256)
        image = models.ImageField(upload_to='%y/%m/%d')
| sittizen/django_dropimages | django_dropimages/models.py | Python | mit | 859 |
from nose.tools import istest, eq_
from tetris.values.key import Key
class TestKey(object):
    """Tests for the Key value object."""

    @istest
    def same_key_is_equal(self):
        """Two Key instances built from the same value compare equal."""
        eq_(Key("key"), Key("key"))

    @istest
    def key_is_usable_as_key(self):
        """Key is hashable, so it can be used as a dictionary key."""
        lookup = {Key("key"): "Value"}
        eq_(lookup[Key("key")], "Value")
| sirmar/tetris | tetris/values/test/test_key.py | Python | mit | 330 |
# coding: utf-8
"""参数验证相关工具
"""
import re
import ujson
import types
import numbers
from girlfriend.util.lang import args2fields
from girlfriend.exception import InvalidArgumentException
class Rule(object):
    """Describes a parameter-validation rule and carries out the validation.
    """
    @args2fields()
    def __init__(self, name,
                 type=None,
                 required=False, min=None, max=None,
                 regex=None, logic=None, default=None):
        """
        :param name parameter name, normally used in error messages
        :param required when True the parameter is mandatory
        :param min for strings this is the minimum length (a value equal to
               the bound is valid); for numbers (numbers.Number) it is the
               minimum value (equal is valid)
        :param max same as above, for the upper bound
        :param regex regular-expression validation
        :param type type validation; a tuple can be passed for multiple types
        :param logic predicate for more complex business validation, e.g.
               checking in the database whether an email already exists.
               The predicate does not return True/False: it returns an
               error-message string on failure and None on success.
        :param default the default value of this item
        """
        # args2fields() copies each constructor argument onto `self` with a
        # leading underscore (self._name, self._type, ...) -- see the reads
        # below; the body is intentionally empty.
        pass
    @property
    def name(self):
        return self._name
    @property
    def default(self):
        return self._default
    @property
    def required(self):
        return self._required
    def validate(self, value):
        """Run the validation chain against the given value.

        :param value the value to validate
        """
        if self._required and self._is_empty(value):
            raise InvalidArgumentException(
                u"参数 '{}' 的值是必须的,不能为空".format(self._name))
        # Not required and empty: the remaining checks need not run.
        if self._is_empty(value):
            return
        # type check
        self._validate_type(value)
        # size / length check
        self._validate_min_max(value)
        # regex check
        self._validate_regex(value)
        # business-logic check
        self._validate_logic(value)
    def _validate_type(self, value):
        # Skips silently when no type constraint was configured.
        if not self._type:
            return
        if not isinstance(value, self._type):
            raise InvalidArgumentException(
                u"参数 '{name}' 的类型不正确,只允许以下类型:{types}".format(
                    name=self._name,
                    types=self._type
                )
            )
    def _validate_min_max(self, value):
        # Numbers are compared by value; everything else by len().
        if self._min is not None:
            if isinstance(value, numbers.Number):
                if self._min > value:
                    raise InvalidArgumentException(
                        u"参数 '{name}' 的值不能小于{min}".format(
                            name=self._name, min=self._min)
                    )
            else:
                if self._min > len(value):
                    raise InvalidArgumentException(
                        u"参数 '{name}' 的长度不能小于{min}".format(
                            name=self._name, min=self._min)
                    )
        if self._max is not None:
            if isinstance(value, numbers.Number):
                if self._max < value:
                    raise InvalidArgumentException(
                        u"参数 '{name}' 的值不能大于{max}".format(
                            name=self._name, max=self._max)
                    )
            else:
                if self._max < len(value):
                    raise InvalidArgumentException(
                        u"参数 '{name}' 的长度不能大于{max}".format(
                            name=self._name, max=self._max)
                    )
    def _validate_regex(self, value):
        if not self._regex:
            return
        # Non-string values are stringified before matching.
        value = str(value)
        if not re.search(self._regex, value):
            raise InvalidArgumentException(
                u"参数 '{name}' 不符合正则表达式'{regex}'".format(
                    name=self._name, regex=self._regex)
            )
    def _validate_logic(self, value):
        if self._logic is None:
            return
        # The predicate returns an error message on failure, None on success.
        msg = self._logic(value)
        if msg:
            raise InvalidArgumentException(msg)
    def _is_empty(self, value):
        """Return True when the value counts as empty.

        None -> True; empty string -> True; 0 is NOT empty -> False.
        """
        if value is None:
            return True
        # NOTE(review): types.StringType is py2 `str` only -- an empty
        # `unicode` value is not treated as empty here; confirm intent.
        if isinstance(value, types.StringType) and not value:
            return True
        return False
def be_json(name):
    """Return a validator predicate that checks a value parses as JSON.

    :param name: parameter name interpolated into the error message
    :return: callable(value) -> None on success, error string on failure
    """
    def _be_json(value):
        try:
            ujson.loads(value)
        # Fix: a bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt; catch only genuine runtime errors.
        except Exception:
            return u"参数 '{}' 必须是json格式".format(name)
    return _be_json
| chihongze/girlfriend | girlfriend/util/validating.py | Python | mit | 4,999 |
# -*- coding: utf-8 -*-
class CrazyBoxError(Exception):
    """
    The base class for custom exceptions raised by crazybox.
    """
    pass


class DockerError(CrazyBoxError):
    """
    An error occurred with the underlying docker system.

    Fix: previously inherited from ``Exception`` directly, which
    contradicted CrazyBoxError's stated role as the package base class
    and prevented ``except CrazyBoxError`` from catching docker errors.
    Subclassing CrazyBoxError is backward-compatible: it is still an
    ``Exception`` subclass, so existing handlers keep working.
    """
    pass
| USTB-LETTers/judger | exceptions.py | Python | mit | 256 |
"""superlists URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from lists import views as list_views
from lists import urls as list_urls
# URL routing table: the site root renders the lists home page, and every
# "/lists/..." URL is delegated to the lists app's own URLconf.
urlpatterns = [
    url(r'^$', list_views.home_page, name='home'),
    url(r'^lists/', include(list_urls)),
    # url(r'^admin/', include(admin.site.urls)),
]
| rmelchorv/TDD-Cuervos | superlists/urls.py | Python | mit | 896 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
import scipy
from sklearn.neighbors import KNeighborsClassifier
from scipy.cluster import hierarchy as hier
from scipy.spatial import distance
import json
import codecs
import sys
# Command line: argv[1] = input file of word vectors (word followed by
# 200 floats per line); argv[2] = optional output file (default nn9m.dat).
if len(sys.argv) < 2:
    print "Provide file name"
    sys.exit(1)
elif len(sys.argv) < 3:
    out_file = "nn9m.dat"
else:
    out_file = sys.argv[2]
print "Start"
# Read the embedding file: the first token of each non-blank line is the
# word, the last 200 tokens are the vector components.
fi = codecs.open(sys.argv[1],"r","utf-8")
words = []
data = []
for line in fi:
    if not len(line.strip()): continue
    k = line.strip().split()
    words.append(k[0])
    data.append([float(i) for i in k[-200:]])
fi.close()
vectors = np.array(data)
print "Pre-processing done"
# NOTE(review): this returns a dot product (a similarity, not a distance)
# and is never passed to KNeighborsClassifier below, so the classifier's
# default (minkowski) metric is what actually gets used -- confirm intent.
def dist(x,y):
    return np.dot(x,y)
# Fit a KNN model with dummy labels purely to reuse its neighbor search.
knn = KNeighborsClassifier()
knn.fit(vectors,[0]*len(vectors))
# For each word write its 24 nearest neighbours with distances; index 0 of
# the result is the query word itself, hence the loop starts at 1.
fo = codecs.open(out_file,"w","utf-8")
for i,word in enumerate(words):
    d,n = knn.kneighbors(vectors[i], n_neighbors = 25, return_distance = True)
    if i%1000==0: print d,n
    fo.write(word+"\t")
    for j in range(1,len(n[0])):
        fo.write(words[n[0][j]]+" ({:.6f}), ".format(d[0][j]))
    fo.write("\n")
fo.close()
| Shashwat986/thesis | vectoralign/get_nn.py | Python | mit | 1,185 |
# -*- coding: utf-8 -*-
# Public API of the k2 subpackage; each name is re-exported by the
# imports that follow.
__all__ = ["photometry", "epic", "Data", "Inject", "Likelihood", "Summary",
           "FP", "fit_traptransit"]
from . import photometry, epic
from .data import Data
from .inject import Inject
from .likelihood import Likelihood
from .summary import Summary
from .fp import FP
from .traptransit import fit_traptransit
| dfm/ketu | ketu/k2/__init__.py | Python | mit | 345 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated schema migration for the viewer app.

    Adds the ``Group`` model, renames ``ProfileDataPoint.kwh`` to ``kw``
    (the rename suggests the column holds power rather than energy --
    confirm against the model), and links meters to groups via a
    many-to-many relation.
    """

    dependencies = [
        ('viewer', '0006_meter_on_auditlist'),
    ]

    operations = [
        migrations.CreateModel(
            name='Group',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=64)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.RenameField(
            model_name='profiledatapoint',
            old_name='kwh',
            new_name='kw',
        ),
        migrations.AddField(
            model_name='meter',
            name='groups',
            field=models.ManyToManyField(to='viewer.Group'),
            preserve_default=True,
        ),
    ]
| impactlab/jps-handoff | webapp/viewer/migrations/0007_auto_20150408_1402.py | Python | mit | 935 |
from django.utils.encoding import force_text
import re
from django.utils import six
from ginger import serializer
from jinja2 import Markup
__all__ = ['html_json', 'html_attrs', "Element", "CssClassList", "CssStyle", 'add_css_class', 'empty']
def html_json(values):
    """Serialize *values* with the project serializer and return the
    escaped result wrapped in :class:`Markup`."""
    encoded = serializer.encode(values)
    try:
        escaped = encoded.encode("unicode-escape")
    except LookupError:
        # Fall back when the "unicode-escape" codec is unavailable for
        # this string type on the running interpreter.
        escaped = encoded.encode("string-escape")
    return Markup(escaped)
def html_attrs(*args, **kwargs):
    """Build an :class:`HtmlAttr` from the given mappings/keywords and
    render it to its HTML attribute-string form."""
    attributes = HtmlAttr()
    attributes.update(*args, **kwargs)
    return six.text_type(attributes)
def add_css_class(original_class, *css_classes):
    """Merge *css_classes* into *original_class*, deduplicating, and
    return the combined space-separated class string."""
    merged = CssClassList()
    merged.append(original_class)
    merged.append(css_classes)
    return six.text_type(merged)
class CssClassList(object):
    """An ordered, duplicate-free collection of CSS class names."""

    def __init__(self):
        self.classes = []

    def __iter__(self):
        return iter(self.classes)

    def __len__(self):
        return len(self.classes)

    def copy(self):
        """Return a shallow copy of this class list."""
        clone = CssClassList()
        clone.classes.extend(self.classes)
        return clone

    def append(self, value):
        """Add *value* -- a string or an arbitrarily nested list/tuple of
        values -- skipping anything already present."""
        if isinstance(value, six.text_type):
            # Collapse internal whitespace runs to single spaces.
            value = re.sub(r'\s+', ' ', value.strip())
        if len(value) == 1:
            # A one-element sequence is unwrapped (a 1-char string maps
            # to itself, so this is harmless for strings).
            value = value[0]
        if isinstance(value, (tuple, list)):
            for item in value:
                self.append(item)
        elif value not in self.classes:
            self.classes.append(value)

    def __contains__(self, item):
        return item in self.classes

    def __str__(self):
        # Falsy entries are dropped from the rendered string.
        return " ".join(str(cls) for cls in self.classes if cls)
class CssStyle(dict):
    """A dict of CSS properties that renders as an inline ``style`` string."""

    def render(self):
        # Underscores in property names become hyphens (font_size -> font-size).
        pairs = ("%s:%s" % (prop.replace("_", "-"), val)
                 for (prop, val) in six.iteritems(self))
        return ";".join(pairs)

    def __str__(self):
        return self.render()

    def copy(self):
        return CssStyle(super(CssStyle, self).copy())
def _normalize(key):
if key.endswith("_"):
key = key[:-1]
key = key.replace("__", ":").replace("_", "-")
return key
class HtmlAttr(object):
    """A mutable collection of HTML attributes that treats ``class`` and
    ``style`` specially (merged via CssClassList / CssStyle) and renders
    itself to an attribute string."""

    def __init__(self):
        self.attrs = {}            # plain attributes
        self.styles = CssStyle()   # merged inline styles
        self.classes = CssClassList()  # merged CSS classes

    def copy(self):
        """Return an independent copy of all attributes, styles, classes."""
        attr = HtmlAttr()
        attr.attrs = self.attrs.copy()
        attr.styles = self.styles.copy()
        attr.classes = self.classes.copy()
        return attr

    def dict(self):
        # Materialize the combined attribute view (see __iter__).
        return dict(self)

    def __setitem__(self, key, value):
        self.set(key, value)

    def __getitem__(self, item):
        return dict(self)[item]

    def __len__(self):
        return len(dict(self))

    def get(self, key):
        return dict(self).get(key)

    def set(self, key, value):
        """Set one attribute; ``class`` and ``style`` are accumulated
        rather than overwritten."""
        key = _normalize(key)
        if key in {"class"}:
            self.classes.append(value)
        elif key == "style":
            self.styles.update(value)
        else:
            self.attrs[key] = value

    def update(self, *args, **attrs):
        # Accept the same argument shapes as dict.update().
        values = {}
        values.update(*args, **attrs)
        for k, v in values.items():
            self.set(k, v)

    def __iter__(self):
        # Yield plain attributes first, then the accumulated class and
        # style entries (only when non-empty).
        for k, v in six.iteritems(self.attrs):
            yield k, v
        if self.classes:
            yield "class", six.text_type(self.classes)
        if self.styles:
            yield "style", self.styles.render()

    def render(self):
        """Render as ``key='value'`` pairs.

        None/False values are skipped entirely; True renders as a bare
        key; non-string values are JSON-encoded via html_json.
        """
        pairs = []
        for key, value in self:
            if value is None or value is False:
                continue
            if value is True:
                pairs.append(key)
            else:
                if not isinstance(value, six.string_types):
                    value = html_json(value)
                pairs.append("%s='%s'" % (key, str(value)))
        return " ".join(pairs)

    def __str__(self):
        return self.render()
class Element(object):
    """A lightweight, immutable-by-convention HTML element builder.

    ``el(attr=...)`` returns a copy with attributes merged and
    ``el[child, ...]`` returns a copy with children appended, so the
    module-level tag instances below can be shared safely.
    """

    def __init__(self, tag):
        self.tag = tag
        self.attrib = HtmlAttr()
        self.children = []

    def __call__(self, **kwargs):
        # Calling an element produces a copy with extra attributes.
        el = self.copy()
        el.attrib.update(kwargs)
        return el

    def __getitem__(self, item):
        # Indexing produces a copy with the given child(ren) appended.
        el = self.copy()
        if not isinstance(item, (list, tuple)):
            item = [item]
        for c in item:
            el.append(c)
        return el

    def copy(self):
        # Shallow copy: children list is copied, child elements are shared.
        el = self.__class__(self.tag)
        el.attrib = self.attrib.copy()
        el.children = self.children[:]
        return el

    def mutate(self, tag):
        """Return a copy of *tag* carrying this element's attributes and
        children (i.e. re-tag this element)."""
        el = tag.copy()
        el.attrib.update(self.attrib.copy())
        el.children = self.children[:]
        return el

    def append(self, child):
        # None children are silently dropped; sequences are flattened.
        if child is None:
            return
        if isinstance(child, (list, tuple)):
            for c in child:
                self.append(c)
        else:
            self.children.append(child)

    def convert_to_text(self, el, *args, **kwargs):
        # Renderable children delegate to .render(); everything else is
        # coerced to text.
        return el.render(*args, **kwargs) if hasattr(el, 'render') else force_text(el)

    def render_children(self, *args, **kwargs):
        return "".join(filter(None, (self.convert_to_text(c, *args, **kwargs)for c in self.children)))

    def render(self, ctx=None):
        """Render the full tag; a falsy ``if`` attribute suppresses the
        element entirely (returns None)."""
        if self.attrib.get('if') is False:
            return None
        attrs = self.attrib
        content = self.render_children(ctx)
        tag = _normalize(self.tag)
        return u"<{tag} {attrs}>{content}</{tag}>".format(**locals())

    def __str__(self):
        return self.render()

    def __html__(self):
        # Jinja2 protocol: mark the rendered output as safe markup.
        return self.render()
class Empty(Element):
    # A tagless element: renders only its children with no wrapping markup.
    def render(self, *args, **kwargs):
        return self.render_children(*args, **kwargs)
# Shared singleton used wherever a wrapper-less container is needed.
empty = Empty("none")
# Create a module-level Element factory for each common HTML tag and add
# its name to __all__ so star-imports expose them (e.g. ``div``, ``span``).
for name in "html body link meta div span form section article aside main ul li ol dl dd dt p a strong "\
            "i fieldset legend b em input select button label nav textarea " \
            "table tbody tfoot thead tr td th figure caption img".split(" "):
    __all__.append(name)
    globals()[name] = Element(name)
if __name__ == '__main__':
print(input(type="radio", checked=False).render()) | vivsh/django-ginger | ginger/html/common.py | Python | mit | 6,023 |
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import bt_configured_feature_column_info1014_all_of
except ImportError:
bt_configured_feature_column_info1014_all_of = sys.modules[
"onshape_client.oas.models.bt_configured_feature_column_info1014_all_of"
]
try:
from onshape_client.oas.models import btp_annotation231
except ImportError:
btp_annotation231 = sys.modules["onshape_client.oas.models.btp_annotation231"]
try:
from onshape_client.oas.models import btp_argument_declaration232
except ImportError:
btp_argument_declaration232 = sys.modules[
"onshape_client.oas.models.btp_argument_declaration232"
]
try:
from onshape_client.oas.models import btp_function_or_predicate_declaration247
except ImportError:
btp_function_or_predicate_declaration247 = sys.modules[
"onshape_client.oas.models.btp_function_or_predicate_declaration247"
]
try:
from onshape_client.oas.models import btp_identifier8
except ImportError:
btp_identifier8 = sys.modules["onshape_client.oas.models.btp_identifier8"]
try:
from onshape_client.oas.models import btp_space10
except ImportError:
btp_space10 = sys.modules["onshape_client.oas.models.btp_space10"]
try:
from onshape_client.oas.models import btp_statement269
except ImportError:
btp_statement269 = sys.modules["onshape_client.oas.models.btp_statement269"]
try:
from onshape_client.oas.models import btp_statement_block271
except ImportError:
btp_statement_block271 = sys.modules[
"onshape_client.oas.models.btp_statement_block271"
]
try:
from onshape_client.oas.models import btp_type_name290
except ImportError:
btp_type_name290 = sys.modules["onshape_client.oas.models.btp_type_name290"]
class BTPFunctionDeclaration246(ModelComposed):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # Enum values permitted for the documentation_type attribute.
    allowed_values = {
        ("documentation_type",): {
            "FUNCTION": "FUNCTION",
            "PREDICATE": "PREDICATE",
            "CONSTANT": "CONSTANT",
            "ENUM": "ENUM",
            "USER_TYPE": "USER_TYPE",
            "FEATURE_DEFINITION": "FEATURE_DEFINITION",
            "FILE_HEADER": "FILE_HEADER",
            "UNDOCUMENTABLE": "UNDOCUMENTABLE",
            "UNKNOWN": "UNKNOWN",
        },
    }

    validations = {}

    additional_properties_type = None

    @staticmethod
    def openapi_types():
        """
        This must be a class method so a model may have properties that are
        of type self, this ensures that we don't create a cyclic import

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            "bt_type": (str,),  # noqa: E501
            "atomic": (bool,),  # noqa: E501
            "documentation_type": (str,),  # noqa: E501
            "end_source_location": (int,),  # noqa: E501
            "node_id": (str,),  # noqa: E501
            "short_descriptor": (str,),  # noqa: E501
            "space_after": (btp_space10.BTPSpace10,),  # noqa: E501
            "space_before": (btp_space10.BTPSpace10,),  # noqa: E501
            "space_default": (bool,),  # noqa: E501
            "start_source_location": (int,),  # noqa: E501
            "annotation": (btp_annotation231.BTPAnnotation231,),  # noqa: E501
            "arguments_to_document": (
                [btp_argument_declaration232.BTPArgumentDeclaration232],
            ),  # noqa: E501
            "deprecated": (bool,),  # noqa: E501
            "deprecated_explanation": (str,),  # noqa: E501
            "for_export": (bool,),  # noqa: E501
            "space_after_export": (btp_space10.BTPSpace10,),  # noqa: E501
            "symbol_name": (btp_identifier8.BTPIdentifier8,),  # noqa: E501
            "arguments": (
                [btp_argument_declaration232.BTPArgumentDeclaration232],
            ),  # noqa: E501
            "body": (btp_statement_block271.BTPStatementBlock271,),  # noqa: E501
            "precondition": (btp_statement269.BTPStatement269,),  # noqa: E501
            "return_type": (btp_type_name290.BTPTypeName290,),  # noqa: E501
            "space_after_arglist": (btp_space10.BTPSpace10,),  # noqa: E501
            "space_in_empty_list": (btp_space10.BTPSpace10,),  # noqa: E501
            "name": (btp_identifier8.BTPIdentifier8,),  # noqa: E501
        }

    @staticmethod
    def discriminator():
        # This composed model has no discriminator field.
        return None

    # Maps python attribute names to the JSON keys used on the wire.
    attribute_map = {
        "bt_type": "btType",  # noqa: E501
        "atomic": "atomic",  # noqa: E501
        "documentation_type": "documentationType",  # noqa: E501
        "end_source_location": "endSourceLocation",  # noqa: E501
        "node_id": "nodeId",  # noqa: E501
        "short_descriptor": "shortDescriptor",  # noqa: E501
        "space_after": "spaceAfter",  # noqa: E501
        "space_before": "spaceBefore",  # noqa: E501
        "space_default": "spaceDefault",  # noqa: E501
        "start_source_location": "startSourceLocation",  # noqa: E501
        "annotation": "annotation",  # noqa: E501
        "arguments_to_document": "argumentsToDocument",  # noqa: E501
        "deprecated": "deprecated",  # noqa: E501
        "deprecated_explanation": "deprecatedExplanation",  # noqa: E501
        "for_export": "forExport",  # noqa: E501
        "space_after_export": "spaceAfterExport",  # noqa: E501
        "symbol_name": "symbolName",  # noqa: E501
        "arguments": "arguments",  # noqa: E501
        "body": "body",  # noqa: E501
        "precondition": "precondition",  # noqa: E501
        "return_type": "returnType",  # noqa: E501
        "space_after_arglist": "spaceAfterArglist",  # noqa: E501
        "space_in_empty_list": "spaceInEmptyList",  # noqa: E501
        "name": "name",  # noqa: E501
    }

    # Internal bookkeeping attributes that must never be treated as model
    # properties.
    required_properties = set(
        [
            "_data_store",
            "_check_type",
            "_from_server",
            "_path_to_item",
            "_configuration",
            "_composed_instances",
            "_var_name_to_model_instances",
            "_additional_properties_model_instances",
        ]
    )

    def __init__(
        self,
        _check_type=True,
        _from_server=False,
        _path_to_item=(),
        _configuration=None,
        **kwargs
    ):  # noqa: E501
        """btp_function_declaration246.BTPFunctionDeclaration246 - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _from_server (bool): True if the data is from the server
                                False if the data is from the client (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            bt_type (str): [optional]  # noqa: E501
            atomic (bool): [optional]  # noqa: E501
            documentation_type (str): [optional]  # noqa: E501
            end_source_location (int): [optional]  # noqa: E501
            node_id (str): [optional]  # noqa: E501
            short_descriptor (str): [optional]  # noqa: E501
            space_after (btp_space10.BTPSpace10): [optional]  # noqa: E501
            space_before (btp_space10.BTPSpace10): [optional]  # noqa: E501
            space_default (bool): [optional]  # noqa: E501
            start_source_location (int): [optional]  # noqa: E501
            annotation (btp_annotation231.BTPAnnotation231): [optional]  # noqa: E501
            arguments_to_document ([btp_argument_declaration232.BTPArgumentDeclaration232]): [optional]  # noqa: E501
            deprecated (bool): [optional]  # noqa: E501
            deprecated_explanation (str): [optional]  # noqa: E501
            for_export (bool): [optional]  # noqa: E501
            space_after_export (btp_space10.BTPSpace10): [optional]  # noqa: E501
            symbol_name (btp_identifier8.BTPIdentifier8): [optional]  # noqa: E501
            arguments ([btp_argument_declaration232.BTPArgumentDeclaration232]): [optional]  # noqa: E501
            body (btp_statement_block271.BTPStatementBlock271): [optional]  # noqa: E501
            precondition (btp_statement269.BTPStatement269): [optional]  # noqa: E501
            return_type (btp_type_name290.BTPTypeName290): [optional]  # noqa: E501
            space_after_arglist (btp_space10.BTPSpace10): [optional]  # noqa: E501
            space_in_empty_list (btp_space10.BTPSpace10): [optional]  # noqa: E501
            name (btp_identifier8.BTPIdentifier8): [optional]  # noqa: E501
        """
        self._data_store = {}
        self._check_type = _check_type
        self._from_server = _from_server
        self._path_to_item = _path_to_item
        self._configuration = _configuration

        constant_args = {
            "_check_type": _check_type,
            "_path_to_item": _path_to_item,
            "_from_server": _from_server,
            "_configuration": _configuration,
        }
        required_args = {}
        # remove args whose value is Null because they are unset
        required_arg_names = list(required_args.keys())
        for required_arg_name in required_arg_names:
            if required_args[required_arg_name] is nulltype.Null:
                del required_args[required_arg_name]
        model_args = {}
        model_args.update(required_args)
        model_args.update(kwargs)
        # Distribute the incoming values over the composed (allOf) schemas.
        composed_info = validate_get_composed_info(constant_args, model_args, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        unused_args = composed_info[3]

        for var_name, var_value in required_args.items():
            setattr(self, var_name, var_value)
        for var_name, var_value in six.iteritems(kwargs):
            if (
                var_name in unused_args
                and self._configuration is not None
                and self._configuration.discard_unknown_keys
                and not self._additional_properties_model_instances
            ):
                # discard variable.
                continue
            setattr(self, var_name, var_value)

    @staticmethod
    def _composed_schemas():
        # we need this here to make our import statements work
        # we must store _composed_schemas in here so the code is only run
        # when we invoke this method. If we kept this at the class
        # level we would get an error beause the class level
        # code would be run when this module is imported, and these composed
        # classes don't exist yet because their module has not finished
        # loading
        return {
            "anyOf": [],
            "allOf": [
                bt_configured_feature_column_info1014_all_of.BTConfiguredFeatureColumnInfo1014AllOf,
                btp_function_or_predicate_declaration247.BTPFunctionOrPredicateDeclaration247,
            ],
            "oneOf": [],
        }
| onshape-public/onshape-clients | python/onshape_client/oas/models/btp_function_declaration246.py | Python | mit | 13,254 |
from wxPython.wx import *
from twisted.internet import reactor
class MyApp(wxApp):
    """wxPython application that drives the Twisted reactor from a wx timer
    instead of letting the reactor own the main loop."""
    def OnInit(self):
        # Twisted Reactor Code
        # Start the reactor without blocking, then pump it every 250 ms
        # from wx's own event loop via timer id 999999.
        reactor.startRunning()
        EVT_TIMER(self,999999,self.OnTimer)
        self.timer=wxTimer(self,999999)
        self.timer.Start(250,False)
        # End Twisted Code
        # Do whatever you need to do here
        return True
    def OnTimer(self,event):
        # Run any due Twisted calls; doIteration(0) polls without blocking
        # so the wx GUI stays responsive.
        reactor.runUntilCurrent()
        reactor.doIteration(0)
| ActiveState/code | recipes/Python/181780_Using_wxPythTwisted/recipe-181780.py | Python | mit | 465 |
#!/usr/bin/env python
'''
A WebSocket to TCP socket proxy with support for "wss://" encryption.
Copyright 2011 Joel Martin
Licensed under LGPL version 3 (see docs/LICENSE.LGPL-3)
You can make a cert/key with openssl using:
openssl req -new -x509 -days 365 -nodes -out self.pem -keyout self.pem
as taken from http://docs.python.org/dev/library/ssl.html#certificates
'''
import signal, socket, optparse, time, os, sys, subprocess, logging, errno
try: from socketserver import ForkingMixIn
except: from SocketServer import ForkingMixIn
try: from http.server import HTTPServer
except: from BaseHTTPServer import HTTPServer
import select
from websockify import websocket
from websockify import auth_plugins as auth
try:
from urllib.parse import parse_qs, urlparse
except:
from cgi import parse_qs
from urlparse import urlparse
class ProxyRequestHandler(websocket.WebSocketRequestHandler):
    """Handles one WebSocket client: authenticates it, connects to the
    configured TCP/unix target, and shuttles data in both directions."""

    # Printed once per connection when per-frame traffic logging is on.
    traffic_legend = """
Traffic Legend:
    }  - Client receive
    }. - Client receive partial
    {  - Target receive
    >  - Target send
    >. - Target send partial
    <  - Client send
    <. - Client send partial
"""

    def send_auth_error(self, ex):
        # Translate an AuthenticationError into an HTTP error response,
        # forwarding any extra headers the auth plugin supplied.
        self.send_response(ex.code, ex.msg)
        self.send_header('Content-Type', 'text/html')
        for name, val in ex.headers.items():
            self.send_header(name, val)

        self.end_headers()

    def validate_connection(self):
        # Resolve the target from the URL token (when a token plugin is
        # configured), then let the auth plugin accept/reject the client.
        if self.server.token_plugin:
            (self.server.target_host, self.server.target_port) = self.get_target(self.server.token_plugin, self.path)

        if self.server.auth_plugin:
            try:
                self.server.auth_plugin.authenticate(
                    headers=self.headers, target_host=self.server.target_host,
                    target_port=self.server.target_port)
            except auth.AuthenticationError:
                ex = sys.exc_info()[1]
                self.send_auth_error(ex)
                raise

    def new_websocket_client(self):
        """
        Called after a new WebSocket connection has been established.
        """
        # Checking for a token is done in validate_connection()

        # Connect to the target
        if self.server.wrap_cmd:
            msg = "connecting to command: '%s' (port %s)" % (" ".join(self.server.wrap_cmd), self.server.target_port)
        elif self.server.unix_target:
            msg = "connecting to unix socket: %s" % self.server.unix_target
        else:
            msg = "connecting to: %s:%s" % (
                self.server.target_host, self.server.target_port)

        if self.server.ssl_target:
            msg += " (using SSL)"
        self.log_message(msg)

        tsock = websocket.WebSocketServer.socket(self.server.target_host,
                self.server.target_port,
                connect=True, use_ssl=self.server.ssl_target, unix_socket=self.server.unix_target)

        self.print_traffic(self.traffic_legend)

        # Start proxying
        try:
            self.do_proxy(tsock)
        except:
            # Make sure the target socket is torn down before re-raising,
            # whatever went wrong in the proxy loop.
            if tsock:
                tsock.shutdown(socket.SHUT_RDWR)
                tsock.close()
                if self.verbose:
                    self.log_message("%s:%s: Closed target",
                            self.server.target_host, self.server.target_port)
            raise

    def get_target(self, target_plugin, path):
        """
        Parses the path, extracts a token, and looks up a target
        for that token using the token plugin. Sets
        target_host and target_port if successful
        """
        # The files in targets contain the lines
        # in the form of token: host:port

        # Extract the token parameter from url
        args = parse_qs(urlparse(path)[4])  # 4 is the query from url

        if not 'token' in args or not len(args['token']):
            raise self.server.EClose("Token not present")

        token = args['token'][0].rstrip('\n')

        result_pair = target_plugin.lookup(token)

        if result_pair is not None:
            return result_pair
        else:
            raise self.server.EClose("Token '%s' not found" % token)

    def do_proxy(self, target):
        """
        Proxy client WebSocket to normal target socket.
        """
        cqueue = []       # frames waiting to go to the WebSocket client
        c_pend = 0        # partial frame still pending to the client
        tqueue = []       # raw buffers waiting to go to the target socket
        rlist = [self.request, target]

        if self.server.heartbeat:
            now = time.time()
            self.heartbeat = now + self.server.heartbeat
        else:
            self.heartbeat = None

        while True:
            wlist = []

            # Send a WebSocket ping periodically to keep the client alive.
            if self.heartbeat is not None:
                now = time.time()
                if now > self.heartbeat:
                    self.heartbeat = now + self.server.heartbeat
                    self.send_ping()

            # Only select-for-write on sockets that actually have queued data.
            if tqueue: wlist.append(target)
            if cqueue or c_pend: wlist.append(self.request)
            try:
                ins, outs, excepts = select.select(rlist, wlist, [], 1)
            except (select.error, OSError):
                exc = sys.exc_info()[1]
                if hasattr(exc, 'errno'):
                    err = exc.errno
                else:
                    err = exc[0]

                # EINTR (interrupted by signal) is benign; retry the select.
                if err != errno.EINTR:
                    raise
                else:
                    continue

            if excepts: raise Exception("Socket exception")

            if self.request in outs:
                # Send queued target data to the client
                c_pend = self.send_frames(cqueue)

                cqueue = []

            if self.request in ins:
                # Receive client data, decode it, and queue for target
                bufs, closed = self.recv_frames()
                tqueue.extend(bufs)

                if closed:
                    # TODO: What about blocking on client socket?
                    if self.verbose:
                        self.log_message("%s:%s: Client closed connection",
                                self.server.target_host, self.server.target_port)
                    raise self.CClose(closed['code'], closed['reason'])


            if target in outs:
                # Send queued client data to the target
                dat = tqueue.pop(0)
                sent = target.send(dat)
                if sent == len(dat):
                    self.print_traffic(">")
                else:
                    # requeue the remaining data
                    tqueue.insert(0, dat[sent:])
                    self.print_traffic(".>")

            if target in ins:
                # Receive target data, encode it and queue for client
                buf = target.recv(self.buffer_size)
                if len(buf) == 0:
                    if self.verbose:
                        self.log_message("%s:%s: Target closed connection",
                                self.server.target_host, self.server.target_port)
                    raise self.CClose(1000, "Target closed")

                cqueue.append(buf)
                self.print_traffic("{")
class WebSocketProxy(websocket.WebSocketServer):
    """
    Proxy traffic to and from a WebSockets client to a normal TCP
    socket server target. All traffic to/from the client is base64
    encoded/decoded to allow binary data to be sent/received to/from
    the target.
    """

    buffer_size = 65536

    def __init__(self, RequestHandlerClass=ProxyRequestHandler, *args, **kwargs):
        # Save off proxy specific options
        self.target_host    = kwargs.pop('target_host', None)
        self.target_port    = kwargs.pop('target_port', None)
        self.wrap_cmd       = kwargs.pop('wrap_cmd', None)
        self.wrap_mode      = kwargs.pop('wrap_mode', None)
        self.unix_target    = kwargs.pop('unix_target', None)
        self.ssl_target     = kwargs.pop('ssl_target', None)
        self.heartbeat      = kwargs.pop('heartbeat', None)

        self.token_plugin = kwargs.pop('token_plugin', None)
        self.auth_plugin = kwargs.pop('auth_plugin', None)

        # Last 3 timestamps command was run
        self.wrap_times    = [0, 0, 0]

        if self.wrap_cmd:
            # Locate the LD_PRELOAD shim that rebinds the wrapped
            # command's listen port onto a free local port we pick below.
            wsdir = os.path.dirname(sys.argv[0])
            rebinder_path = [os.path.join(wsdir, "..", "lib"),
                             os.path.join(wsdir, "..", "lib", "websockify"),
                             wsdir]
            self.rebinder = None

            for rdir in rebinder_path:
                rpath = os.path.join(rdir, "rebind.so")
                if os.path.exists(rpath):
                    self.rebinder = rpath
                    break

            if not self.rebinder:
                raise Exception("rebind.so not found, perhaps you need to run make")
            self.rebinder = os.path.abspath(self.rebinder)

            self.target_host = "127.0.0.1"  # Loopback
            # Find a free high port
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.bind(('', 0))
            self.target_port = sock.getsockname()[1]
            sock.close()

            # The rebind shim reads these to redirect the wrapped
            # command's listen port to the one we just reserved.
            os.environ.update({
                "LD_PRELOAD": self.rebinder,
                "REBIND_OLD_PORT": str(kwargs['listen_port']),
                "REBIND_NEW_PORT": str(self.target_port)})

        websocket.WebSocketServer.__init__(self, RequestHandlerClass, *args, **kwargs)

    def run_wrap_cmd(self):
        # Launch (or relaunch) the wrapped command, recording the launch
        # time so poll() can detect respawn loops.
        self.msg("Starting '%s'", " ".join(self.wrap_cmd))
        self.wrap_times.append(time.time())
        self.wrap_times.pop(0)
        self.cmd = subprocess.Popen(
                self.wrap_cmd, env=os.environ, preexec_fn=_subprocess_setup)
        self.spawn_message = True

    def started(self):
        """
        Called after Websockets server startup (i.e. after daemonize)
        """
        # Need to call wrapped command after daemonization so we can
        # know when the wrapped command exits
        if self.wrap_cmd:
            dst_string = "'%s' (port %s)" % (" ".join(self.wrap_cmd), self.target_port)
        elif self.unix_target:
            dst_string = self.unix_target
        else:
            dst_string = "%s:%s" % (self.target_host, self.target_port)

        if self.token_plugin:
            msg = " - proxying from %s:%s to targets generated by %s" % (
                self.listen_host, self.listen_port, type(self.token_plugin).__name__)
        else:
            msg = " - proxying from %s:%s to %s" % (
                self.listen_host, self.listen_port, dst_string)

        if self.ssl_target:
            msg += " (using SSL)"

        self.msg("%s", msg)

        if self.wrap_cmd:
            self.run_wrap_cmd()

    def poll(self):
        # If we are wrapping a command, check it's status
        if self.wrap_cmd and self.cmd:
            ret = self.cmd.poll()
            if ret != None:
                self.vmsg("Wrapped command exited (or daemon). Returned %s" % ret)
                self.cmd = None

        if self.wrap_cmd and self.cmd == None:
            # Response to wrapped command being gone
            if self.wrap_mode == "ignore":
                pass
            elif self.wrap_mode == "exit":
                # NOTE(review): ``ret`` is only bound when the exit was
                # detected in this same poll() call -- confirm a second
                # poll after exit cannot reach this branch.
                sys.exit(ret)
            elif self.wrap_mode == "respawn":
                now = time.time()
                avg = sum(self.wrap_times)/len(self.wrap_times)
                if (now - avg) < 10:
                    # 3 times in the last 10 seconds
                    if self.spawn_message:
                        self.warn("Command respawning too fast")
                        self.spawn_message = False
                else:
                    self.run_wrap_cmd()
def _subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def logger_init():
    """Configure the websockify logger: INFO level, plain messages to
    stderr, and no propagation to the root logger."""
    log = logging.getLogger(WebSocketProxy.log_prefix)
    log.propagate = False
    log.setLevel(logging.INFO)
    handler = logging.StreamHandler()
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter("%(message)s"))
    log.addHandler(handler)
def websockify_init():
    """Command line entry point.

    Parses options, normalizes the token/auth plugin settings, validates
    source/target addresses and launches either the internal proxy server
    or the SocketServer-based engine.
    """
    logger_init()

    usage = "\n %prog [options]"
    usage += " [source_addr:]source_port [target_addr:target_port]"
    usage += "\n %prog [options]"
    usage += " [source_addr:]source_port -- WRAP_COMMAND_LINE"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("--verbose", "-v", action="store_true",
            help="verbose messages")
    parser.add_option("--traffic", action="store_true",
            help="per frame traffic")
    parser.add_option("--record",
            help="record sessions to FILE.[session_number]", metavar="FILE")
    parser.add_option("--daemon", "-D",
            dest="daemon", action="store_true",
            help="become a daemon (background process)")
    parser.add_option("--run-once", action="store_true",
            help="handle a single WebSocket connection and exit")
    parser.add_option("--timeout", type=int, default=0,
            help="after TIMEOUT seconds exit when not connected")
    parser.add_option("--idle-timeout", type=int, default=0,
            help="server exits after TIMEOUT seconds if there are no "
                 "active connections")
    parser.add_option("--cert", default="self.pem",
            help="SSL certificate file")
    parser.add_option("--key", default=None,
            help="SSL key file (if separate from cert)")
    parser.add_option("--ssl-only", action="store_true",
            help="disallow non-encrypted client connections")
    parser.add_option("--ssl-target", action="store_true",
            help="connect to SSL target as SSL client")
    parser.add_option("--unix-target",
            help="connect to unix socket target", metavar="FILE")
    parser.add_option("--web", default=None, metavar="DIR",
            help="run webserver on same port. Serve files from DIR.")
    parser.add_option("--wrap-mode", default="exit", metavar="MODE",
            choices=["exit", "ignore", "respawn"],
            help="action to take when the wrapped program exits "
                 "or daemonizes: exit (default), ignore, respawn")
    parser.add_option("--prefer-ipv6", "-6",
            action="store_true", dest="source_is_ipv6",
            help="prefer IPv6 when resolving source_addr")
    parser.add_option("--libserver", action="store_true",
            help="use Python library SocketServer engine")
    parser.add_option("--target-config", metavar="FILE",
            dest="target_cfg",
            help="Configuration file containing valid targets "
                 "in the form 'token: host:port' or, alternatively, a "
                 "directory containing configuration files of this form "
                 "(DEPRECATED: use `--token-plugin TokenFile --token-source "
                 " path/to/token/file` instead)")
    parser.add_option("--token-plugin", default=None, metavar="PLUGIN",
            help="use the given Python class to process tokens "
                 "into host:port pairs")
    parser.add_option("--token-source", default=None, metavar="ARG",
            help="an argument to be passed to the token plugin"
                 "on instantiation")
    parser.add_option("--auth-plugin", default=None, metavar="PLUGIN",
            help="use the given Python class to determine if "
                 "a connection is allowed")
    parser.add_option("--auth-source", default=None, metavar="ARG",
            help="an argument to be passed to the auth plugin"
                 "on instantiation")
    parser.add_option("--auto-pong", action="store_true",
            help="Automatically respond to ping frames with a pong")
    parser.add_option("--heartbeat", type=int, default=0,
            help="send a ping to the client every HEARTBEAT seconds")
    (opts, args) = parser.parse_args()

    if opts.verbose:
        logging.getLogger(WebSocketProxy.log_prefix).setLevel(logging.DEBUG)

    # Plugin "source" arguments are meaningless without the plugin itself.
    if opts.token_source and not opts.token_plugin:
        parser.error("You must use --token-plugin to use --token-source")

    if opts.auth_source and not opts.auth_plugin:
        parser.error("You must use --auth-plugin to use --auth-source")

    # Transform to absolute path as daemon may chdir
    if opts.target_cfg:
        opts.target_cfg = os.path.abspath(opts.target_cfg)

    # Deprecated --target-config is rewritten as the TokenFile plugin.
    if opts.target_cfg:
        opts.token_plugin = 'TokenFile'
        opts.token_source = opts.target_cfg

    del opts.target_cfg

    # Sanity checks
    if len(args) < 2 and not (opts.token_plugin or opts.unix_target):
        parser.error("Too few arguments")
    if sys.argv.count('--'):
        # Everything after "--" is the command line to wrap.
        opts.wrap_cmd = args[1:]
    else:
        opts.wrap_cmd = None
        if len(args) > 2:
            parser.error("Too many arguments")

    if not websocket.ssl and opts.ssl_target:
        parser.error("SSL target requested and Python SSL module not loaded.");

    if opts.ssl_only and not os.path.exists(opts.cert):
        parser.error("SSL only and %s not found" % opts.cert)

    # Parse host:port and convert ports to numbers
    if args[0].count(':') > 0:
        opts.listen_host, opts.listen_port = args[0].rsplit(':', 1)
        # Allow bracketed IPv6 literals, e.g. "[::1]:6080".
        opts.listen_host = opts.listen_host.strip('[]')
    else:
        opts.listen_host, opts.listen_port = '', args[0]

    try: opts.listen_port = int(opts.listen_port)
    except: parser.error("Error parsing listen port")

    # A wrapped command, unix socket or token plugin supplies its own target.
    if opts.wrap_cmd or opts.unix_target or opts.token_plugin:
        opts.target_host = None
        opts.target_port = None
    else:
        if args[1].count(':') > 0:
            opts.target_host, opts.target_port = args[1].rsplit(':', 1)
            opts.target_host = opts.target_host.strip('[]')
        else:
            parser.error("Error parsing target")
        try: opts.target_port = int(opts.target_port)
        except: parser.error("Error parsing target port")

    # Replace the plugin class names by live instances; bare names are
    # resolved inside the websockify.*_plugins packages.
    if opts.token_plugin is not None:
        if '.' not in opts.token_plugin:
            opts.token_plugin = (
                'websockify.token_plugins.%s' % opts.token_plugin)
        token_plugin_module, token_plugin_cls = opts.token_plugin.rsplit('.', 1)
        __import__(token_plugin_module)
        token_plugin_cls = getattr(sys.modules[token_plugin_module], token_plugin_cls)
        opts.token_plugin = token_plugin_cls(opts.token_source)

    del opts.token_source

    if opts.auth_plugin is not None:
        if '.' not in opts.auth_plugin:
            opts.auth_plugin = 'websockify.auth_plugins.%s' % opts.auth_plugin
        auth_plugin_module, auth_plugin_cls = opts.auth_plugin.rsplit('.', 1)
        __import__(auth_plugin_module)
        auth_plugin_cls = getattr(sys.modules[auth_plugin_module], auth_plugin_cls)
        opts.auth_plugin = auth_plugin_cls(opts.auth_source)

    del opts.auth_source

    # Create and start the WebSockets proxy
    libserver = opts.libserver
    del opts.libserver
    if libserver:
        # Use standard Python SocketServer framework
        server = LibProxyServer(**opts.__dict__)
        server.serve_forever()
    else:
        # Use internal service framework
        server = WebSocketProxy(**opts.__dict__)
        server.start_server()
class LibProxyServer(ForkingMixIn, HTTPServer):
    """
    Just like WebSocketProxy, but uses standard Python SocketServer
    framework.

    Proxy-specific options are popped out of ``kwargs``; anything left
    over is reported as ignored.
    """
    def __init__(self, RequestHandlerClass=ProxyRequestHandler, **kwargs):
        # Save off proxy specific options
        self.target_host = kwargs.pop('target_host', None)
        self.target_port = kwargs.pop('target_port', None)
        self.wrap_cmd = kwargs.pop('wrap_cmd', None)
        self.wrap_mode = kwargs.pop('wrap_mode', None)
        self.unix_target = kwargs.pop('unix_target', None)
        self.ssl_target = kwargs.pop('ssl_target', None)
        self.heartbeat = kwargs.pop('heartbeat', None)
        # Token/auth plugins are not supported by the SocketServer engine:
        # discard any values passed in and force them off.  (Previously the
        # popped values were stored and then immediately overwritten.)
        kwargs.pop('token_plugin', None)
        kwargs.pop('auth_plugin', None)
        self.token_plugin = None
        self.auth_plugin = None
        self.daemon = False

        # Server configuration
        listen_host = kwargs.pop('listen_host', '')
        listen_port = kwargs.pop('listen_port', None)
        web = kwargs.pop('web', '')

        # Configuration affecting base request handler
        self.only_upgrade = not web
        self.verbose = kwargs.pop('verbose', False)
        record = kwargs.pop('record', '')
        if record:
            self.record = os.path.abspath(record)
        else:
            # Fix: always define the attribute so handlers can test
            # "if server.record" without risking an AttributeError.
            self.record = None
        self.run_once = kwargs.pop('run_once', False)
        self.handler_id = 0

        # Any remaining options are not applicable to this engine.
        for arg in kwargs.keys():
            print("warning: option %s ignored when using --libserver" % arg)

        if web:
            os.chdir(web)

        HTTPServer.__init__(self, (listen_host, listen_port),
                            RequestHandlerClass)

    def process_request(self, request, client_address):
        """Override process_request to implement a counter"""
        self.handler_id += 1
        ForkingMixIn.process_request(self, request, client_address)
if __name__ == '__main__':
    # Allow running this module directly as the websockify command.
    websockify_init()
| SuperDARNCanada/realtimedisplay | websockets/websockify/websockify/websocketproxy.py | Python | mit | 21,312 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-02-15 07:47
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the ``Facilitator`` model to ``Profile``.

    Auto-generated by Django 1.11; do not edit the operations by hand.
    """

    dependencies = [
        ('studygroups', '0083_auto_20180209_1210'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='Facilitator',
            new_name='Profile',
        ),
    ]
| p2pu/learning-circles | studygroups/migrations/0084_auto_20180215_0747.py | Python | mit | 366 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-13 15:43
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the sales app.

    Creates Category, Order, Product and the ProductSet through-table that
    links Orders to Products with a quantity.  Auto-generated by Django;
    do not edit the operations by hand.
    """

    initial = True

    dependencies = [
        ('accounts', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='category name', max_length=200, unique=True, verbose_name='name')),
                ('desc', models.TextField(help_text='category description', verbose_name='description')),
                ('modified', models.DateTimeField(auto_now=True)),
            ],
            options={
                'verbose_name': 'Category',
                'ordering': ['name'],
                'verbose_name_plural': 'Categories',
            },
        ),
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('desc', models.TextField(blank=True, help_text="order's description", verbose_name='description')),
                ('status', models.PositiveIntegerField(choices=[(0, 'preparation'), (1, 'sent'), (2, 'received')], default=0, help_text="order's status", verbose_name='status')),
                ('modified', models.DateTimeField(auto_now=True, db_index=True)),
                ('created', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.Customer', verbose_name='customer')),
            ],
            options={
                'verbose_name': 'Order',
                'ordering': ['-modified', '-created'],
                'verbose_name_plural': 'Orders',
            },
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(db_index=True, help_text='product name', max_length=200, verbose_name='name')),
                ('price', models.FloatField(db_index=True, help_text="product's price", verbose_name='price')),
                ('image', models.ImageField(help_text="product's image", upload_to='images/', verbose_name='image')),
                ('desc', models.TextField(help_text="product's description", verbose_name='description')),
                ('modified', models.DateTimeField(auto_now=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('category', models.ForeignKey(help_text="product's category", on_delete=django.db.models.deletion.CASCADE, to='sales.Category', verbose_name='category')),
            ],
            options={
                'verbose_name': 'Product',
                'ordering': ['name'],
                'verbose_name_plural': 'Products',
            },
        ),
        migrations.CreateModel(
            name='ProductSet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('number', models.PositiveIntegerField(default=1, verbose_name='number')),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sales.Order', verbose_name='Order')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sales.Product', verbose_name='Product')),
            ],
            options={
                'verbose_name': 'ProductSet',
                'ordering': ['id'],
                'verbose_name_plural': 'ProductSets',
            },
        ),
        migrations.AddField(
            model_name='order',
            name='product',
            field=models.ManyToManyField(blank=True, through='sales.ProductSet', to='sales.Product', verbose_name='Product'),
        ),
    ]
| z0rr0/eshop | shop/sales/migrations/0001_initial.py | Python | mit | 4,179 |
"""
Tensor Contraction Engine output parser.
This module provides parsers of the output of the Tensor Contraction Engine of
So Hirata into Tensor objects in drudge.
"""
import collections
import itertools
import re
from sympy import nsimplify, sympify, Symbol
from drudge import Term
#
# The driver function
# -------------------
#
def parse_tce_out(tce_out, range_cb, base_cb):
    """Parse a TCE output into a list of terms.

    A list of terms, and a dictionary of free symbols will be returned.
    """
    # Keep only the non-empty lines, with surrounding whitespace removed.
    content_lines = [
        stripped
        for stripped in (raw.strip() for raw in tce_out.splitlines())
        if stripped
    ]

    free_vars = collections.defaultdict(set)
    terms = []
    for content_line in content_lines:
        terms.extend(_parse_tce_line(content_line, range_cb, base_cb, free_vars))

    return terms, free_vars
#
# Internal functions
# ------------------
#
def _parse_tce_line(line, range_cb, base_cb, free_vars):
    """Parse a TCE output line into a list of terms.
    """

    # Split the line into the bracketed factor prefix and the trailing
    # term specification.
    matched = re.match(
        r'^\s*\[(?P<factors>.*)\](?P<term>[^\[\]]+)$',
        line
    )
    if matched is None:
        raise ValueError('Invalid TCE output line', line)

    # Parse the term specification into a single raw term, then expand it
    # into the final terms according to the factors (and permutations).
    raw_term = _parse_term(
        matched.group('term').strip(), range_cb, base_cb, free_vars
    )
    return _gen_terms(matched.group('factors').strip(), raw_term)
#
# Some constants for the TCE output format
#
# Base name that marks the summation specification in TCE output.
_SUM_BASE = 'Sum'
#
# Parsing the term specification
#
def _parse_term(term_str, range_cb, base_cb, free_vars):
    """Parse the term string after the square bracket into a Term.

    ``range_cb`` maps an index name to its range and ``base_cb`` maps a
    base name plus its index symbols to an indexable object.  Indices
    that are not summed over are recorded in ``free_vars``, keyed by
    their range.
    """

    # First break the string into indexed values.
    summed_vars, idxed_vals = _break_into_idxed(term_str)

    # Dummy (summed) indices paired with their ranges.
    sums = tuple((Symbol(i), range_cb(i)) for i in summed_vars)
    dumms = {i[0] for i in sums}

    amp = sympify('1')
    for base, indices in idxed_vals:
        indices_symbs = tuple(Symbol(i) for i in indices)
        # Any index not among the dummies is a free variable.
        for i, j in zip(indices_symbs, indices):
            if i not in dumms:
                free_vars[range_cb(j)].add(i)
            continue
        base_symb = base_cb(base, indices_symbs)
        amp *= base_symb[indices_symbs]
        continue

    # The raw term carries no vectors; factors are applied later.
    return Term(sums=sums, amp=amp, vecs=())
def _break_into_idxed(term_str):
    """Break the term string into pairs of indexed base and indices.

    Both the base and the indices variables are going to be simple strings in
    the return value.
    """

    idxed_vals = []
    # Fields are separated by the multiplication asterisk; empty fields
    # (from leading/trailing asterisks) are skipped.
    for field in re.split(r'\s*\*\s*', term_str):
        if not field:
            continue
        matched = re.match(
            r'(?P<base>\w+)\s*\((?P<indices>.*)\)', field
        )
        if matched is None:
            raise ValueError('Invalid indexed value', field)
        idxed_vals.append(
            (matched.group('base'), tuple(matched.group('indices').split()))
        )

    # Summation always comes first in TCE output.
    if idxed_vals[0][0] == _SUM_BASE:
        return idxed_vals[0][1], idxed_vals[1:]
    return (), idxed_vals
#
# Final term generation based on the raw term
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
def _gen_terms(factors_str, raw_term):
    """Generate the actual terms based on the initial factor string.

    The raw term should be a term directly parsed from the term specification
    part of the TCE line. This function will use the factors string in the
    square bracket to turn it into a list of terms for the final value of the
    line.
    """

    # The regular expression for a factor: a sign, a number and an optional
    # index permutation of the form "* P( a b => c d )".
    factor_regex = r'\s*'.join([
        r'(?P<sign>[+-])',
        r'(?P<factor_number>[0-9.]+)',
        r'(?:\*\s*P\((?P<perm_from>[^=>]*)=>(?P<perm_to>[^)]*)\))?',
    ]) + r'\s*'
    # Any character not consumed by a factor match marks malformed input.
    mismatch_regex = r'.'
    regex = '(?P<factor>{})|(?P<mismatch>{})'.format(
        factor_regex, mismatch_regex
    )

    # Iterate over the factors.
    terms = []
    for match_res in re.finditer(regex, factors_str):

        # Test if the result matches a factor.
        if match_res.group('factor') is None:
            raise ValueError('Invalid factor string', factors_str)

        # The value of the factor, kept exact (rational) via nsimplify.
        factor_value = nsimplify(''.join(
            match_res.group('sign', 'factor_number')
        ), rational=True)

        # Get the substitution for the permutation of the indices.
        if match_res.group('perm_from') is not None:
            from_vars = match_res.group('perm_from').split()
            to_vars = match_res.group('perm_to').split()
            subs = {
                Symbol(from_var): Symbol(to_var)
                for from_var, to_var in zip(from_vars, to_vars)
            }
        else:
            subs = {}

        # Add the result: the raw term with indices permuted and scaled.
        terms.append(raw_term.subst(subs).scale(factor_value))

        # Continue to the next factor.
        continue

    return terms
| tschijnmo/drudge | drudge/_tceparser.py | Python | mit | 5,439 |
from pyd.support import setup, Extension, pydexe_sanity_check
# Abort early if the environment cannot build pyd-based executables.
pydexe_sanity_check()

projName = 'interpcontext'

setup(
    name=projName,
    version='1.0',
    ext_modules=[
        # Build the D source, compiling the Deimos bindings in and lumping
        # everything into a single compilation unit.
        Extension(projName, ['interpcontext.d'],
            build_deimos=True, d_lump=True
        )
    ],
)
| ariovistus/pyd | examples/interpcontext/setup.py | Python | mit | 286 |
from selenium import webdriver
from time import sleep
# Start a new Firefox browser session.
driver=webdriver.Firefox()
# Open the Baidu home page.
driver.get("http://www.baidu.com/")
sleep(3)
# Locate the search box by its "name" attribute and type the query
# (the id-based lookup is kept above for reference).
# driver.find_element_by_id('kw').send_keys('Selenium我要自学网')
driver.find_element_by_name('wd').send_keys('Selenium我要自学网')
sleep(3)
# Click the "Baidu Search" button (id "su") to submit the query.
driver.find_element_by_id('su').click()
sleep(3)
# Close the browser and end the WebDriver session.
driver.quit()
| 1065865483/0python_script | four/Webdriver/FindElement/By_idName.py | Python | mit | 343 |
def strip_headers(post):
    """Drop RFC-822 style headers and return the lower-cased message body.

    The body is everything after the first blank line; when no blank line
    exists the whole post is kept (conservative fallback).
    """
    headers, separator, body = post.partition('\n\n')
    if separator:
        return body.lower()
    # Unexpected post inner-structure, be conservative and keep everything.
    return post.lower()
# Demo: print a raw post next to its header-stripped body.
print("#" * 72)
print("Original text:\n\n")
# NOTE(review): `all_twenty_train` is assumed to be a fetched
# 20-newsgroups bunch defined earlier in the notebook -- not visible here.
original_text = all_twenty_train.data[0]
print(original_text)

print("#" * 72)
print("Stripped headers text:\n\n")
text_body = strip_headers(original_text)
print(text_body)
| janusnic/21v-python | unit_20/parallel_ml/notebooks/solutions/07A_1_strip_headers.py | Python | mit | 542 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .run_command_document_base import RunCommandDocumentBase
class RunCommandDocument(RunCommandDocumentBase):
    """Describes the properties of a Run Command.

    :param schema: The VM run command schema.
    :type schema: str
    :param id: The VM run command id.
    :type id: str
    :param os_type: The Operating System type. Possible values include:
     'Windows', 'Linux'
    :type os_type: str or
     ~azure.mgmt.compute.v2017_12_01.models.OperatingSystemTypes
    :param label: The VM run command label.
    :type label: str
    :param description: The VM run command description.
    :type description: str
    :param script: The script to be executed.
    :type script: list[str]
    :param parameters: The parameters used by the script.
    :type parameters:
     list[~azure.mgmt.compute.v2017_12_01.models.RunCommandParameterDefinition]
    """

    # Fields that must be supplied for the document to serialize validly.
    _validation = {
        'schema': {'required': True},
        'id': {'required': True},
        'os_type': {'required': True},
        'label': {'required': True},
        'description': {'required': True},
        'script': {'required': True},
    }

    # Maps Python attribute names to wire-format JSON keys and types.
    _attribute_map = {
        'schema': {'key': '$schema', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'os_type': {'key': 'osType', 'type': 'OperatingSystemTypes'},
        'label': {'key': 'label', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'script': {'key': 'script', 'type': '[str]'},
        'parameters': {'key': 'parameters', 'type': '[RunCommandParameterDefinition]'},
    }

    def __init__(self, schema, id, os_type, label, description, script, parameters=None):
        # Base class stores the shared document fields; this subclass adds
        # the executable script lines and their parameter definitions.
        super(RunCommandDocument, self).__init__(schema=schema, id=id, os_type=os_type, label=label, description=description)
        self.script = script
        self.parameters = parameters
| AutorestCI/azure-sdk-for-python | azure-mgmt-compute/azure/mgmt/compute/v2017_12_01/models/run_command_document.py | Python | mit | 2,330 |
import sys
# True when running under Python 3.x.
IS_PY3 = sys.version_info[0] == 3

if IS_PY3:
    from http.client import NO_CONTENT
    from email import encoders as Encoders
    from urllib.parse import quote, urlencode
    # Aliases so client code can use `unicode`/`bytes` uniformly on 2 and 3.
    unicode = str
    bytes = bytes
else:
    from email import Encoders
    from httplib import NO_CONTENT
    from urllib import quote, urlencode
    unicode = unicode
    # On Python 2 `bytes` is `str`; wrap it so Python 3 style
    # bytes(s, encoding) calls work -- extra arguments are ignored.
    _orig_bytes = bytes
    bytes = lambda s, *a: _orig_bytes(s)
| reclosedev/lathermail | lathermail/compat.py | Python | mit | 421 |
"""BWA (https://github.com/lh3/bwa)
"""
import os
import signal
import subprocess
from bcbio.pipeline import config_utils
from bcbio import bam, utils
from bcbio.distributed import objectstore
from bcbio.distributed.transaction import file_transaction, tx_tmpdir
from bcbio.ngsalign import alignprep, novoalign, postalign, rtg
from bcbio.provenance import do
from bcbio.rnaseq import gtf
from bcbio.variation import sentieon
import bcbio.pipeline.datadict as dd
from bcbio.bam import fastq
from bcbio.log import logger
# Name of the Galaxy *.loc file listing pre-built bwa index locations.
galaxy_location_file = "bwa_index.loc"
def align_bam(in_bam, ref_file, names, align_dir, data):
    """Perform direct alignment of an input BAM file with BWA using pipes.

    This avoids disk IO by piping between processes:
     - samtools sort of input BAM to queryname
     - bedtools conversion to interleaved FASTQ
     - bwa-mem alignment
     - samtools conversion to BAM
     - samtools sort to coordinate
    """
    config = data["config"]
    out_file = os.path.join(align_dir, "{0}-sort.bam".format(names["lane"]))
    samtools = config_utils.get_program("samtools", config)
    bedtools = config_utils.get_program("bedtools", config)
    resources = config_utils.get_resources("samtools", config)
    num_cores = config["algorithm"].get("num_cores", 1)
    # adjust memory for samtools since used for input and output
    max_mem = config_utils.adjust_memory(resources.get("memory", "1G"),
                                         3, "decrease").upper()
    if not utils.file_exists(out_file):
        with tx_tmpdir(data) as work_dir:
            with postalign.tobam_cl(data, out_file, bam.is_paired(in_bam)) as (tobam_cl, tx_out_file):
                if not hla_on(data) or needs_separate_hla(data):
                    bwa_cmd = _get_bwa_mem_cmd(data, out_file, ref_file, "-", with_hla=False)
                else:
                    bwa_cmd = _get_bwa_mem_cmd(data, out_file, ref_file, "-", with_hla=True)
                tx_out_prefix = os.path.splitext(tx_out_file)[0]
                prefix1 = "%s-in1" % tx_out_prefix
                cmd = ("unset JAVA_HOME && "
                       "{samtools} sort -n -l 1 -@ {num_cores} -m {max_mem} {in_bam} -T {prefix1} "
                       "| {bedtools} bamtofastq -i /dev/stdin -fq /dev/stdout -fq2 /dev/stdout "
                       "| {bwa_cmd} | ")
                cmd = cmd.format(**locals()) + tobam_cl
                do.run(cmd, "bwa mem alignment from BAM: %s" % names["sample"], None,
                       [do.file_nonempty(tx_out_file), do.file_reasonable_size(tx_out_file, in_bam)])
    data["work_bam"] = out_file

    # bwakit post-processing corrupts non-HLA alignments in UMI collapsed
    # inputs, so HLA reads get aligned into a separate BAM in that case.
    hla_file = "HLA-" + out_file
    if needs_separate_hla(data):
        if not utils.file_exists(hla_file):
            with tx_tmpdir(data) as work_dir:
                with postalign.tobam_cl(data, hla_file, bam.is_paired(in_bam)) as (tobam_cl, tx_out_file):
                    bwa_cmd = _get_bwa_mem_cmd(data, hla_file, ref_file, "-", with_hla=True)
                    tx_out_prefix = os.path.splitext(tx_out_file)[0]
                    prefix1 = "%s-in1" % tx_out_prefix
                    cmd = ("unset JAVA_HOME && "
                           "{samtools} sort -n -l 1 -@ {num_cores} -m {max_mem} {in_bam} -T {prefix1} "
                           "| {bedtools} bamtofastq -i /dev/stdin -fq /dev/stdout -fq2 /dev/stdout "
                           "| {bwa_cmd} | ")
                    cmd = cmd.format(**locals()) + tobam_cl
                    do.run(cmd, "bwa mem alignment from BAM: %s" % names["sample"], None,
                           [do.file_nonempty(tx_out_file), do.file_reasonable_size(tx_out_file, in_bam)])
        # Fixed: a stray fastq-based _align_mem_hla call here referenced
        # fastq_file/pair_file/rg_info, which are undefined in this function
        # (NameError) and would have clobbered the HLA BAM just produced.
        # Registering hla_bam outside the file-exists check also covers reruns.
        data["hla_bam"] = hla_file
    return data
def _get_bwa_mem_cmd(data, out_file, ref_file, fastq1, fastq2="", with_hla=False):
    """Perform piped bwa mem mapping potentially with alternative alleles in GRCh38 + HLA typing.

    Commands for HLA post-processing:
       base=TEST
       run-HLA $base.hla > $base.hla.top
       cat $base.hla.HLA*.gt | grep ^GT | cut -f2- > $base.hla.all
       rm -f $base.hla.HLA*gt
       rm -f $base.hla.HLA*gz

    NOTE: the returned string is built with format(**locals()), so the
    local variable names below are part of the command template.
    """
    alt_file = ref_file + ".alt"
    if with_hla:
        # Pipe bwa output through bwakit's bwa-postalt.js for alt-aware
        # adjustment and HLA read extraction.
        bwakit_dir = os.path.dirname(os.path.realpath(utils.which("run-bwamem")))
        hla_base = os.path.join(utils.safe_makedir(os.path.join(os.path.dirname(out_file), "hla")),
                                os.path.basename(out_file) + ".hla")
        alt_cmd = (" | {bwakit_dir}/k8 {bwakit_dir}/bwa-postalt.js -p {hla_base} {alt_file}")
    else:
        alt_cmd = ""
    if dd.get_aligner(data) == "sentieon-bwa":
        bwa_exe = "sentieon-bwa"
        exports = sentieon.license_export(data)
    else:
        bwa_exe = "bwa"
        exports = ""
    bwa = config_utils.get_program(bwa_exe, data["config"])
    num_cores = data["config"]["algorithm"].get("num_cores", 1)
    bwa_resources = config_utils.get_resources("bwa", data["config"])
    bwa_params = (" ".join([str(x) for x in bwa_resources.get("options", [])])
                  if "options" in bwa_resources else "")
    rg_info = novoalign.get_rg_info(data["rgnames"])
    # For UMI runs, pass along consensus tags
    c_tags = "-C" if "umi_bam" in data else ""
    # Interleaved input (single stream) needs bwa's -p flag.
    pairing = "-p" if not fastq2 else ""
    # Restrict seed occurances to 1/2 of default, manage memory usage for centromere repeats in hg38
    # https://sourceforge.net/p/bio-bwa/mailman/message/31514937/
    # http://ehc.ac/p/bio-bwa/mailman/message/32268544/
    mem_usage = "-c 250"
    bwa_cmd = ("{exports}{bwa} mem {pairing} {c_tags} {mem_usage} -M -t {num_cores} {bwa_params} -R '{rg_info}' "
               "-v 1 {ref_file} {fastq1} {fastq2} ")
    return (bwa_cmd + alt_cmd).format(**locals())
def is_precollapsed_bam(data):
    """Fastq-name style UMIs without a consensus-collapsed BAM attached yet."""
    return dd.get_umi_type(data) == "fastq_name" and not has_umi(data)

def hla_on(data):
    """HLA typing is active: reference has HLA contigs and a caller is configured."""
    return has_hla(data) and dd.get_hlacaller(data)

def has_umi(data):
    """A consensus-collapsed UMI BAM ("umi_bam") is attached to the sample."""
    return "umi_bam" in data

def has_hla(data):
    """The configured reference genome contains HLA contigs."""
    from bcbio.heterogeneity import chromhacks
    return len(chromhacks.get_hla_chroms(dd.get_ref_file(data))) != 0
def fastq_size_output(fastq_file, tocheck):
    """Yield (count, read_length) string pairs sampled from a fastq file.

    Samples ``tocheck`` reads (seed fixed at 42) from the first two million
    records and tallies read lengths via an awk/sort/uniq pipeline.
    """
    head_count = 8000000
    fastq_file = objectstore.cl_input(fastq_file)
    gzip_cmd = "zcat {fastq_file}" if fastq_file.endswith(".gz") else "cat {fastq_file}"
    # head limits input; awk prints the length of every sequence line
    # (every 4th line, offset 2); uniq -c gives "count length" pairs.
    cmd = (utils.local_path_export() + gzip_cmd + " | head -n {head_count} | "
           "seqtk sample -s42 - {tocheck} | "
           "awk '{{if(NR%4==2) print length($1)}}' | sort | uniq -c")
    def fix_signal():
        """Avoid spurious 'cat: write error: Broken pipe' message due to head command.

        Work around from:
        https://bitbucket.org/brodie/cram/issues/16/broken-pipe-when-heading-certain-output
        """
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)
    count_out = subprocess.check_output(cmd.format(**locals()), shell=True,
                                        executable="/bin/bash", preexec_fn=fix_signal).decode()
    if not count_out.strip():
        raise IOError("Failed to check fastq file sizes with: %s" % cmd.format(**locals()))
    for count, size in (l.strip().split() for l in count_out.strip().split("\n")):
        yield count, size
def _can_use_mem(fastq_file, data, read_min_size=None):
"""bwa-mem handle longer (> 70bp) reads with improved piping.
Randomly samples 5000 reads from the first two million.
Default to no piping if more than 75% of the sampled reads are small.
If we've previously calculated minimum read sizes (from rtg SDF output)
we can skip the formal check.
"""
min_size = 70
if read_min_size and read_min_size >= min_size:
return True
thresh = 0.75
tocheck = 5000
shorter = 0
for count, size in fastq_size_output(fastq_file, tocheck):
if int(size) < min_size:
shorter += int(count)
return (float(shorter) / float(tocheck)) <= thresh
def align_pipe(fastq_file, pair_file, ref_file, names, align_dir, data):
    """Perform piped alignment of fastq input files, generating sorted output BAM.

    Chooses between bwa mem (with or without bwakit HLA post-processing)
    and the older bwa aln backtrack approach based on read lengths and
    configured tools.
    """
    pair_file = pair_file if pair_file else ""
    # back compatible -- older files were named with lane information, use sample name now
    if names["lane"] != dd.get_sample_name(data):
        out_file = os.path.join(align_dir, "{0}-sort.bam".format(names["lane"]))
    else:
        out_file = None
    if not out_file or not utils.file_exists(out_file):
        umi_ext = "-cumi" if "umi_bam" in data else ""
        out_file = os.path.join(align_dir, "{0}-sort{1}.bam".format(dd.get_sample_name(data), umi_ext))
    qual_format = data["config"]["algorithm"].get("quality_format", "").lower()
    min_size = None
    if data.get("align_split") or fastq_file.endswith(".sdf"):
        # Split alignment: stream through named pipes and combine later.
        if fastq_file.endswith(".sdf"):
            min_size = rtg.min_read_size(fastq_file)
        final_file = out_file
        out_file, data = alignprep.setup_combine(final_file, data)
        fastq_file, pair_file = alignprep.split_namedpipe_cls(fastq_file, pair_file, data)
    else:
        final_file = None
        if qual_format == "illumina":
            # bwa requires phred+33 qualities; convert on the fly.
            fastq_file = alignprep.fastq_convert_pipe_cl(fastq_file, data)
            if pair_file:
                pair_file = alignprep.fastq_convert_pipe_cl(pair_file, data)
    rg_info = novoalign.get_rg_info(names)
    if not utils.file_exists(out_file) and (final_file is None or not utils.file_exists(final_file)):
        # If we cannot do piping, use older bwa aln approach
        if ("bwa-mem" not in dd.get_tools_on(data) and
                ("bwa-mem" in dd.get_tools_off(data) or not _can_use_mem(fastq_file, data, min_size))):
            out_file = _align_backtrack(fastq_file, pair_file, ref_file, out_file,
                                        names, rg_info, data)
        else:
            if is_precollapsed_bam(data) or not hla_on(data) or needs_separate_hla(data):
                out_file = _align_mem(fastq_file, pair_file, ref_file, out_file,
                                      names, rg_info, data)
            else:
                out_file = _align_mem_hla(fastq_file, pair_file, ref_file, out_file,
                                          names, rg_info, data)
    data["work_bam"] = out_file

    # bwakit will corrupt the non-HLA alignments in a UMI collapsed BAM file
    # (see https://github.com/bcbio/bcbio-nextgen/issues/3069)
    if needs_separate_hla(data):
        hla_file = os.path.join(os.path.dirname(out_file), "HLA-" + os.path.basename(out_file))
        hla_file = _align_mem_hla(fastq_file, pair_file, ref_file, hla_file, names, rg_info, data)
        data["hla_bam"] = hla_file
    return data
def _align_mem(fastq_file, pair_file, ref_file, out_file, names, rg_info, data):
    """Perform bwa-mem alignment on supported read lengths.

    Streams bwa mem output directly into the BAM conversion/sort command
    without any HLA post-processing.
    """
    with postalign.tobam_cl(data, out_file, pair_file != "") as (tobam_cl, tx_out_file):
        cmd = ("unset JAVA_HOME && "
               "%s | %s" % (_get_bwa_mem_cmd(data, out_file, ref_file, fastq_file, pair_file, with_hla=False), tobam_cl))
        do.run(cmd, "bwa mem alignment from fastq: %s" % names["sample"], None,
               [do.file_nonempty(tx_out_file), do.file_reasonable_size(tx_out_file, fastq_file)])
    return out_file
def _align_mem_hla(fastq_file, pair_file, ref_file, out_file, names, rg_info, data):
    """Perform bwa-mem alignment on supported read lengths with HLA alignments.

    Same pipeline as _align_mem but routes output through bwakit's
    post-alt processing to extract HLA reads.
    """
    with postalign.tobam_cl(data, out_file, pair_file != "") as (tobam_cl, tx_out_file):
        cmd = ("unset JAVA_HOME && "
               "%s | %s" % (_get_bwa_mem_cmd(data, out_file, ref_file, fastq_file, pair_file, with_hla=True), tobam_cl))
        do.run(cmd, "bwa mem alignment from fastq: %s" % names["sample"], None,
               [do.file_nonempty(tx_out_file), do.file_reasonable_size(tx_out_file, fastq_file)])
    return out_file
def needs_separate_hla(data):
    """
    bwakit will corrupt the non-HLA alignments in a UMI collapsed BAM file
    (see https://github.com/bcbio/bcbio-nextgen/issues/3069), so HLA reads
    must be aligned into their own BAM when HLA typing is on and the input
    carries a collapsed UMI BAM.
    """
    return hla_on(data) and has_umi(data)
def _align_backtrack(fastq_file, pair_file, ref_file, out_file, names, rg_info, data):
    """Perform a BWA alignment using 'aln' backtrack algorithm.

    Used for short reads where bwa mem is not suitable: runs `bwa aln`
    per fastq input, then `bwa sampe` (paired) or `bwa samse` (single)
    piped into the sorted BAM conversion.

    NOTE: the sampe/samse command is built with format(**locals()), so
    the local variable names below are part of the command template.
    """
    bwa = config_utils.get_program("bwa", data["config"])
    config = data["config"]
    sai1_file = "%s_1.sai" % os.path.splitext(out_file)[0]
    sai2_file = "%s_2.sai" % os.path.splitext(out_file)[0] if pair_file else ""
    if not utils.file_exists(sai1_file):
        with file_transaction(data, sai1_file) as tx_sai1_file:
            _run_bwa_align(fastq_file, ref_file, tx_sai1_file, config)
    if sai2_file and not utils.file_exists(sai2_file):
        with file_transaction(data, sai2_file) as tx_sai2_file:
            _run_bwa_align(pair_file, ref_file, tx_sai2_file, config)
    with postalign.tobam_cl(data, out_file, pair_file != "") as (tobam_cl, tx_out_file):
        align_type = "sampe" if sai2_file else "samse"
        cmd = ("unset JAVA_HOME && {bwa} {align_type} -r '{rg_info}' {ref_file} {sai1_file} {sai2_file} "
               "{fastq_file} {pair_file} | ")
        cmd = cmd.format(**locals()) + tobam_cl
        do.run(cmd, "bwa %s" % align_type, data)
    return out_file
def _bwa_args_from_config(config):
num_cores = config["algorithm"].get("num_cores", 1)
core_flags = ["-t", str(num_cores)] if num_cores > 1 else []
return core_flags
def _run_bwa_align(fastq_file, ref_file, out_file, config):
    """Run `bwa aln` on a single fastq, writing the .sai output to out_file.

    -n 2 / -k 2 restrict mismatches in the alignment and the seed.
    """
    aln_cl = [config_utils.get_program("bwa", config), "aln",
              "-n 2", "-k 2"]
    aln_cl += _bwa_args_from_config(config)
    aln_cl += [ref_file, fastq_file]
    cmd = "{cl} > {out_file}".format(cl=" ".join(aln_cl), out_file=out_file)
    do.run(cmd, "bwa aln: {f}".format(f=os.path.basename(fastq_file)), None)
def index_transcriptome(gtf_file, ref_file, data):
    """Build a bwa index over the transcriptome FASTA derived from a GTF
    annotation and a genome reference FASTA."""
    transcriptome_fasta = gtf.gtf_to_fasta(gtf_file, ref_file)
    return build_bwa_index(transcriptome_fasta, data)
def build_bwa_index(fasta_file, data):
    """Run ``bwa index`` on fasta_file and return its path."""
    bwa = config_utils.get_program("bwa", data["config"])
    do.run("{bwa} index {fasta_file}".format(bwa=bwa, fasta_file=fasta_file),
           "Creating transcriptome index of %s with bwa." % (fasta_file))
    return fasta_file
def align_transcriptome(fastq_file, pair_file, ref_file, data):
    """
    bwa mem with settings for aligning to the transcriptome for eXpress/RSEM/etc

    Returns the ``data`` dict with the name-sorted transcriptome BAM attached;
    reuses an existing BAM when one is already present.
    """
    work_bam = dd.get_work_bam(data)
    base, ext = os.path.splitext(work_bam)
    out_file = base + ".transcriptome" + ext
    if utils.file_exists(out_file):
        data = dd.set_transcriptome_bam(data, out_file)
        return data
    # bwa mem needs phred+33 quality, so convert if it is Illumina
    if dd.get_quality_format(data).lower() == "illumina":
        # Fix: the original message had %s placeholders but never supplied
        # the filenames, so the literal "%s and %s" was logged.
        logger.info("bwa mem does not support the phred+64 quality format, "
                    "converting %s and %s to phred+33.", fastq_file, pair_file)
        fastq_file = fastq.groom(fastq_file, data, in_qual="fastq-illumina")
        if pair_file:
            pair_file = fastq.groom(pair_file, data, in_qual="fastq-illumina")
    bwa = config_utils.get_program("bwa", data["config"])
    gtf_file = dd.get_gtf_file(data)
    gtf_fasta = index_transcriptome(gtf_file, ref_file, data)
    args = " ".join(_bwa_args_from_config(data["config"]))
    num_cores = data["config"]["algorithm"].get("num_cores", 1)
    # NOTE: format(**locals()) below depends on these exact variable names.
    cmd = ("{bwa} mem {args} -a -t {num_cores} {gtf_fasta} {fastq_file} "
           "{pair_file} ")
    with file_transaction(data, out_file) as tx_out_file:
        message = "Aligning %s and %s to the transcriptome." % (fastq_file, pair_file)
        cmd += "| " + postalign.sam_to_sortbam_cl(data, tx_out_file, name_sort=True)
        do.run(cmd.format(**locals()), message)
    data = dd.set_transcriptome_bam(data, out_file)
    return data
def filter_multimappers(align_file, data):
    """
    Filtering a BWA alignment file for uniquely mapped reads, from here:
    https://bioinformatics.stackexchange.com/questions/508/obtaining-uniquely-mapped-reads-from-bwa-mem-alignment

    Returns the filtered, indexed BAM; reuses it when already present.
    """
    config = dd.get_config(data)
    # sambamba needs -S for SAM input; BAM input needs no extra flag.
    type_flag = "" if bam.is_bam(align_file) else "S"
    base, ext = os.path.splitext(align_file)
    out_file = base + ".unique" + ext
    bed_file = dd.get_variant_regions(data) or dd.get_sample_callable(data)
    bed_cmd = '-L {0}'.format(bed_file) if bed_file else " "
    if utils.file_exists(out_file):
        return out_file
    # XA/SA tags mark alternative/supplementary hits; excluding them keeps
    # only uniquely mapped primary alignments.
    base_filter = '-F "not unmapped {paired_filter} and [XA] == null and [SA] == null and not supplementary " '
    if bam.is_paired(align_file):
        paired_filter = "and paired and proper_pair"
    else:
        paired_filter = ""
    filter_string = base_filter.format(paired_filter=paired_filter)
    sambamba = config_utils.get_program("sambamba", config)
    num_cores = dd.get_num_cores(data)
    # Fix: pass data to file_transaction, consistent with every other
    # transaction in this file, so transactions honor the run configuration.
    with file_transaction(data, out_file) as tx_out_file:
        cmd = ('{sambamba} view -h{type_flag} '
               '--nthreads {num_cores} '
               '-f bam {bed_cmd} '
               '{filter_string} '
               '{align_file} '
               '> {tx_out_file}')
        message = "Removing multimapped reads from %s." % align_file
        do.run(cmd.format(**locals()), message)
    bam.index(out_file, config)
    return out_file
| lbeltrame/bcbio-nextgen | bcbio/ngsalign/bwa.py | Python | mit | 17,449 |
from optparse import OptionParser
import os,sys
from oldowan.mitotype.matcher import HVRMatcher
from oldowan.mitotype.prevalidate import prevalidate_submission
def run_command():
"""Perform automated human mtDNA haplotype identification."""
# Set up the options parser
usage = "usage: %prog [options] sequence|filename"
parser = OptionParser(usage=usage)
parser.add_option('-f',
'--file',
action='store_true',
default=False,
help='load sequences from FASTA file',
dest='use_file')
parser.add_option('-c',
'--csv',
action='store_true',
dest='csv',
default=False,
help='output in comma-separated-value format')
parser.add_option('-n',
'--no-csv-header',
action='store_false',
dest='csv_header',
default=True,
help='output a csv header')
parser.add_option('-o',
'--out',
dest='outfile',
help='write results to FILE',
default=False,
metavar='FILE')
# Parse the options
(options, args) = parser.parse_args()
# At least one argument is always required.
# It will be either the sequence to be tested, or
# When the -f flag is used, the filename of the fasta file
# to be tested
if len(args) != 1:
if options.use_file:
print 'You must provide a filename!'
print "Type 'mitotype -h' for help."
else:
print 'You must provide a sequence to test'
print "Type 'mitotype -h' for help."
sys.exit(1)
# If we've made it this far we're probably going to have to do some
# actual work; initialize the matcher.
hvrm = HVRMatcher()
# Do the work, either:
# (1) load the fasta file
# (2) use sequence passed on the command line
working_text = ''
if options.use_file:
if os.path.exists(args[0]):
f = open(args[0], 'r')
working_text = f.read()
f.close()
else:
print 'ERROR: Could not find file: %s' % args[0]
sys.exit(1)
else:
working_text = args[0]
vi = prevalidate_submission(working_text)
if not vi.valid:
print 'ERROR: Could not validate input: %s' % vi.problem
results = hvrm.match(working_text, vi)
# If outfile option is used, make stdout point to that file
if options.outfile:
outf = open(options.outfile, 'w')
sys.stdout = outf
# If we're outputing to CSV, spit out a header
if options.csv and options.csv_header:
print 'Query Label,Query Defining Positions,Motif Label,Match Score,Motif Defining Positions,Source'
# Output the results
for r in results:
if options.csv:
for row in r.csv_rows():
print row
else:
print r
sys.stdout.flush()
| ryanraaum/oldowan.mitotype | oldowan/mitotype/commandline.py | Python | mit | 3,160 |
import sys, socket, select, time
from optparse import OptionParser
# UDP port the Xbox One listens on for discovery/power packets.
XBOX_PORT = 5050
# Hex payload of a discovery/ping datagram.
XBOX_PING = "dd00000a000000000000000400000002"
# Hex prefix of a power-on datagram; the console's Live ID is appended.
XBOX_POWER = "dd02001300000010"
help_text = "xbox-remote-power.py -a <ip address> -i <live id>"
# True when running under Python 3 (selects input vs raw_input).
py3 = sys.version_info[0] > 2
def main():
    """Power on an Xbox One over the LAN.

    Collects the console IP and Live ID (from options, interactively, or by
    pinging the console), sends a burst of power-on UDP packets, then pings
    again to confirm the console is awake.
    """
    parser = OptionParser()
    parser.add_option('-a', '--address', dest='ip_addr', help="IP Address of Xbox One", default='')
    parser.add_option('-i', '--id', dest='live_id', help="Live ID of Xbox One", default='')
    (opts, args) = parser.parse_args()
    if not opts.ip_addr:
        opts.ip_addr = user_input("Enter the IP address: ")
    ping = False
    if not opts.live_id:
        print("No Live ID given, do you want to attempt to ping the Xbox for it?")
        result = ""
        while result not in ("y", "n"):
            result = user_input("(y/n): ").lower()
        if result == "y":
            ping = True
        elif result == "n":
            opts.live_id = user_input("Enter the Live ID: ")
    # Non-blocking UDP socket; readiness is checked with select() below.
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.setblocking(0)
    s.bind(("", 0))
    s.connect((opts.ip_addr, XBOX_PORT))
    if ping:
        print("Attempting to ping Xbox for Live ID...")
        s.send(bytearray.fromhex(XBOX_PING))
        ready = select.select([s], [], [], 5)
        if ready[0]:
            data = s.recv(1024)
            # Live ID assumed to sit at bytes 199..215 of the ping reply --
            # protocol offset; TODO confirm against the wire format.
            opts.live_id = data[199:215]
        else:
            print("Failed to ping Xbox, please enter Live ID manually")
            opts.live_id = user_input("Enter the Live ID: ")
    # Normalize the Live ID to bytes (it may come from input() or recv()).
    if isinstance(opts.live_id, str):
        live_id = opts.live_id.encode()
    else:
        live_id = opts.live_id
    power_packet = bytearray.fromhex(XBOX_POWER) + live_id + b'\x00'
    # Send several packets one second apart; the console may miss some
    # while waking up.
    print("Sending power on packets to " + opts.ip_addr)
    for i in range(0, 5):
        s.send(power_packet)
        time.sleep(1)
    print("Xbox should turn on now")
    # Confirmation ping: a reply means the console is up.
    s.send(bytearray.fromhex(XBOX_PING))
    ready = select.select([s], [], [], 5)
    if ready[0]:
        data = s.recv(1024)
        opts.live_id = data[199:215]
        print("Ping successful!")
        # NOTE(review): this prints the ID that was sent, not the one just
        # parsed from the confirmation reply above.
        print("Live ID = " + live_id.decode("utf-8"))
        print("")
        print("******************************************")
        print("* Xbox running - Streaming now possible! *")
        print("******************************************")
        print("")
    else:
        print("Failed to ping Xbox - please try again! :(")
        print("")
    s.close()
def user_input(text):
    """Prompt with *text* until a non-empty string is entered; return it."""
    response = ""
    while not response:
        response = input(text) if py3 else raw_input(text)
    return response
# Script entry point: run the power-on sequence when invoked directly.
if __name__ == "__main__":
    main()
| JDQuackers/xbox-remote-power | xbox-remote-power.py | Python | mit | 2,719 |
"""
The Fibonacci Sequence is the series of numbers:
0, 1, 1, 2, 3, 5, 8, 13, 21, 34, ...
The next number is found by adding up the two numbers before it.
The 2 is found by adding the two numbers before it (1+1)
Similarly, the 3 is found by adding the two numbers before it (1+2),
And the 5 is (2+3),
and so on!
"""
"""
Using lists to return a list of fib numbers
"""
def fib1(limit=10):
    """
    Returns a list of fib numbers
    """
    if limit <= 1:
        return [0]
    if limit == 2:
        return [0, 1]
    sequence = [0, 1]
    while len(sequence) < limit:
        sequence.append(sequence[-1] + sequence[-2])
    return sequence
"""
How to return a specific fib number.
"""
def fib2(nth_num=10):
    """
    Returns the nth fib number
    """
    if nth_num <= 1:
        return 0
    if nth_num == 2:
        return 1
    prev, curr = 0, 1
    for _ in range(nth_num - 2):
        prev, curr = curr, prev + curr
    return curr
"""
Solve with generators
"""
def fib3(nth_num=10):
    """
    A generator that yields the first nth_num fib numbers
    """
    yield 0
    if nth_num < 2:
        return
    yield 1
    prev, curr = 0, 1
    for _ in range(nth_num - 2):
        prev, curr = curr, prev + curr
        yield curr
def fib_list(limit=10):
    """Collect the values yielded by the fib3 generator into a list."""
    return list(fib3(limit))
def nth_fib_num(nth_num=10):
    """Return the last value yielded by fib3 — the nth fib number."""
    result = 0
    for value in fib3(nth_num):
        result = value
    return result
# Demo: exercise each implementation with the default of ten numbers.
if __name__ == "__main__":
    print(fib1(10))
    print(fib2(10))
    print(fib_list(10))
    print(nth_fib_num(10))
| crazcalm/PyTN_talk_proposal | recipies/recipe1/fib.py | Python | mit | 2,169 |
# -*- coding: utf8 -*-
"CountColumn filter"
from .abstract import AbstractFilter
class CountColumn(AbstractFilter):
    """Count the columns of a flux and store the result in a variable."""
    name = 'Compter colonnes'
    description = "Compte le nombre de colonnes d'un flux et met le résultat dans une variable"
    node_in = ['cible']
    parameters = [
        {
            'name': 'Variable',
            'key': 'target',
            'type': 'integer'
        }
    ]
    def run(self):
        """Store the number of header columns of the 'cible' flux into the
        configured target variable."""
        target_key = self._model.config('target')
        self._registery.set(target_key, len(self._flux_in['cible']['headers']))
| Exanis/cannelloni | backend/filters/count_column.py | Python | mit | 662 |
# -*- coding: utf-8 -*-
"""
A very thin wrapper on top of the FluidDB RESTful API
Copyright (c) 2009-2010 Seo Sanghyeon, Nicholas Tollervey and others
See README, AUTHORS and LICENSE for more information
"""
import sys
import httplib2
import urllib
import types
if sys.version_info < (2, 6):
import simplejson as json
else:
import json
# There are currently two instances of FluidDB. MAIN is the default standard
# instance and SANDBOX is a scratch version for testing purposes. Data in
# SANDBOX can (and will) be blown away.
MAIN = 'https://fluiddb.fluidinfo.com'
SANDBOX = 'https://sandbox.fluidinfo.com'
instance = MAIN
# Container types whose elements must all be strings to count as primitive.
ITERABLE_TYPES = set((list, tuple))
# Python types mapping onto FluidDB's JSON-serializable primitive values.
SERIALIZABLE_TYPES = set((types.NoneType, bool, int, float, str, unicode, list,
                          tuple))
# Headers sent with every request; login()/logout() manage 'Authorization'.
global_headers = {
    'Accept': '*/*',
}
def login(username, password):
    """
    Set the HTTP Basic 'Authorization' header from the given credentials.
    """
    credentials = username + ':' + password
    global_headers['Authorization'] = 'Basic ' + credentials.encode('base64').strip()
def logout():
    """
    Remove the 'Authorization' header so later requests are anonymous.
    """
    global_headers.pop('Authorization', None)
def call(method, path, body=None, mime=None, tags=[], custom_headers={}, **kw):
    """
    Makes a call to FluidDB
    method = HTTP verb. e.g. PUT, POST, GET, DELETE or HEAD
    path = Path appended to the instance to locate the resource in FluidDB this
        can be either a string OR a list of path elements.
    body = The request body (a dictionary will be translated to json,
        primitive types will also be jsonified)
    mime = The mime-type for the body of the request - will override the
        jsonification of primitive types
    tags = The list of tags to return if the request is to values
    headers = A dictionary containing additional headers to send in the request
    **kw = Query-string arguments to be appended to the URL

    Returns a (response, result) pair where result is decoded from JSON
    when the response content-type is a JSON variant, raw content otherwise.
    Raises TypeError for a tag-value PUT with a non-primitive body and no mime.

    NOTE(review): tags/custom_headers use mutable defaults; safe here because
    they are only read, never mutated.
    """
    http = httplib2.Http()
    # build the URL
    url = build_url(path)
    if kw:
        url = url + '?' + urllib.urlencode(kw)
    if tags and path.startswith('/values'):
        # /values based requests must have a tags list to append to the
        # url args (which are passed in as **kw), so append them so everything
        # gets urlencoded correctly below
        url = url + '&' + urllib.urlencode([('tag', tag) for tag in tags])
    # set the headers
    headers = global_headers.copy()
    if custom_headers:
        headers.update(custom_headers)
    # make sure the path is a string for the following elif check for PUT
    # based requests
    if isinstance(path, list):
        path = '/'+'/'.join(path)
    # Make sure the correct content-type header is sent
    if isinstance(body, dict):
        # jsonify dicts
        headers['content-type'] = 'application/json'
        body = json.dumps(body)
    elif method.upper() == 'PUT' and (
        path.startswith('/objects/') or path.startswith('/about')):
        # A PUT to an "/objects/" or "/about/" resource means that we're
        # handling tag-values. Make sure we handle primitive/opaque value types
        # properly.
        if mime:
            # opaque value (just set the mime type)
            headers['content-type'] = mime
        elif isprimitive(body):
            # primitive values need to be json-ified and have the correct
            # content-type set
            headers['content-type'] = 'application/vnd.fluiddb.value+json'
            body = json.dumps(body)
        else:
            # No way to work out what content-type to send to FluidDB so
            # bail out.
            raise TypeError("You must supply a mime-type")
    response, content = http.request(url, method, body, headers)
    # Decode JSON responses; return raw content (e.g. opaque values) as-is.
    if ((response['content-type'] == 'application/json' or
        response['content-type'] == 'application/vnd.fluiddb.value+json')
        and content):
        result = json.loads(content)
    else:
        result = content
    return response, result
def isprimitive(body):
    """
    Return True when ``body`` is a primitive FluidDB value type.

    See:
    http://doc.fluidinfo.com/fluidDB/api/tag-values.html
    &
    http://bit.ly/hmrMzT
    For an explanation of the difference between primitive and opaque
    values.
    """
    kind = type(body)
    if kind not in SERIALIZABLE_TYPES:
        return False
    if kind in ITERABLE_TYPES:
        # Lists/tuples are primitive only when every element is a string.
        return all(isinstance(x, basestring) for x in body)
    return True
def build_url(path):
    """
    Turn a path — a string or a list of path elements — into the full URL
    against the current FluidDB instance.
    """
    if isinstance(path, list):
        quoted = [urllib.quote(element, safe='') for element in path]
        return instance + '/' + '/'.join(quoted)
    return instance + urllib.quote(path)
| ntoll/fluiddb.py | fluiddb.py | Python | mit | 5,024 |
from django.contrib import admin
from isi_mip.sciencepaper.models import Paper
# Expose Paper in the Django admin with the default ModelAdmin options.
admin.site.register(Paper)
from swappr import app
# Development server: listen on all interfaces, port 9000, debug enabled.
app.run(debug=True, host="0.0.0.0", port=9000)
| swappr-tanda-team/swappr | runserver.py | Python | mit | 71 |
from __future__ import division # Avoid truncating integer division
from visual import * # Module with the VPython graphics functions
from math import *
# --- Scene configuration ---
scene_range = 15
scene.width = 1920
scene.height = 1080
scene.fullscreen = True
scene.autoscale = False
scene.range = (scene_range, scene_range, scene_range)
scene.center = (0,0,0)
scene.forward = (-1,-0.7,-1)
# Step size used when tracing field lines and redraw rate of the loop.
dt = 10
rate_emf = 1000
# Number of longitude/latitude subdivisions for seeding field-line particles.
numero_planos_linhas_campo = 24
# Tracer-particle parameters (charge in elementary charges; mass in kg,
# ~ proton mass).
carga_particula = 1
massa_particula = 1.673*10**-27
# Pole charges (multiplied by the elementary charge in the field formula)
# and initial pole positions.
carga_polo_pos = 5*10**7
pos_polo_pos = vector(0,2,0)
carga_polo_neg = -5*10**7
pos_polo_neg = vector(0,-2,0)
def criacao_emf():
    """Build the dipole scene and trace its electric field lines.

    Seeds tracer particles on a sphere around the positive pole, then steps
    each one along the local electric field, drawing curves as field lines.
    Returns after the user drags a pole (so the caller can rebuild).
    """
    # Positive and negative poles
    global pos_polo_pos
    global pos_polo_neg
    polo_pos = sphere(pos=pos_polo_pos, radius=1, material = materials.marble, opacity=0.25)
    polo_neg = sphere(pos=pos_polo_neg, radius=1, material = materials.marble, opacity=0.25)
    # Build a reference frame centred on the positive sphere (vec_y_polo_pos
    # is parallel to the vector joining the two sphere centres).
    # These vectors are used as rotation axes below.
    norm_vec_conect_center_spheres = norm(polo_pos.pos - polo_neg.pos)
    vec_norm_polo_pos = vector(norm_vec_conect_center_spheres.y, norm_vec_conect_center_spheres.x, 0)
    vec_x_polo_pos = arrow(pos=polo_pos.pos, axis=vec_norm_polo_pos, opacity=0.25, color = color.red)
    vec_y_polo_pos = arrow(pos=polo_pos.pos, axis=norm_vec_conect_center_spheres, opacity=0.25, color = color.green)
    vec_z_polo_pos = arrow(pos=polo_pos.pos, axis=cross(vec_y_polo_pos.axis, vec_x_polo_pos.axis), opacity=0.25, color = color.cyan)
    # Data holders
    lista_particulas_emf = []
    lista_trajectos = []
    # Rotation angles
    latitude = 0
    longitude = 0
    # Create the tracer particles on a sphere around the positive pole
    while (longitude < 180):
        dir_longitude = vec_x_polo_pos.axis.rotate(angle=radians(longitude), axis=vec_y_polo_pos.axis)
        latitude_axis = vec_z_polo_pos.axis.rotate(angle=radians(longitude), axis=vec_y_polo_pos.axis)
        while (latitude < 360):
            dir_particula = dir_longitude.rotate(angle=radians(latitude), axis=latitude_axis)
            pos_particula = polo_pos.pos + dir_particula
            particula = sphere(pos=pos_particula, radius=0.05, opacity=0.25)
            trajecto = curve(pos=pos_particula, color=color.yellow)
            lista_particulas_emf.append(particula)
            lista_trajectos.append(trajecto)
            latitude += 360 / numero_planos_linhas_campo
        latitude = 0
        longitude += 360 / numero_planos_linhas_campo
    # Build arrays from the lists
    array_particulas_emf = array(lista_particulas_emf)
    array_trajectos = array(lista_trajectos)
    # Trace the field lines
    continuar = True
    picked_pole = None
    while continuar:
        rate(rate_emf)
        # If the user moves one of the poles, stop so the caller rebuilds
        # the field lines from scratch.
        if scene.mouse.events:
            m = scene.mouse.getevent()
            if m.drag:
                if (m.pick == polo_pos or m.pick == polo_neg):
                    picked_pole = m.pick
            elif m.drop:
                if picked_pole:
                    continuar = False
                    pos_polo_pos = polo_pos.pos
                    pos_polo_neg = polo_neg.pos
                    # Clear the current objects and field lines
                    while(len(scene.objects) > 0):
                        scene.objects[0].visible = False
        if picked_pole:
            current_pos = scene.mouse.pos
            offset = current_pos - picked_pole.pos
            if (offset != 0):
                picked_pole.pos += offset
        for i in range(array_particulas_emf.size):
            # Particles that wander far from the poles, or that enter the
            # negative pole, are frozen in place.
            if ((mag(array_particulas_emf[i].pos) < scene_range) and (mag(array_particulas_emf[i].pos - polo_neg.pos) > polo_neg.radius)):
                # Field computation:
                # Fe = k |q1|*|q1| / K r^2 -> Coulomb's law
                # E = Fe / q
                # E = k * q1 / K r^2
                dist_particulas_pos = array_particulas_emf[i].pos - polo_pos.pos
                dist_particulas_neg = array_particulas_emf[i].pos - polo_neg.pos
                Eqp = ((9*10**9 * carga_polo_pos * 1.602*10**-19) / mag(dist_particulas_pos)**2) * norm(dist_particulas_pos)
                Eqn = ((9*10**9 * carga_polo_neg * 1.602*10**-19) / mag(dist_particulas_neg)**2) * norm(dist_particulas_neg)
                E = Eqp + Eqn
                # x = x0 + v*t
                # Since we are drawing field lines, we step through space
                # using E as the direction vector (analogous to a particle's
                # velocity).
                pos = array_particulas_emf[i].pos + E * dt
                # Update the data
                #array_campo_mag_emf[i] = E
                array_particulas_emf[i].pos = pos
                array_trajectos[i].append(pos)
# Rebuild the field lines forever; criacao_emf returns after the user
# drags a pole, which triggers a full redraw here.
while True:
    criacao_emf()
| carlosmccosta/Electric-Dipole | Source code/Electric dipole field lines.py | Python | mit | 5,205 |
#!/usr/bin/env python3
#
# This script executes different GO BPScore algorithms
# in order to compare their run times.
# for timing
import time
# for the data connection
import pappi.sql
from pappi.data_config import *
# import the GO association loading function
from pappi.go.utils import load_go_associations_sql
# import similarity scorer to be benchmarked
from pappi.go.fast_similarity import GoFastSimilarity
from pappi.go.fastSemSim_similarity import GoFastSemSimSimilarity
from pappi.go.prebuf_similarity import GoPreBufSimilarity
from pappi.go.gene_prebuf_similarity import GoGenePreBufSimilarity
class BPScore_Benchmarker:
    """Benchmark initialization and scoring times of the GO BPScore scorers."""

    def __init__(self):
        # Open the database connection and load the benchmark gene list.
        self.con = pappi.sql.get_conn(DATABASE)
        self.genes = self.get_benchmark_genes(self.con)
        self.scorers = []
        self.init_time = []
        # Maps gene-set size -> list of per-scorer run times.
        self.run_times = dict()

    def init_scorers(self):
        """Construct every scorer implementation, timing each construction."""
        factories = [
            lambda: GoFastSimilarity(GO_OBO_FILE, self.con, True),
            lambda: GoFastSemSimSimilarity(GO_OBO_FILE, GO_ASSOC_FILE,
                                           self.con),
            lambda: GoPreBufSimilarity(GO_OBO_FILE, GO_SCORE_FILE,
                                       GO_SCORE_MAP_FILE, self.con, True),
            lambda: GoGenePreBufSimilarity(GO_OBO_FILE, GO_SCORE_FILE,
                                           GO_SCORE_MAP_FILE,
                                           GO_BPSCORE_FILE,
                                           GO_BPSCORE_MAP_FILE, self.con,
                                           True),
        ]
        for make_scorer in factories:
            start = time.time()
            self.scorers.append(make_scorer())
            self.init_time.append(time.time() - start)

    def benchmark_scorers(self, nGenes):
        """Time every scorer on a gene set of the given size."""
        gene_subset = set(self.genes[0:nGenes])
        timings = []
        for scorer in self.scorers:
            start = time.time()
            scorer.gene_set_score(gene_subset)
            timings.append(time.time() - start)
        self.run_times[nGenes] = timings

    def get_benchmark_genes(self, sql_conn):
        """Return the genes that carry GO associations, as a list for fast
        range slicing."""
        assoc = load_go_associations_sql(sql_conn)
        return list(assoc.keys())

    def run_benchmark(self):
        """Benchmark scoring for gene-set sizes 10, 20, ..., 1000."""
        for n in range(10, 1001, 10):
            print("benchmarking for n = " + str(n) + " genes...")
            self.benchmark_scorers(n)

    def print_timings(self):
        """Print a tab-separated table of init and per-size scoring times."""
        print("scored by " + str(len(self.scorers)) + " scorers")
        print()
        print("n\t" + "\t".join(s.__class__.__name__ for s in self.scorers))
        print("init\t" + "\t".join(str(t) for t in self.init_time))
        for n in sorted(self.run_times.keys()):
            row = self.run_times[n]
            print(str(n) + "\t" + "\t".join(str(s) for s in row))
# The main benchmark: build the benchmarker, time scorer construction,
# run the scoring sweep, then report.
if __name__ == '__main__':
    print("loading benchmarking class...")
    benchmarker = BPScore_Benchmarker()
    print("benchmark init times...")
    benchmarker.init_scorers()
    print("benchmark scoring...")
    benchmarker.run_benchmark()
    # print the actual timing results
    benchmarker.print_timings()
| patflick/tsppi | src/bpscore_benchmark.py | Python | mit | 3,882 |
from aiohttp import web
from aiohttp_session import get_session, SESSION_KEY as SESSION_COOKIE_NAME
from aioweb.middleware.csrf.templatetags import CsrfTag, CsrfRawTag
from aioweb.util import awaitable
from aioweb.modules.template.backends.jinja2 import APP_KEY as JINJA_APP_KEY
import random, string
from aiohttp_session import get_session
from hashlib import sha256
# Names under which the CSRF token travels: form field, session key,
# request header and cookie.
CSRF_FIELD_NAME = 'csrftoken'
CSRF_SESSION_NAME = 'csrf_token'
CSRF_HEADER_NAME = 'X-Csrf-Token'
CSRF_COOKIE_NAME = 'Csrf-Token'
REASON_NO_CSRF_COOKIE = "CSRF cookie not set."
REASON_BAD_TOKEN = "CSRF token missing or incorrect."
# Token geometry: secret length, salt length, allowed alphabet, separator
# between the salt and the digest.
CSRF_LENGTH = 128
CSRF_SALT_LENGTH = 6
CSRF_ALLOWED_CHARS = string.ascii_letters + string.digits
CSRF_TOKEN_SEPARATOR = '-'
def generate_csrf_secret():
    """Return a random alphanumeric secret of CSRF_LENGTH characters."""
    return ''.join(random.choice(CSRF_ALLOWED_CHARS) for _ in range(CSRF_LENGTH))
def generate_salt():
    """Return a random alphanumeric salt of CSRF_SALT_LENGTH characters."""
    return ''.join(random.choice(CSRF_ALLOWED_CHARS) for _ in range(CSRF_SALT_LENGTH))
async def get_secret(request):
    """
    Return the per-session CSRF secret, creating one when it is missing
    or empty.
    """
    session = await get_session(request)
    secret = session.get(CSRF_SESSION_NAME)
    if secret:
        return secret
    return await set_secret(request)
def make_token(salt, secret):
    """Build a token of the form '<salt><sep><sha256(salt<sep>secret)>'."""
    salted_secret = "{}{}{}".format(salt, CSRF_TOKEN_SEPARATOR, secret)
    digest = sha256(salted_secret.encode()).hexdigest()
    return "{}{}{}".format(salt, CSRF_TOKEN_SEPARATOR, digest)
async def get_token(request):
    """Return a freshly salted token derived from the session secret."""
    salt = generate_salt()
    return make_token(salt, await get_secret(request))
async def set_secret(request):
    """Generate a new CSRF secret, store it in the session and return it."""
    session = await get_session(request)
    secret = generate_csrf_secret()
    session[CSRF_SESSION_NAME] = secret
    return secret
def validate_token(token, secret):
    """Check a client-supplied token against the session secret.

    Fixes two defects: the separator is now CSRF_TOKEN_SEPARATOR instead of
    a hard-coded '-', and a malformed token without a separator returns
    False rather than raising ValueError (which surfaced as a 500 instead
    of the intended 403).
    """
    if CSRF_TOKEN_SEPARATOR not in token:
        # Malformed token: no salt/digest separator present.
        return False
    salt, _digest = token.split(CSRF_TOKEN_SEPARATOR, maxsplit=1)
    return token == make_token(salt, secret)
async def middleware(app, handler):
    """aiohttp middleware factory: attach a fresh csrf_token to each request
    before invoking the wrapped handler."""
    async def middleware_handler(request):
        setattr(request, 'csrf_token', await get_token(request))
        try:
            return await awaitable(handler(request))
        except web.HTTPException as e:
            # HTTP errors propagate unchanged to aiohttp's error handling.
            raise e
    return middleware_handler
def setup(app):
    # Register the CSRF template tags with the app's jinja2 environment so
    # templates can emit the token.
    app[JINJA_APP_KEY].add_extension(CsrfTag)
    app[JINJA_APP_KEY].add_extension(CsrfRawTag)
async def pre_dispatch(request, controller, actionName):
    # Validate the CSRF token before the controller action runs.
    # Safe methods skip the check, as do actions explicitly marked with a
    # csrf_disabled attribute; everything else must present a valid token
    # in the X-Csrf-Token header or the 'csrftoken' form field.
    reason = None
    check_ok = True
    if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
        action = getattr(controller, actionName)
        if not getattr(action, 'csrf_disabled', False):
            check_ok = False
            # The header takes precedence over the POSTed form field.
            token = request.headers.get(CSRF_HEADER_NAME)
            if not token:
                data = await request.post()
                token = data.get(CSRF_FIELD_NAME)
            if token:
                if validate_token(token, await get_secret(request)):
                    check_ok = True
                else:
                    reason = REASON_BAD_TOKEN
            else:
                reason = REASON_NO_CSRF_COOKIE
    if not check_ok:
        raise web.HTTPForbidden(reason=reason)
| kreopt/aioweb | aioweb/middleware/csrf/__init__.py | Python | mit | 3,212 |
from django.http import HttpResponse, HttpResponseForbidden
from django.core.cache import cache
from inboxtix.util import get_api_tree
def autocomplete_category(request):
    """Return newline-separated category names matching the 'q' query param.

    AJAX-only endpoint; non-AJAX requests get a 403.
    """
    if not request.is_ajax():
        return HttpResponseForbidden()
    name = request.GET.get('q', None)
    limit = request.GET.get('limit', 10)
    if not name:
        return HttpResponse('')
    tree = get_api_tree('category', 'search', name=name, limit=limit)
    matches = [cat.find('name').text for cat in tree.iter('category')]
    return HttpResponse('\n'.join(matches))
| tdavis/inboxtix | inboxtix/home/views.py | Python | mit | 594 |
#!/usr/bin/env python
"""
@file HybridVAControl.py
@author Craig Rafter
@date 19/08/2016
class for hybrid vehicle-actuated (VA) signal control
"""
import signalControl, readJunctionData, traci
from math import atan2, degrees, hypot
import numpy as np
from collections import defaultdict
class HybridVAControl(signalControl.signalControl):
    def __init__(self, junctionData, minGreenTime=10., maxGreenTime=60., scanRange=250, packetRate=0.2):
        """Set up hybrid VA control for one junction.

        junctionData: junction description with stages/control strings.
        minGreenTime/maxGreenTime: stage duration bounds in seconds.
        scanRange: CAM detection radius around the junction in metres.
        packetRate: CAM packet period in seconds (stored as milliseconds).
        """
        super(HybridVAControl, self).__init__()
        self.junctionData = junctionData
        self.firstCalled = traci.simulation.getCurrentTime()
        self.lastCalled = self.firstCalled
        self.lastStageIndex = 0
        # Start the lights on the first stage's control string.
        traci.trafficlights.setRedYellowGreenState(self.junctionData.id,
            self.junctionData.stages[self.lastStageIndex].controlString)
        self.packetRate = int(1000*packetRate)
        self.transition = False
        # self.CAMactive = False
        # dict[vehID] = [position, heading, velocity, Tdetect]
        self.newVehicleInfo = {}
        self.oldVehicleInfo = {}
        self.scanRange = scanRange
        self.jcnPosition = np.array(traci.junction.getPosition(self.junctionData.id))
        self.jcnCtrlRegion = self._getJncCtrlRegion()
        # print(self.junctionData.id)
        # print(self.jcnCtrlRegion)
        self.controlledLanes = traci.trafficlights.getControlledLanes(self.junctionData.id)
        # dict[laneID] = [heading, shape]
        self.laneDetectionInfo = self._getIncomingLaneInfo()
        self.stageTime = 0.0
        self.minGreenTime = minGreenTime
        self.maxGreenTime = maxGreenTime
        # Metering rate used to convert queue length to green time.
        self.secondsPerMeterTraffic = 0.45
        self.nearVehicleCatchDistance = 25
        self.extendTime = 1.0 # 5 m in 10 m/s (acceptable journey 1.333)
        self.laneInductors = self._getLaneInductors()
        self.TIME_MS = self.firstCalled
        self.TIME_SEC = 0.001 * self.TIME_MS
'''def minmax(x, lower, upper):
return min(max(x, lower), upper)
'''
    def process(self):
        """Advance the controller by one simulation step.

        Gathers CAM data when packets are due, updates the current stage's
        green time (vehicle-actuated when no CAVs are detected, CAM-based
        otherwise), and starts a stage transition once the time is used up.
        """
        self.TIME_MS = traci.simulation.getCurrentTime()
        self.TIME_SEC = 0.001 * self.TIME_MS
        # Packets sent on this step
        # packet delay + only get packets towards the end of the second
        if (not self.TIME_MS % self.packetRate) and (not 50 < self.TIME_MS % 1000 < 650):
            #self.CAMactive = True
            self._getCAMinfo()
        # else:
        #     self.CAMactive = False

        # Update stage decisions
        # If there's no ITS enabled vehicles present use VA ctrl
        numCAVs = len(self.oldVehicleInfo)
        isControlInterval = not self.TIME_MS % 1000
        #if isControlInterval: print('CTRL')
        if numCAVs < 1 and isControlInterval:
            detectTimePerLane = self._getLaneDetectTime()
            # Set adaptive time limit
            if np.any(detectTimePerLane < 2):
                extend = self.extendTime
            else:
                extend = 0.0
            self.stageTime = max(self.stageTime + extend, self.minGreenTime)
            self.stageTime = min(self.stageTime, self.maxGreenTime)
            #print('A'+str(self.stageTime))
        # If active and on the second, or transition then make stage descision
        elif numCAVs >= 1 and isControlInterval:
            oncomingVeh = self._getOncomingVehicles()
            # If new stage get furthest from stop line whose velocity < 5% speed
            # limit and determine queue length
            if self.transition:
                furthestVeh = self._getFurthestStationaryVehicle(oncomingVeh)
                if furthestVeh[0] != '':
                    # Green time proportional to the queue length.
                    meteredTime = self.secondsPerMeterTraffic*furthestVeh[1]
                    self.stageTime = max(self.minGreenTime, meteredTime)
                    self.stageTime = min(self.stageTime, self.maxGreenTime)
                # If we're in this state this should never happen but just in case
                else:
                    self.stageTime = self.minGreenTime
                #print('B'+str(self.stageTime))
            # If currently staging then extend time if there are vehicles close
            # to the stop line
            else:
                nearestVeh = self._getNearestVehicle(oncomingVeh)
                # If a vehicle detected
                if nearestVeh != '' and nearestVeh[1] <= self.nearVehicleCatchDistance:
                    # Use the measured speed when available (1e6 is the
                    # "first sighting" sentinel from _getVelocity).
                    if (self.oldVehicleInfo[nearestVeh[0]][2] != 1e6
                        and self.oldVehicleInfo[nearestVeh[0]][2] > 1.0/self.secondsPerMeterTraffic):
                        meteredTime = nearestVeh[1]/self.oldVehicleInfo[nearestVeh[0]][2]
                    else:
                        meteredTime = self.secondsPerMeterTraffic*nearestVeh[1]
                    elapsedTime = 0.001*(self.TIME_MS - self.lastCalled)
                    Tremaining = self.stageTime - elapsedTime
                    self.stageTime = elapsedTime + max(meteredTime, Tremaining)
                    #self.stageTime = max(self.stageTime, self.minGreenTime)
                    self.stageTime = min(self.stageTime, self.maxGreenTime)
                    #print('C'+str(self.stageTime))
                # no detectable near vehicle try inductive loop info
                elif nearestVeh == '' or nearestVeh[1] > self.nearVehicleCatchDistance:
                    detectTimePerLane = self._getLaneDetectTime()
                    # Set adaptive time limit
                    if np.any(detectTimePerLane < 2):
                        extend = self.extendTime
                    else:
                        extend = 0.0
                    self.stageTime = max(self.stageTime + extend, self.minGreenTime)
                    self.stageTime = min(self.stageTime, self.maxGreenTime)
                    #print('D'+str(self.stageTime))
                else:
                    pass
        # process stage as normal
        else:
            pass

        # print(self.stageTime)
        # Stage switching only happens on whole-second control intervals.
        if isControlInterval:
            self.transition = False
            if self.transitionObject.active:
                # If the transition object is active i.e. processing a transition
                pass
            # elif (self.TIME_MS - self.firstCalled) < (self.junctionData.offset*1000):
            #     # Process offset first
            #     pass
            elif (self.TIME_MS - self.lastCalled) < self.stageTime*1000:
                # Before the period of the next stage
                pass
            else:
                # Not active, not in offset, stage not finished
                if len(self.junctionData.stages) != (self.lastStageIndex)+1:
                    # Loop from final stage to first stage
                    self.transitionObject.newTransition(
                        self.junctionData.id,
                        self.junctionData.stages[self.lastStageIndex].controlString,
                        self.junctionData.stages[self.lastStageIndex+1].controlString)
                    self.lastStageIndex += 1
                else:
                    # Proceed to next stage
                    self.transitionObject.newTransition(
                        self.junctionData.id,
                        self.junctionData.stages[self.lastStageIndex].controlString,
                        self.junctionData.stages[0].controlString)
                    self.lastStageIndex = 0
                #print(self.stageTime)
                self.lastCalled = self.TIME_MS
                self.transition = True
                self.stageTime = 0.0

        super(HybridVAControl, self).process()
def _getHeading(self, currentLoc, prevLoc):
dy = currentLoc[1] - prevLoc[1]
dx = currentLoc[0] - prevLoc[0]
if currentLoc[1] == prevLoc[1] and currentLoc[0] == prevLoc[0]:
heading = -1
else:
if dy >= 0:
heading = degrees(atan2(dy, dx))
else:
heading = 360 + degrees(atan2(dy, dx))
# Map angle to make compatible with SUMO heading
if 0 <= heading <= 90:
heading = 90 - heading
elif 90 < heading < 360:
heading = 450 - heading
return heading
def _getJncCtrlRegion(self):
    """Return the rectangular region {'N','S','E','W'} this junction watches.

    Starts as a square of half-width ``self.scanRange`` centred on the
    junction; each edge is then pulled in so it stops ``TOL`` short of any
    neighbouring traffic-light junction, so two controllers do not claim
    the same vehicles. Units are SUMO network coordinates (presumably
    metres -- TODO confirm).
    """
    jncPosition = traci.junction.getPosition(self.junctionData.id)
    # Positions of every other signalised junction in the network.
    otherJuncPos = [traci.junction.getPosition(x) for x in traci.trafficlights.getIDList() if x != self.junctionData.id]
    ctrlRegion = {'N': jncPosition[1] + self.scanRange, 'S': jncPosition[1] - self.scanRange,
                  'E': jncPosition[0] + self.scanRange, 'W': jncPosition[0] - self.scanRange}
    TOL = 10  # Exclusion region around junction boundary
    if otherJuncPos != []:
        for pos in otherJuncPos:
            # Offset from the neighbour to this junction.
            dx = jncPosition[0] - pos[0]
            dy = jncPosition[1] - pos[1]
            # North/South Boundary
            if abs(dy) < self.scanRange:
                if dy < -TOL:
                    # Neighbour lies to the north: pull the N edge down.
                    ctrlRegion['N'] = min(pos[1] - TOL, ctrlRegion['N'])
                elif dy > TOL:
                    # Neighbour lies to the south: push the S edge up.
                    ctrlRegion['S'] = max(pos[1] + TOL, ctrlRegion['S'])
                else:
                    # Neighbour level with the junction: leave N/S untouched.
                    pass
            else:
                pass
            # East/West Boundary
            if abs(dx) < self.scanRange:
                if dx < -TOL:
                    # Neighbour lies to the east: pull the E edge in.
                    ctrlRegion['E'] = min(pos[0] - TOL, ctrlRegion['E'])
                elif dx > TOL:
                    # Neighbour lies to the west: push the W edge in.
                    ctrlRegion['W'] = max(pos[0] + TOL, ctrlRegion['W'])
                else:
                    pass
            else:
                pass
    return ctrlRegion
def _isInRange(self, vehPosition):
distance = np.linalg.norm(vehPosition - self.jcnPosition)
if (distance < self.scanRange
and self.jcnCtrlRegion['W'] <= vehPosition[0] <= self.jcnCtrlRegion['E']
and self.jcnCtrlRegion['S'] <= vehPosition[1] <= self.jcnCtrlRegion['N']):
return True
else:
return False
def _getVelocity(self, vehID, vehPosition, Tdetect):
if vehID in self.oldVehicleInfo.keys():
oldX = np.array(self.oldVehicleInfo[vehID][0])
newX = np.array(vehPosition)
dx = np.linalg.norm(newX - oldX)
dt = Tdetect - self.oldVehicleInfo[vehID][3]
velocity = dx/dt
return velocity
else:
return 1e6
def _getCAMinfo(self):
    """Rebuild the connected-vehicle (CAM) tables for this simulation step.

    The previous step's readings become ``oldVehicleInfo`` (the history
    used for speed estimates) and the current detections are collected
    into ``newVehicleInfo``.
    """
    self.oldVehicleInfo = self.newVehicleInfo.copy()
    self.newVehicleInfo = {}
    detectionTime = self.TIME_SEC
    for vehicleID in traci.vehicle.getIDList():
        position = traci.vehicle.getPosition(vehicleID)
        # Only connected (ITS-CV) vehicles inside the scan region are visible.
        if traci.vehicle.getTypeID(vehicleID) != 'typeITSCV':
            continue
        if not self._isInRange(position):
            continue
        self.newVehicleInfo[vehicleID] = [
            position,
            traci.vehicle.getAngle(vehicleID),
            self._getVelocity(vehicleID, position, detectionTime),
            detectionTime,
        ]
def _getIncomingLaneInfo(self):
    """Collect a heading and a rough bounding box for every controlled lane.

    Returns a mapping lane-ID -> [heading, roadBounds] where roadBounds is
    a pair of corner points padded by one lane width perpendicular to the
    lane's dominant axis. Assumes lane shapes have at least two points --
    TODO confirm against the network used.
    """
    laneInfo = defaultdict(list)
    for lane in list(np.unique(np.array(self.controlledLanes))):
        shape = traci.lane.getShape(lane)
        width = traci.lane.getWidth(lane)
        # Heading of travel along the lane (from shape[0] towards shape[1]
        # -- presumably towards the junction; verify against _getHeading).
        heading = self._getHeading(shape[1], shape[0])
        dx = shape[0][0] - shape[1][0]
        dy = shape[0][1] - shape[1][1]
        if abs(dx) > abs(dy):
            # Predominantly east-west lane: pad the box in y by one width.
            roadBounds = ((shape[0][0], shape[0][1] + width), (shape[1][0], shape[1][1] - width))
        else:
            # Predominantly north-south lane: pad the box in x instead.
            roadBounds = ((shape[0][0] + width, shape[0][1]), (shape[1][0] - width, shape[1][1]))
        laneInfo[lane] = [heading, roadBounds]
    return laneInfo
def _getOncomingVehicles(self):
    """Return IDs of CAM-detected vehicles approaching on any green lane.

    A vehicle counts as oncoming when its last-known heading matches the
    lane heading to within 10 degrees and its position lies strictly
    inside the lane's bounding box from _getIncomingLaneInfo.
    """
    # Oncoming if (in active lane & heading matches oncoming heading &
    # is in lane bounds)
    activeLanes = self._getActiveLanes()
    vehicles = []
    for lane in activeLanes:
        for vehID in self.oldVehicleInfo.keys():
            # If on correct heading pm 10deg
            if (np.isclose(self.oldVehicleInfo[vehID][1], self.laneDetectionInfo[lane][0], atol=10)
                    # If in lane x bounds
                    and min(self.laneDetectionInfo[lane][1][0][0], self.laneDetectionInfo[lane][1][1][0]) <
                    self.oldVehicleInfo[vehID][0][0] <
                    max(self.laneDetectionInfo[lane][1][0][0], self.laneDetectionInfo[lane][1][1][0])
                    # If in lane y bounds
                    and min(self.laneDetectionInfo[lane][1][0][1], self.laneDetectionInfo[lane][1][1][1]) <
                    self.oldVehicleInfo[vehID][0][1] <
                    max(self.laneDetectionInfo[lane][1][0][1], self.laneDetectionInfo[lane][1][1][1])):
                # Then append vehicle
                vehicles.append(vehID)
    # De-duplicate (one vehicle can match several lanes); np.unique also sorts.
    vehicles = list(np.unique(np.array(vehicles)))
    return vehicles
def _getActiveLanes(self):
# Get the current control string to find the green lights
stageCtrlString = self.junctionData.stages[self.lastStageIndex].controlString
activeLanes = []
for i, letter in enumerate(stageCtrlString):
if letter == 'G':
activeLanes.append(self.controlledLanes[i])
# Get a list of the unique active lanes
activeLanes = list(np.unique(np.array(activeLanes)))
return activeLanes
def _getLaneInductors(self):
    """Map each controlled lane to the induction loops sitting on it."""
    loopsByLane = defaultdict(list)
    for loopID in traci.inductionloop.getIDList():
        lane = traci.inductionloop.getLaneID(loopID)
        # Ignore loops on lanes this junction does not control.
        if lane in self.controlledLanes:
            loopsByLane[lane].append(loopID)
    return loopsByLane
def _getFurthestStationaryVehicle(self, vehIDs):
    """Return [ID, distance] of the stationary vehicle furthest from the
    junction, or ['', -1] when none of ``vehIDs`` qualifies."""
    speedLimit = traci.lane.getMaxSpeed(self._getActiveLanes()[0])
    furthestID, maxDistance = '', -1
    for vehID in vehIDs:
        gap = np.linalg.norm(np.array(self.oldVehicleInfo[vehID][0]) - self.jcnPosition)
        # "Stationary" = slower than 5% of the active lane's speed limit.
        if gap > maxDistance and self.oldVehicleInfo[vehID][2] < 0.05 * speedLimit:
            furthestID, maxDistance = vehID, gap
    return [furthestID, maxDistance]
def _getNearestVehicle(self, vehIDs):
nearestID = ''
minDistance = self.nearVehicleCatchDistance + 1
for ID in vehIDs:
vehPosition = np.array(self.oldVehicleInfo[ID][0])
distance = np.linalg.norm(vehPosition - self.jcnPosition)
if distance < minDistance:
nearestID = ID
minDistance = distance
return [nearestID, minDistance]
def _getLaneDetectTime(self):
    """Return, per active lane, the mean time since each of its induction
    loops last detected a vehicle (numpy array, one entry per lane)."""
    activeLanes = self._getActiveLanes()
    meanDetectTimes = np.zeros(len(activeLanes))
    for index, lane in enumerate(activeLanes):
        times = [traci.inductionloop.getTimeSinceDetection(loop)
                 for loop in self.laneInductors[lane]]
        meanDetectTimes[index] = np.mean(times)
    return meanDetectTimes
| cbrafter/TRB18_GPSVA | codes/sumoAPI/HybridVAControl.py | Python | mit | 15,253 |
"""Helper functions
Consists of functions to typically be used within templates, but also
available to Controllers. This module is available to templates as 'h'.
"""
from routes import url_for
from webhelpers.html import literal
from webhelpers.html.secure_form import secure_form
from webhelpers.html.tags import *
from webhelpers.html.tools import auto_link, mail_to
from webhelpers.text import truncate, chop_at, plural
from webob.exc import strip_tags
from wurdig.lib import auth
from wurdig.lib.comment import *
from wurdig.lib.cookie import *
from wurdig.lib.conf_helper import *
from wurdig.lib.widgets import *
from wurdig.lib.html import *
from wurdig.lib.mdown import *
from wurdig.lib.tag import cloud, post_tags
from wurdig.lib.tidy_helper import *
from wurdig.lib.utils_helper import *
def load_stylesheet_assets(csslist='FCSSLIST'):
    """Read the stylesheet manifest and return cache-busted CSS URLs.

    ``csslist`` names a whitespace-separated manifest file inside the
    project's static ``css`` directory; each entry becomes a
    ``/css/<name>.css?<mtime>`` URL (``mtime`` comes from a wildcard
    helper import above -- presumably utils_helper; confirm).
    """
    import pylons
    import os
    path = os.path.join(pylons.config['pylons.paths']['static_files'], 'css', '%s')
    # 'with' guarantees the manifest handle is closed even if read() fails;
    # the original leaked the handle on error.
    with open(path % csslist, 'r') as manifest:
        stylesheets = manifest.read()
    return ['/css/%s.css?%s' % (f, mtime('/css/%s.css' % f))
            for f in stylesheets.split()]
"""
Django settings for tiny_hands_pac project.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
from os.path import abspath, dirname, join, normpath
from sys import path
from django.core.exceptions import ImproperlyConfigured
def get_env_variable(var_name):
    """Return the value of environment variable ``var_name``.

    Raises ImproperlyConfigured when the variable is unset, so a missing
    setting fails loudly at startup instead of surfacing later.
    """
    try:
        return os.environ[var_name]
    except KeyError:
        raise ImproperlyConfigured("Set the %s environment variable" % var_name)
# Absolute filesystem path to the Django project directory:
DJANGO_ROOT = dirname(dirname(abspath(__file__)))
# Absolute filesystem path to the top-level project folder:
PROJECT_ROOT = dirname(DJANGO_ROOT)
# Add our project to our pythonpath, this way we don't need to type our project
# name in our dotted import paths:
path.append(DJANGO_ROOT)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# Do not set SECRET_KEY or LDAP password or any other sensitive data here.
# Instead, create a local.py file on the server.
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'compressor',
'taggit',
'modelcluster',
'wagtail.contrib.wagtailsitemaps',
'wagtail.contrib.wagtailsearchpromotions',
'wagtail.wagtailforms',
'wagtail.wagtailredirects',
'wagtail.wagtailembeds',
'wagtail.wagtailsites',
'wagtail.wagtailusers',
'wagtail.wagtailsnippets',
'wagtail.wagtaildocs',
'wagtail.wagtailimages',
'wagtail.wagtailsearch',
'wagtail.wagtailadmin',
'wagtail.wagtailcore',
'wagtail.contrib.settings',
'wagtailfontawesome',
'utils',
'pages',
'blog',
'events',
'contact',
'people',
'photo_gallery',
'products',
'documents_gallery',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'wagtail.wagtailcore.middleware.SiteMiddleware',
'wagtail.wagtailredirects.middleware.RedirectMiddleware',
)
ROOT_URLCONF = 'tiny_hands_pac.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'debug' : DEBUG,
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'pages.context_processors.site_url',
],
},
},
]
WSGI_APPLICATION = 'tiny_hands_pac.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'tiny_hands_pac',
'USER': '',
'HOST': '', # Set to empty string for localhost.
'PORT': '', # Set to empty string for default.
'CONN_MAX_AGE': 600,
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-gb'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
MEDIA_ROOT = join(PROJECT_ROOT, 'media')
MEDIA_URL = '/files/'
# Django compressor settings
# http://django-compressor.readthedocs.org/en/latest/settings/
COMPRESS_PRECOMPILERS = (
('text/x-scss', 'django_libsass.SassCompiler'),
)
COMPRESS_OFFLINE = True
# Feeds app for Wagtail CMS
FEED_APP_LABEL = 'blog'
FEED_MODEL_NAME = 'BlogPage'
FEED_ITEM_DESCRIPTION_FIELD = 'intro'
FEED_ITEM_CONTENT_FIELD = 'body'
FEED_TITLE = 'Tiny Hands Big News'
FEED_LINK = '/news/'
FEED_DESCRIPTION = ""
FEED_AUTHOR_EMAIL = 'donaldtrumphastinyhands@gmail.com'
FEED_AUTHOR_LINK = 'https://www.donaldtrumphastinyhands.com'
# Settings for wagalytics
GA_KEY_FILEPATH = ''
GA_VIEW_ID = ''
# Google Maps Key
GOOGLE_MAPS_KEY = ''
DYNAMIC_MAP_URL = ''
STATIC_MAP_URL = ''
# Facebook Open Tags
FB_SITE_NAME = ''
FB_URL = ''
FB_DESCRIPTION = ''
FB_APP_ID = ''
# Twitter Cards
TWITTER_URL = ''
TWITTER_CREATOR = ''
TWITTER_DESCRIPTION = ''
# Use Redis as the cache backend for extra performance
# CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': '127.0.0.1:6379',
# 'KEY_PREFIX': 'tiny_hands_pac',
# 'OPTIONS': {
# 'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
# }
# }
# }
# Wagtail settings
LOGIN_URL = 'wagtailadmin_login'
LOGIN_REDIRECT_URL = 'wagtailadmin_home'
WAGTAIL_SITE_NAME = "Tiny Hands PAC"
WAGTAILSEARCH_RESULTS_TEMPLATE = 'utils/tags/search/search_results.html'
# Use Elasticsearch as the search backend for extra performance and better search results
# WAGTAILSEARCH_BACKENDS = {
# 'default': {
# 'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
# 'INDEX': 'tiny_hands_pac',
# },
# }
# Celery settings
# When you have multiple sites using the same Redis server,
# specify a different Redis DB. e.g. redis://localhost/5
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
| DonaldTrumpHasTinyHands/tiny_hands_pac | tiny_hands_pac/settings/base.py | Python | mit | 6,665 |
# Minimal "hello world" script: emit a fixed greeting on stdout.
message = "Greetings Earth! We come in peace."
print(message)
| morepj/numerical-mooc | working/HelloWorld.py | Python | mit | 44 |
"""
Provides functionality to interact with hvacs.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/hvac/
"""
import logging
import os
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.config import load_yaml_config_file
import homeassistant.util as util
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.temperature import convert
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
from homeassistant.components import zwave
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_TEMPERATURE, STATE_ON, STATE_OFF, STATE_UNKNOWN,
TEMP_CELCIUS)
DOMAIN = "hvac"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SCAN_INTERVAL = 60
SERVICE_SET_AWAY_MODE = "set_away_mode"
SERVICE_SET_AUX_HEAT = "set_aux_heat"
SERVICE_SET_TEMPERATURE = "set_temperature"
SERVICE_SET_FAN_MODE = "set_fan_mode"
SERVICE_SET_OPERATION_MODE = "set_operation_mode"
SERVICE_SET_SWING_MODE = "set_swing_mode"
SERVICE_SET_HUMIDITY = "set_humidity"
STATE_HEAT = "heat"
STATE_COOL = "cool"
STATE_IDLE = "idle"
STATE_AUTO = "auto"
STATE_DRY = "dry"
STATE_FAN_ONLY = "fan_only"
ATTR_CURRENT_TEMPERATURE = "current_temperature"
ATTR_MAX_TEMP = "max_temp"
ATTR_MIN_TEMP = "min_temp"
ATTR_AWAY_MODE = "away_mode"
ATTR_AUX_HEAT = "aux_heat"
ATTR_FAN_MODE = "fan_mode"
ATTR_FAN_LIST = "fan_list"
ATTR_CURRENT_HUMIDITY = "current_humidity"
ATTR_HUMIDITY = "humidity"
ATTR_MAX_HUMIDITY = "max_humidity"
ATTR_MIN_HUMIDITY = "min_humidity"
ATTR_OPERATION_MODE = "operation_mode"
ATTR_OPERATION_LIST = "operation_list"
ATTR_SWING_MODE = "swing_mode"
ATTR_SWING_LIST = "swing_list"
_LOGGER = logging.getLogger(__name__)
DISCOVERY_PLATFORMS = {
zwave.DISCOVER_HVAC: 'zwave'
}
def set_away_mode(hass, away_mode, entity_id=None):
    """Turn all or specified hvac away mode on.

    When ``entity_id`` is None the call targets every hvac entity.
    """
    data = {ATTR_AWAY_MODE: away_mode}
    # 'is not None' (not truthiness) for consistency with the other
    # service helpers in this module, e.g. set_temperature().
    if entity_id is not None:
        data[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_SET_AWAY_MODE, data)
def set_aux_heat(hass, aux_heat, entity_id=None):
    """Turn all or specified hvac auxiliary heater on.

    When ``entity_id`` is None the call targets every hvac entity.
    """
    data = {ATTR_AUX_HEAT: aux_heat}
    # 'is not None' (not truthiness) for consistency with the other
    # service helpers in this module.
    if entity_id is not None:
        data[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_SET_AUX_HEAT, data)
def set_temperature(hass, temperature, entity_id=None):
    """Ask Home Assistant to apply a new target temperature.

    When ``entity_id`` is None the call targets every hvac entity.
    """
    service_data = {ATTR_TEMPERATURE: temperature}
    if entity_id is not None:
        service_data[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_SET_TEMPERATURE, service_data)
def set_humidity(hass, humidity, entity_id=None):
    """Ask Home Assistant to apply a new target humidity.

    When ``entity_id`` is None the call targets every hvac entity.
    """
    service_data = {ATTR_HUMIDITY: humidity}
    if entity_id is not None:
        service_data[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_SET_HUMIDITY, service_data)
def set_fan_mode(hass, fan, entity_id=None):
    """Turn all or specified hvac fan mode on.

    When ``entity_id`` is None the call targets every hvac entity.
    """
    data = {ATTR_FAN_MODE: fan}
    # 'is not None' (not truthiness) for consistency with the other
    # service helpers in this module.
    if entity_id is not None:
        data[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_SET_FAN_MODE, data)
def set_operation_mode(hass, operation_mode, entity_id=None):
    """Ask Home Assistant to apply a new operation mode.

    When ``entity_id`` is None the call targets every hvac entity.
    """
    service_data = {ATTR_OPERATION_MODE: operation_mode}
    if entity_id is not None:
        service_data[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_SET_OPERATION_MODE, service_data)
def set_swing_mode(hass, swing_mode, entity_id=None):
    """Ask Home Assistant to apply a new swing mode.

    When ``entity_id`` is None the call targets every hvac entity.
    """
    service_data = {ATTR_SWING_MODE: swing_mode}
    if entity_id is not None:
        service_data[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_SET_SWING_MODE, service_data)
# pylint: disable=too-many-branches
def setup(hass, config):
    """Set up hvac devices and register the hvac services.

    All seven hvac services share the same shape -- pull one attribute out
    of the service call, log-and-abort when it is missing, apply it to each
    targeted hvac and refresh polled entities -- so they are registered
    through a single helper instead of seven near-identical handlers.
    """
    component = EntityComponent(_LOGGER, DOMAIN, hass,
                                SCAN_INTERVAL, DISCOVERY_PLATFORMS)
    component.setup(config)

    descriptions = load_yaml_config_file(
        os.path.join(os.path.dirname(__file__), 'services.yaml'))

    def register_attribute_service(service_name, attr, apply_func,
                                   coerce=None):
        """Register ``service_name``: the handler reads ``attr`` from the
        call data (optionally run through ``coerce``), logs and aborts when
        it is missing, otherwise applies it via ``apply_func``."""
        def handle_service(service):
            """Apply the service call to all targeted hvacs."""
            target_hvacs = component.extract_from_service(service)
            value = service.data.get(attr)
            if coerce is not None:
                value = coerce(value)
            if value is None:
                _LOGGER.error(
                    "Received call to %s without attribute %s",
                    service_name, attr)
                return
            for hvac in target_hvacs:
                apply_func(hvac, value)
                if hvac.should_poll:
                    hvac.update_ha_state(True)
        hass.services.register(DOMAIN, service_name, handle_service,
                               descriptions.get(service_name))

    register_attribute_service(
        SERVICE_SET_AWAY_MODE, ATTR_AWAY_MODE,
        lambda hvac, away: hvac.turn_away_mode_on() if away
        else hvac.turn_away_mode_off())
    register_attribute_service(
        SERVICE_SET_AUX_HEAT, ATTR_AUX_HEAT,
        lambda hvac, aux: hvac.turn_aux_heat_on() if aux
        else hvac.turn_aux_heat_off())
    register_attribute_service(
        SERVICE_SET_TEMPERATURE, ATTR_TEMPERATURE,
        # Convert from the hass-configured unit into the device's own unit.
        lambda hvac, temp: hvac.set_temperature(convert(
            temp, hass.config.temperature_unit,
            hvac.unit_of_measurement)),
        # Same numeric coercion the original handler used (None on failure).
        coerce=lambda raw: util.convert(raw, float))
    register_attribute_service(
        SERVICE_SET_HUMIDITY, ATTR_HUMIDITY,
        lambda hvac, humidity: hvac.set_humidity(humidity))
    register_attribute_service(
        SERVICE_SET_FAN_MODE, ATTR_FAN_MODE,
        lambda hvac, fan: hvac.set_fan_mode(fan))
    register_attribute_service(
        SERVICE_SET_OPERATION_MODE, ATTR_OPERATION_MODE,
        lambda hvac, mode: hvac.set_operation_mode(mode))
    register_attribute_service(
        SERVICE_SET_SWING_MODE, ATTR_SWING_MODE,
        lambda hvac, swing: hvac.set_swing_mode(swing))

    return True
class HvacDevice(Entity):
    """Representation of a hvac.

    Abstract base entity for hvac platforms. Subclasses override the
    properties and set_*/turn_* methods their hardware supports; any
    capability property left returning ``None`` marks that feature as
    unsupported and its attribute is omitted from the entity state.
    """

    # pylint: disable=too-many-public-methods,no-self-use
    @property
    def state(self):
        """Return the current state."""
        return self.current_operation or STATE_UNKNOWN

    @property
    def state_attributes(self):
        """Return the optional state attributes."""
        # Temperatures are always reported, converted into the
        # hass-configured display unit.
        data = {
            ATTR_CURRENT_TEMPERATURE:
            self._convert_for_display(self.current_temperature),
            ATTR_MIN_TEMP: self._convert_for_display(self.min_temp),
            ATTR_MAX_TEMP: self._convert_for_display(self.max_temp),
            ATTR_TEMPERATURE:
            self._convert_for_display(self.target_temperature),
        }

        # Optional capabilities: only exposed when the subclass reports them.
        humidity = self.target_humidity
        if humidity is not None:
            data[ATTR_HUMIDITY] = humidity
            data[ATTR_CURRENT_HUMIDITY] = self.current_humidity
            data[ATTR_MIN_HUMIDITY] = self.min_humidity
            data[ATTR_MAX_HUMIDITY] = self.max_humidity

        fan_mode = self.current_fan_mode
        if fan_mode is not None:
            data[ATTR_FAN_MODE] = fan_mode
            data[ATTR_FAN_LIST] = self.fan_list

        operation_mode = self.current_operation
        if operation_mode is not None:
            data[ATTR_OPERATION_MODE] = operation_mode
            data[ATTR_OPERATION_LIST] = self.operation_list

        swing_mode = self.current_swing_mode
        if swing_mode is not None:
            data[ATTR_SWING_MODE] = swing_mode
            data[ATTR_SWING_LIST] = self.swing_list

        is_away = self.is_away_mode_on
        if is_away is not None:
            data[ATTR_AWAY_MODE] = STATE_ON if is_away else STATE_OFF

        is_aux_heat = self.is_aux_heat_on
        if is_aux_heat is not None:
            data[ATTR_AUX_HEAT] = STATE_ON if is_aux_heat else STATE_OFF

        return data

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement (required of subclasses)."""
        raise NotImplementedError

    @property
    def current_humidity(self):
        """Return the current humidity."""
        return None

    @property
    def target_humidity(self):
        """Return the humidity we try to reach."""
        return None

    @property
    def current_operation(self):
        """Return current operation ie. heat, cool, idle."""
        return None

    @property
    def operation_list(self):
        """List of available operation modes."""
        return None

    @property
    def current_temperature(self):
        """Return the current temperature."""
        return None

    @property
    def target_temperature(self):
        """Return the temperature we try to reach (required of subclasses)."""
        raise NotImplementedError

    @property
    def is_away_mode_on(self):
        """Return true if away mode is on."""
        return None

    @property
    def is_aux_heat_on(self):
        """Return true if the auxiliary heater is on."""
        return None

    @property
    def current_fan_mode(self):
        """Return the fan setting."""
        return None

    @property
    def fan_list(self):
        """List of available fan modes."""
        return None

    @property
    def current_swing_mode(self):
        """Return the swing setting."""
        return None

    @property
    def swing_list(self):
        """List of available swing modes."""
        return None

    def set_temperature(self, temperature):
        """Set new target temperature."""
        pass

    def set_humidity(self, humidity):
        """Set new target humidity."""
        pass

    def set_fan_mode(self, fan):
        """Set new target fan mode."""
        pass

    def set_operation_mode(self, operation_mode):
        """Set new target operation mode."""
        pass

    def set_swing_mode(self, swing_mode):
        """Set new target swing operation."""
        pass

    def turn_away_mode_on(self):
        """Turn away mode on."""
        pass

    def turn_away_mode_off(self):
        """Turn away mode off."""
        pass

    def turn_aux_heat_on(self):
        """Turn auxiliary heater on."""
        pass

    def turn_aux_heat_off(self):
        """Turn auxiliary heater off."""
        pass

    @property
    def min_temp(self):
        """Return the minimum temperature (default 19 degrees C)."""
        return convert(19, TEMP_CELCIUS, self.unit_of_measurement)

    @property
    def max_temp(self):
        """Return the maximum temperature (default 30 degrees C)."""
        return convert(30, TEMP_CELCIUS, self.unit_of_measurement)

    @property
    def min_humidity(self):
        """Return the minimum humidity."""
        return 30

    @property
    def max_humidity(self):
        """Return the maximum humidity."""
        return 99

    def _convert_for_display(self, temp):
        """Convert temperature into preferred units for display purposes."""
        if temp is None:
            return None

        value = convert(temp, self.unit_of_measurement,
                        self.hass.config.temperature_unit)

        if self.hass.config.temperature_unit is TEMP_CELCIUS:
            decimal_count = 1
        else:
            # Users of fahrenheit generally expect integer units.
            decimal_count = 0

        return round(value, decimal_count)
| sffjunkie/home-assistant | homeassistant/components/hvac/__init__.py | Python | mit | 14,456 |
# hello_asyncio.py
import asyncio
import traceback

import tornado.concurrent
import tornado.gen
import tornado.ioloop
import tornado.web
from tornado.httpclient import AsyncHTTPClient
try:
import aioredis
except ImportError:
print("Please install aioredis: pip install aioredis")
exit(0)
class AsyncRequestHandler(tornado.web.RequestHandler):
    """Base class for request handlers with `asyncio` coroutines support.

    It runs methods on Tornado's ``AsyncIOMainLoop`` instance.
    Subclasses have to implement one of `get_async()`, `post_async()`, etc.
    Asynchronous method should be decorated with `@asyncio.coroutine`.

    Usage example::

        class MyAsyncRequestHandler(AsyncRequestHandler):
            @asyncio.coroutine
            def get_async(self):
                html = yield from self.application.http.get('http://python.org')
                self.write({'html': html})

    You may also just re-define `get()` or `post()` methods and they will be
    simply run synchronously. This may be convenient for draft
    implementation, i.e. for testing new libs or concepts.
    """

    @tornado.gen.coroutine
    def get(self, *args, **kwargs):
        """Handle GET request asynchronously, delegates to
        ``self.get_async()`` coroutine.
        """
        yield self._run_method('get', *args, **kwargs)

    @tornado.gen.coroutine
    def post(self, *args, **kwargs):
        """Handle POST request asynchronously, delegates to
        ``self.post_async()`` coroutine.
        """
        yield self._run_method('post', *args, **kwargs)

    @asyncio.coroutine
    def _run_async(self, coroutine, future_, *args, **kwargs):
        """Perform coroutine and set result to ``Future`` object."""
        try:
            result = yield from coroutine(*args, **kwargs)
            future_.set_result(result)
        except Exception as e:
            future_.set_exception(e)
            # Requires the module-level 'import traceback'; the original
            # module omitted it, so this line raised NameError and masked
            # the real error.
            print(traceback.format_exc())

    def _run_method(self, method_, *args, **kwargs):
        """Run ``get_async()`` / ``post_async()`` / etc. coroutine wrapping
        result with ``tornado.concurrent.Future`` for compatibility with
        ``gen.coroutine``.

        Raises:
            tornado.web.HTTPError: 405 when the subclass implements no
                matching ``<method>_async`` coroutine.
        """
        coroutine = getattr(self, '%s_async' % method_, None)
        if not coroutine:
            raise tornado.web.HTTPError(405)

        future_ = tornado.concurrent.Future()
        # ensure_future replaces the legacy asyncio.async(), whose name
        # became a reserved keyword (SyntaxError) in Python 3.7.
        asyncio.ensure_future(
            self._run_async(coroutine, future_, *args, **kwargs)
        )
        return future_
class MainHandler(AsyncRequestHandler):
    """Round-trip a value through Redis and greet the client."""

    @asyncio.coroutine
    def get_async(self):
        store = self.application.redis
        yield from store.set('my-key', 'OK')
        stored = yield from store.get('my-key')
        self.write('Hello asyncio.coroutine: %s' % stored)
class Application(tornado.web.Application):
    """Tornado application running on the asyncio event loop."""

    def __init__(self):
        # Make Tornado's IOLoop instances run on asyncio.
        tornado.ioloop.IOLoop.configure('tornado.platform.asyncio.AsyncIOMainLoop')
        routes = [
            (r"/", MainHandler),
        ]
        super().__init__(routes, debug=True)

    def init_with_loop(self, loop):
        """Create the shared Redis connection on the given asyncio loop."""
        connect = aioredis.create_redis(('localhost', 6379), loop=loop)
        self.redis = loop.run_until_complete(connect)
if __name__ == "__main__":
    # Start the server and hand control to the asyncio loop forever.
    print("Run hello_asyncio ... http://127.0.0.1:8888")
    app = Application()
    app.listen(8888)

    event_loop = asyncio.get_event_loop()
    app.init_with_loop(event_loop)
    event_loop.run_forever()
| rudyryk/python-samples | hello_tornado/hello_asyncio.py | Python | cc0-1.0 | 3,463 |
#40/40
#Part 1: Terminology (15 points) --> 15/15
#1 1pt) What is the symbol "=" used for?
#to assign and store values to and in variables
# 1pt
#
#2 3pts) Write a technical definition for 'function'
#a named sequence of calculations which takes input and returns output
# 3pts
#
#3 1pt) What does the keyword "return" do?
#it gives back the output or result of the function
# 1pt
#
#4 5pts) We know 5 basic data types. Write the name for each one and provide two
# examples of each below
# 1: integer ex: 1, 2
# 2: floating point ex: 1.2, 1.3
# 3: string ex: "hi", "hello"
# 4: boolean ex: True, False
# 5: tuple ex: ("HEllo", 3), ("Bob", 10, "fat")
# 5pts
#
#5 2pts) What is the difference between a "function definition" and a
# "function call"?
#a function definition does not result in any output being presented, it simply defines a set of calculations which are run if and only if they are called by a function call
# 2pts
#
#
#6 3pts) What are the 3 phases that every computer program has? What happens in
# each of them
# 1:input (the program takes some input values, most often from the user)
# 2:processing (the program does something with those input values to for instance calculate something)
# 3:output (the program returns the product of its labours (processing), often as something printed)
# 3pts
#
#Part 2: Programming (25 points) --> 25/25
#Write a program that asks the user for the areas of 3 circles.
#It should then calculate the diameter of each and the sum of the diameters
#of the 3 circles.
#Finally, it should produce output like this:
#Circle Diameter
#c1 ...
#c2 ...
#c3 ...
#TOTALS ...
# Hint: Radius is the square root of the area divided by pi (a = pi(r)^2) so r = sqrt(a/pi)
import math
#1 pt for header line
#3 pt for correct formula
#1 pt for return value
#1 pt for parameter name
#1 pt for function name
def circarea_to_diameter(circarea):
    """Return the diameter of a circle whose area is ``circarea``."""
    # Invert a = pi * r^2 for the radius, then double it.
    radius = math.sqrt(circarea / math.pi)
    return 2 * radius
def sum_three(x, y, z):
    """Return the sum of the three given values."""
    total = x + y
    total += z
    return total
#1pt for header line
#1pt for parameter names
#1pt for return value
#1pt for correct output format
#3pt for correct use of format function
def output(d1, d2, d3, total):
    """Return the report table listing each circle's diameter and the total.

    Values are interpolated unrounded, exactly as passed in.
    """
    return """
Circle Diameter
C1 {}
C2 {}
C3 {}
Totals {}
""".format(d1, d2, d3, total)
#1pt header line
#1pt getting input
#1pt converting input
#1pt for calling output function
#2pt for correct diameter formula
#1pt for variable names
def main():
#input
C1 = raw_input("Area of C1: ")
C2 = raw_input("Area of C2: ")
C3 = raw_input("Area of C3: ")
#processing
d1 = circarea_to_diameter(float(C1))
d2 = circarea_to_diameter(float(C2))
d3 = circarea_to_diameter(float(C3))
total = sum_three(d1, d2, d3)
#output
print output(d1, d2, d3, total)
#1pt for calling main
main()
#1pt explanatory comments
#1pt code format
| ieuan1630-cmis/ieuan1630-cmis-cs2 | cs2quiz1.py | Python | cc0-1.0 | 2,934 |
import astropy.io.fits as fits
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as p
import numpy as n
import os
import sys
from scipy.stats import scoreatpercentile as sc
from scipy.interpolate import interp1d
survey = sys.argv[1]
z_min, z_max = 0., 1.6
imfs = ["Chabrier_ELODIE_", "Chabrier_MILES_", "Chabrier_STELIB_", "Kroupa_ELODIE_", "Kroupa_MILES_", "Kroupa_STELIB_", "Salpeter_ELODIE_", "Salpeter_MILES_", "Salpeter_STELIB_" ]
z_bins = n.array([0, 0.025, 0.375, 0.7, 0.85, 1.6])
key_SNR = 'SNR_ALL'
SNR_keys = n.array([ 'SNR_32_35', 'SNR_35_39', 'SNR_39_41', 'SNR_41_55', 'SNR_55_68', 'SNR_68_74', 'SNR_74_93' ])
SNR_w_min = n.array([ 32, 35, 39, 41, 55, 68, 74 ])
SNR_w_max = n.array([ 35, 39, 41, 55, 68, 74, 93 ])
wl_40 = ((z_bins[1:]+z_bins[:-1]) * 0.5 + 1)*40.
snr_ids = n.searchsorted(SNR_w_max, wl_40)
print(SNR_keys[snr_ids])
# output directory and FireFly catalog locations, rooted at $OBS_REPO
out_dir = os.path.join(os.environ['OBS_REPO'], 'spm', 'results')
#path_2_MAG_cat = os.path.join( os.environ['HOME'], 'SDSS', "dr14_specphot_gri.fits" )
#hd = fits.open(path_2_MAG_cat)
#path_2_sdss_cat = os.path.join( os.environ['HOME'], 'SDSS', '26', 'catalogs', "FireFly.fits" )
#path_2_eboss_cat = os.path.join( os.environ['HOME'], 'SDSS', 'v5_10_0', 'catalogs', "FireFly.fits" )
path_2_sdss_cat = os.path.join( os.environ['OBS_REPO'], 'SDSS', '26', 'catalogs', "FireFly.fits" )
path_2_eboss_cat = os.path.join( os.environ['OBS_REPO'], 'SDSS', 'v5_10_0', 'catalogs', "FireFly.fits" )
# OPENS THE CATALOGS
# loads the FITS table for the requested survey and records which redshift /
# class / warning columns apply to it
# NOTE(review): the 'deep2' branch does not set z_name/z_err_name/class_name/zwarning,
# so the selection code below would raise NameError for survey='deep2' -- confirm.
print("Loads catalog")
if survey =='deep2':
	deep2_dir = os.path.join(os.environ['OBS_REPO'], 'DEEP2')
	path_2_deep2_cat = os.path.join( deep2_dir, "zcat.deep2.dr4.v4.LFcatalogTC.Planck13.spm.fits" )
	catalog = fits.open(path_2_deep2_cat)[1].data
if survey =='sdss':
	catalog = fits.open(path_2_sdss_cat)[1].data
	z_name, z_err_name, class_name, zwarning = 'Z', 'Z_ERR', 'CLASS', 'ZWARNING'
if survey =='boss':
	catalog = fits.open(path_2_eboss_cat)[1].data
	z_name, z_err_name, class_name, zwarning = 'Z_NOQSO', 'Z_ERR_NOQSO', 'CLASS_NOQSO', 'ZWARNING_NOQSO'
# use the first IMF/library combination for the column prefix of this table
IMF = imfs[0]
prf = IMF.split('_')[0]+' & '+IMF.split('_')[1]
print(IMF, prf)
name, zflg_val, prefix = prf, 0., IMF
# base mask: positive redshift error, z above its error, classified GALAXY,
# no redshift warning, and z inside the [z_min, z_max] window
catalog_0 = (catalog[z_err_name] > 0.) & (catalog[z_name] > catalog[z_err_name]) & (catalog[class_name]=='GALAXY') & (catalog[zwarning]==zflg_val) & (catalog[z_name] > z_min) & (catalog[z_name] < z_max)
# base mask + positive overall signal-to-noise
catalog_zOk = catalog_0 & (catalog['SNR_ALL']>0)
# fits that converged: stellar mass within a plausible range and inside its own 1-sigma bounds
converged = (catalog_zOk)&(catalog[prefix+'stellar_mass'] < 10**13. ) & (catalog[prefix+'stellar_mass'] > 10**4 ) & (catalog[prefix+'stellar_mass'] > catalog[prefix+'stellar_mass_low_1sig'] ) & (catalog[prefix+'stellar_mass'] < catalog[prefix+'stellar_mass_up_1sig'] )
# converged fits with a 1-sigma log-mass uncertainty below 0.8 dex
dex04 = (converged) & (catalog[prefix+'stellar_mass'] < 10**14. ) & (catalog[prefix+'stellar_mass'] > 0 ) & (catalog[prefix+'stellar_mass'] > catalog[prefix+'stellar_mass_low_1sig'] ) & (catalog[prefix+'stellar_mass'] < catalog[prefix+'stellar_mass_up_1sig'] ) & ( - n.log10(catalog[prefix+'stellar_mass_low_1sig']) + n.log10(catalog[prefix+'stellar_mass_up_1sig']) < 0.8 )
# tighter cut: uncertainty below 0.4 dex
dex02 = (dex04) & ( - n.log10(catalog[prefix+'stellar_mass_low_1sig']) + n.log10(catalog[prefix+'stellar_mass_up_1sig']) < 0.4 )
#target_bits
# sorted unique PROGRAMNAME / SOURCETYPE values present in the catalog
program_names = n.array(list(set( catalog['PROGRAMNAME'] )))
program_names.sort()
sourcetypes = n.array(list(set( catalog['SOURCETYPE'] )))
sourcetypes.sort()
# Helper selectors, written as named functions instead of lambda assignments
# (PEP 8 E731): same names, same behavior, but with docstrings and usable
# tracebacks.
def length(selection):
    """Return the number of selected (True) entries in a boolean mask."""
    return len(selection.nonzero()[0])
# percentile grid (0, 5, ..., 100) used to summarize SNR distributions
pcs_ref = list(n.arange(0., 101, 5))
def g(key, s1, pcs=pcs_ref):
    """Return [N, percentiles...] of column ``key`` restricted to mask ``s1``.

    ``pcs`` defaults to the shared ``pcs_ref`` list (read-only here).
    """
    return n.hstack(( length(s1), sc(catalog[key][s1], pcs) ))
def sel_pg(pgr):
    """Mask: good-z objects (with SNR cut) observed under PROGRAMNAME ``pgr``."""
    return (catalog_zOk) & (catalog['PROGRAMNAME'] == pgr)
def sel_st(pgr):
    """Mask: good-z objects (with SNR cut) observed under SOURCETYPE ``pgr``."""
    return (catalog_zOk) & (catalog['SOURCETYPE'] == pgr)
def sel0_pg(pgr):
    """Mask: objects passing only the base redshift cuts, with PROGRAMNAME ``pgr``."""
    return (catalog_0) & (catalog['PROGRAMNAME'] == pgr)
def sel0_st(pgr):
    """Mask: objects passing only the base redshift cuts, with SOURCETYPE ``pgr``."""
    return (catalog_0) & (catalog['SOURCETYPE'] == pgr)
# Build one LaTeX table row per SOURCETYPE with more than 100 good-redshift
# galaxies: for every redshift bin, count objects and interpolate the
# percentile rank corresponding to SNR=5 and SNR=20 in that bin's SNR window.
all_galaxies = []
tpps = []
for pg in sourcetypes:
	sel_all = sel_st(pg)
	n_all = length( sel_all )
	# only keep source types with a meaningful number of galaxies
	if n_all > 100 :
		#print(pg, n_all)
		all_galaxies.append(n_all)
		all_out = []
		for z_Min, z_Max, snr_key in zip(z_bins[:-1], z_bins[1:], SNR_keys[snr_ids]):
			s_z = sel_all &(catalog[z_name] >= z_Min) & (catalog[z_name] < z_Max)
			n_z = length(s_z)
			#print(z_Min, z_Max, n_z)
			if n_z > 0 :
				#print(n.min(catalog[snr_key][s_z]), n.max(catalog[snr_key][s_z]))
				# invert the SNR percentile curve: maps an SNR value to its percentile rank
				itp = interp1d(sc(catalog[snr_key][s_z], pcs_ref), pcs_ref, kind='linear', fill_value= 100., bounds_error=False)
				#print(itp.x, itp.y)
				all_out.append( [n_z, itp(5), itp(20)] )
			else :
				# empty bin: sentinel values
				all_out.append([0., -1, -1])
		all_out = n.hstack((all_out))
		# one LaTeX table row: sourcetype & N & per-bin (N, pct@SNR5, pct@SNR20)
		tpp = pg + " & " + str(int(n_all)) + " & " + " & ".join(n.array([ str(int(el)) for el in all_out]) ) + ' \\\\ \n'
		print( tpp)
		tpps.append(tpp)
all_galaxies = n.array(all_galaxies)
tpps = n.array(tpps)
# write rows sorted by decreasing galaxy count
ids = n.argsort(all_galaxies)[::-1]
out_file = os.path.join(os.environ['OBS_REPO'], 'spm', 'results', "table_comp_"+survey+"_snr_all_sourcetype_SNR_moments.tex")
f=open(out_file, 'w')
#f.write('source type & N & \multicolumn{c}{2}{N galaxies} && \multicolumn{c}{2}{SNR ALL$>0$} & \\multicolumn{c}{2}{frefly converged} & \multicolumn{c}{2}{$\sigma_{\log_M}<0.4$} & \multicolumn{c}{2}{$\sigma_{\log_M}<0.2$} \\\\ \n')
#f.write(' & & N & % & & N & % & N & % & N & % \\\\ \n')
for jj in ids :
	f.write( tpps[jj] )
f.close()
# hard stop: nothing below this line ever runs
sys.exit()
# --- DEAD CODE ------------------------------------------------------------
# Everything below is unreachable: sys.exit() above terminates the script.
# The loops also reference names never defined in this file
# (get_basic_stat_deep2, get_basic_stat_firefly_DR14, deep2, boss, sdss, f),
# so they appear to be leftovers from an earlier version, kept for reference.
#converged = (catalog_zOk)&(catalog[prefix+'stellar_mass'] < 10**13. ) & (catalog[prefix+'stellar_mass'] > 10**4 ) & (catalog[prefix+'stellar_mass'] > catalog[prefix+'stellar_mass_low_1sig'] ) & (catalog[prefix+'stellar_mass'] < catalog[prefix+'stellar_mass_up_1sig'] )
#dex04 = (converged) & (catalog[prefix+'stellar_mass'] < 10**14. ) & (catalog[prefix+'stellar_mass'] > 0 ) & (catalog[prefix+'stellar_mass'] > catalog[prefix+'stellar_mass_low_1sig'] ) & (catalog[prefix+'stellar_mass'] < catalog[prefix+'stellar_mass_up_1sig'] ) & ( - n.log10(catalog[prefix+'stellar_mass_low_1sig']) + n.log10(catalog[prefix+'stellar_mass_up_1sig']) < 0.8 )
#dex02 = (dex04) & ( - n.log10(catalog[prefix+'stellar_mass_low_1sig']) + n.log10(catalog[prefix+'stellar_mass_up_1sig']) < 0.4 )
#m_catalog = n.log10(catalog[prefix+'stellar_mass'])
#w_catalog = n.ones_like(catalog[prefix+'stellar_mass'])
#print(ld(catalog_zOk))
#return name + " & $"+ sld(converged)+"$ ("+str(n.round(ld(converged)/ld(catalog_zOk)*100.,1))+") & $"+ sld(dex04)+"$ ("+str(n.round(ld(dex04)/ld(catalog_zOk)*100.,1))+") & $"+ sld(dex02)+ "$ ("+str(n.round(ld(dex02)/ld(catalog_zOk)*100.,1))+r") \\\\"
##return catalog_sel, m_catalog, w_catalog
sys.exit()
for IMF in imfs :
	prf = IMF.split('_')[0]+' & '+IMF.split('_')[1]
	l2w = get_basic_stat_deep2(deep2, 'ZBEST', 'ZQUALITY', prf, 2., IMF, o2=False)
	f.write(l2w + " \n")
f.write('\\hline \n')
#l2w = get_basic_stat_DR12(boss_12_portSF_kr, 'Z', 'Z_ERR', 'Portsmouth Kroupa Star-Forming & BOSS & 12 ', 0.)
#f.write(l2w + " \n")
#l2w = get_basic_stat_DR12(boss_12_portPA_kr, 'Z', 'Z_ERR', 'Portsmouth Kroupa Passive & BOSS & 12 ', 0.)
#f.write(l2w + " \n")
#l2w = get_basic_stat_DR12(boss_12_portSF_sa, 'Z', 'Z_ERR', 'Portsmouth Salpeter Star-Forming & BOSS & 12 ', 0.)
#f.write(l2w + " \n")
#l2w = get_basic_stat_DR12(boss_12_portPA_sa, 'Z', 'Z_ERR', 'Portsmouth Salpeter Passive & BOSS & 12 ', 0.)
#f.write(l2w + " \n")
for IMF in imfs :
	prf = IMF.split('_')[0]+' & '+IMF.split('_')[1]
	l2w = get_basic_stat_firefly_DR14(boss, 'Z_NOQSO', 'Z_ERR_NOQSO', 'CLASS_NOQSO', 'ZWARNING_NOQSO', prf, 0., IMF)
	f.write(l2w + " \n")
f.write('\\hline \n')
#l2w = get_basic_stat_DR12(sdss_12_portSF_kr, 'Z', 'Z_ERR', 'Portsmouth Kroupa Star-Forming & SDSS & 12 ', 0.)
#f.write(l2w + " \n")
#l2w = get_basic_stat_DR12(sdss_12_portPA_kr, 'Z', 'Z_ERR', 'Portsmouth Kroupa Passive & SDSS & 12 ', 0.)
#f.write(l2w + " \n")
#l2w = get_basic_stat_DR12(sdss_12_portSF_sa, 'Z', 'Z_ERR', 'Portsmouth Salpeter Star-Forming & SDSS & 12 ', 0.)
#f.write(l2w + " \n")
#l2w = get_basic_stat_DR12(sdss_12_portPA_sa, 'Z', 'Z_ERR', 'Portsmouth Salpeter Passive & SDSS & 12 ', 0.)
#f.write(l2w + " \n")
for IMF in imfs :
	prf = IMF.split('_')[0]+' & '+IMF.split('_')[1]
	l2w = get_basic_stat_firefly_DR14(sdss, 'Z', 'Z_ERR', 'CLASS', 'ZWARNING', prf, 0., IMF)
	f.write(l2w + " \n")
f.write('\\hline \n')
f.close()
#"""
out_file = os.path.join(os.environ['OBS_REPO'], 'spm', 'results', "table_2_r.tex")
f=open(out_file, 'w')
for IMF in imfs :
	prf = IMF.split('_')[0]+' & '+IMF.split('_')[1]
	l2w = get_basic_stat_deep2(deep2, 'ZBEST', 'ZQUALITY', prf, 2., IMF, o2=True)
	f.write(l2w + " \n")
f.close()
| JohanComparat/pySU | spm/bin_SMF/create_table_snr.py | Python | cc0-1.0 | 8,566 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the vcapp timetable models.

    Creates (forwards) / drops (backwards) the six core tables:
    Station, Line, Trip, TripStop, Segment and InterchangeStation.
    Generated code -- keep byte-stable; do not hand-edit applied migrations.
    """
    def forwards(self, orm):
        """Create all six vcapp tables and emit the post-create signals."""
        # Adding model 'Station'
        db.create_table('vcapp_station', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('station_name', self.gf('django.db.models.fields.CharField')(max_length=50)),
            ('lon', self.gf('django.db.models.fields.FloatField')()),
            ('lat', self.gf('django.db.models.fields.FloatField')()),
        ))
        db.send_create_signal('vcapp', ['Station'])
        # Adding model 'Line'
        db.create_table('vcapp_line', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('line_name', self.gf('django.db.models.fields.CharField')(max_length=50)),
        ))
        db.send_create_signal('vcapp', ['Line'])
        # Adding model 'Trip'
        db.create_table('vcapp_trip', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('timetable_type', self.gf('django.db.models.fields.CharField')(max_length=2)),
            ('line', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Line'])),
        ))
        db.send_create_signal('vcapp', ['Trip'])
        # Adding model 'TripStop'
        db.create_table('vcapp_tripstop', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('departure_time', self.gf('django.db.models.fields.TimeField')()),
            ('trip', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Trip'])),
            ('station', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Station'])),
        ))
        db.send_create_signal('vcapp', ['TripStop'])
        # Adding model 'Segment'
        db.create_table('vcapp_segment', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('departure_tripstop', self.gf('django.db.models.fields.related.ForeignKey')(related_name='departure_point', to=orm['vcapp.TripStop'])),
            ('arrival_tripstop', self.gf('django.db.models.fields.related.ForeignKey')(related_name='arrival_point', to=orm['vcapp.TripStop'])),
            ('trip', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Trip'])),
        ))
        db.send_create_signal('vcapp', ['Segment'])
        # Adding model 'InterchangeStation'
        db.create_table('vcapp_interchangestation', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('line', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Line'])),
            ('station', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Station'])),
        ))
        db.send_create_signal('vcapp', ['InterchangeStation'])
    def backwards(self, orm):
        """Drop all six vcapp tables (reverse of forwards)."""
        # Deleting model 'Station'
        db.delete_table('vcapp_station')
        # Deleting model 'Line'
        db.delete_table('vcapp_line')
        # Deleting model 'Trip'
        db.delete_table('vcapp_trip')
        # Deleting model 'TripStop'
        db.delete_table('vcapp_tripstop')
        # Deleting model 'Segment'
        db.delete_table('vcapp_segment')
        # Deleting model 'InterchangeStation'
        db.delete_table('vcapp_interchangestation')
    # Frozen ORM state: snapshot of the models as of this migration, used by
    # South to provide the 'orm' argument above.
    models = {
        'vcapp.interchangestation': {
            'Meta': {'object_name': 'InterchangeStation'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Line']"}),
            'station': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Station']"})
        },
        'vcapp.line': {
            'Meta': {'object_name': 'Line'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'vcapp.segment': {
            'Meta': {'object_name': 'Segment'},
            'arrival_tripstop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'arrival_point'", 'to': "orm['vcapp.TripStop']"}),
            'departure_tripstop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'departure_point'", 'to': "orm['vcapp.TripStop']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Trip']"})
        },
        'vcapp.station': {
            'Meta': {'object_name': 'Station'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lat': ('django.db.models.fields.FloatField', [], {}),
            'lon': ('django.db.models.fields.FloatField', [], {}),
            'station_name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'vcapp.trip': {
            'Meta': {'object_name': 'Trip'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Line']"}),
            'timetable_type': ('django.db.models.fields.CharField', [], {'max_length': '2'})
        },
        'vcapp.tripstop': {
            'Meta': {'object_name': 'TripStop'},
            'departure_time': ('django.db.models.fields.TimeField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'station': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Station']"}),
            'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Trip']"})
        }
    }
    # Apps whose frozen models are fully described above.
    complete_apps = ['vcapp']
# (subject, action) pairs for each verse of "This Is the House That Jack Built"
parts = (('house', 'Jack built'),
         ('malt', 'lay in'),
         ('rat', 'ate'),
         ('cat', 'killed'),
         ('dog', 'worried'),
         ('cow with the crumpled horn', 'tossed'),
         ('maiden all forlorn', 'milked'),
         ('man all tattered and torn', 'kissed'),
         ('priest all shaven and shorn', 'married'),
         ('rooster that crowed in the morn', 'woke'),
         ('farmer sowing his corn', 'kept'),
         ('horse and the hound and the horn', 'belonged to'))


def verse(n):
    """Return subject and action for verse ``n``; verses after the first break the line."""
    subject, action = parts[n]
    joiner = ' that ' if n == 0 else '\nthat '
    return subject + joiner + action


def rhymes(v=11):
    """Chain verse ``v`` down through verse 0, linked by ' the '."""
    return ' the '.join(verse(i) for i in range(v, -1, -1))


def rhyme():
    """Return the complete twelve-stanza rhyme as a single string."""
    stanzas = ['This is the ' + rhymes(v) + '.' for v in range(12)]
    return '\n\n'.join(stanzas)
| Winawer/exercism | python/house/house.py | Python | cc0-1.0 | 886 |
#!/usr/bin/env python
# 8 band Audio equaliser from wav file
# import alsaaudio as aa
# import smbus
from struct import unpack
import numpy as np
import wave
from time import sleep
import sys
ADDR = 0x20 #The I2C address of MCP23017
DIRA = 0x00 #PortA I/O direction, by pin. 0=output, 1=input
DIRB = 0x01 #PortB I/O direction, by pin. 0=output, 1=input
BANKA = 0x12 #Register address for Bank A
BANKB = 0x13 #Register address for Bank B
# bus=smbus.SMBus(0) #Use '1' for newer Pi boards;
# #Set up the 23017 for 16 output pins
# bus.write_byte_data(ADDR, DIRA, 0); #all zeros = all outputs on Bank A
# bus.write_byte_data(ADDR, DIRB, 0); #all zeros = all outputs on Bank B
# def TurnOffLEDS ():
# bus.write_byte_data(ADDR, BANKA, 0xFF) #set all columns high
# bus.write_byte_data(ADDR, BANKB, 0x00) #set all rows low
# def Set_Column(row, col):
# bus.write_byte_data(ADDR, BANKA, col)
# bus.write_byte_data(ADDR, BANKB, row)
# # Initialise matrix
# TurnOffLEDS()
# per-band levels (0..8), overwritten by calculate_levels on each chunk
matrix = [0,0,0,0,0,0,0,0]
# NOTE(review): 'power' is never used at module level (calculate_levels uses a
# local of the same name) -- looks like a leftover.
power = []
# weighting = [2,2,8,8,16,32,64,64] # Change these according to taste
# per-band gain applied before clipping the levels
weighting = [2,2,2,2,4,4,8,8] # Change these according to taste
# Set up audio
#wavfile = wave.open('test_stereo_16000Hz_16bit_PCM.wav','r')
#wavfile = wave.open('Media-Convert_test5_PCM_Stereo_VBR_8SS_44100Hz.wav','r')
# input file is opened at import time; sample rate / channel count drive the FFT below
wavfile = wave.open('Media-Convert_test2_PCM_Mono_VBR_8SS_48000Hz.wav','r')
sample_rate = wavfile.getframerate()
no_channels = wavfile.getnchannels()
chunk = 4096 # Use a multiple of 8
# output = aa.PCM(aa.PCM_PLAYBACK, aa.PCM_NORMAL)
# output.setchannels(no_channels)
# output.setrate(sample_rate)
# output.setformat(aa.PCM_FORMAT_S16_LE)
# output.setperiodsize(chunk)
# Return power array index corresponding to a particular frequency
def piff(val):
    # Maps a frequency in Hz to an rFFT bin index: bin = 2*chunk*val/sample_rate.
    # NOTE(review): under Python 2 this is floor division for int operands
    # (chunk and val are ints) -- confirm intent before porting to Python 3.
    return int(2*chunk*val/sample_rate)
def print_intensity(matrix):
    """Redraw the in-place text VU meter for the per-band levels on stdout.

    Each level (expected 0..8) renders as that many '|' bars, space-padded
    to width 8, followed by a separating space; the '\\r' prefix rewrites
    the current terminal line.
    """
    bars = ''.join('|' * level + ' ' * (8 - level) + ' ' for level in matrix)
    sys.stdout.write('\rlevel: ' + bars)
    sys.stdout.flush()
def calculate_levels(data, chunk, sample_rate):
    # Convert one chunk of raw PCM into 8 LED-bar levels (0..8) via an FFT.
    # Returns the global 'matrix' (as a numpy array) or None for a short chunk.
    #print ("[calculate_levels] chunk=%s, sample_rate: %s, len(data)=%s" % (chunk, sample_rate, len(data)));
    # NOTE(review): compares byte length of 'data' against the frame count
    # 'chunk' -- for 16-bit mono these differ by a factor of 2; confirm which
    # chunks this is meant to skip (in practice it skips the final partial read).
    if(len(data) != chunk):
        print ("\n[calculate_levels] skiping: chunk=%s != len(data)=%s" % (chunk, len(data)));
        return None;
    global matrix
    # Convert raw data (ASCII string) to numpy array
    data = unpack("%dh"%(len(data)/2),data)
    data = np.array(data, dtype='h')
    # Apply FFT - real data
    fourier=np.fft.rfft(data)
    # Remove last element in array to make it the same size as chunk
    fourier=np.delete(fourier,len(fourier)-1)
    # Find average 'amplitude' for specific frequency ranges in Hz
    power = np.abs(fourier)
    matrix[0]= int(np.mean(power[piff(0)    :piff(156):1]))
    matrix[1]= int(np.mean(power[piff(156)  :piff(313):1]))
    matrix[2]= int(np.mean(power[piff(313)  :piff(625):1]))
    matrix[3]= int(np.mean(power[piff(625)  :piff(1250):1]))
    matrix[4]= int(np.mean(power[piff(1250) :piff(2500):1]))
    matrix[5]= int(np.mean(power[piff(2500) :piff(5000):1]))
    matrix[6]= int(np.mean(power[piff(5000) :piff(10000):1]))
    # Produces error, I guess to low sampling rate of the audio file
    # matrix[7]= int(np.mean(power[piff(10000):piff(20000):1]))
    # NOTE(review): matrix[7] is never refreshed, so band 8 carries a stale
    # (already weighted/clipped) value from the previous chunk.
    # Tidy up column values for the LED matrix
    matrix=np.divide(np.multiply(matrix,weighting),1000000)
    # Set floor at 0 and ceiling at 8 for LED matrix
    matrix=matrix.clip(0,8)
    return matrix
# Process audio file: read chunks, compute band levels, draw the VU meter.
# Parenthesized print keeps Python 2 behavior while also parsing under Python 3.
print("Processing.....")
data = wavfile.readframes(chunk)
while data != '':
    # output.write(data)
    matrix = calculate_levels(data, chunk, sample_rate)
    # BUG FIX: the original did `if matrix == None: next;` -- `next;` is a
    # no-op expression (just names the builtin), and `== None` on a numpy
    # array is an elementwise comparison, so a skipped (short) chunk fell
    # through into print_intensity(None). Skip drawing instead, but keep
    # consuming frames so the loop still terminates.
    if matrix is not None:
        print_intensity(matrix)
        # for i in range (0,8):
        #     Set_Column((1<<matrix[i])-1,0xFF^(1<<i))
        sleep(0.1)
    data = wavfile.readframes(chunk)
# TurnOffLEDS()
# =========================
| mprinc/FeelTheSound | src/PoC/fft.py | Python | cc0-1.0 | 4,053 |
try:
from code import InteractiveConsole
except ImportError:
from pydevconsole_code_for_ironpython import InteractiveConsole
import os
import sys
# Compatibility shim: ancient interpreters (< 2.3) lack the True/False
# builtins, so install integer stand-ins when the names are missing.
try:
    False
    True
except NameError: # version < 2.3 -- didn't have the True/False builtins
    import __builtin__
    setattr(__builtin__, 'True', 1) # Python 3.0 does not accept __builtin__.True = 1 in its syntax
    setattr(__builtin__, 'False', 0)
from pydev_console_utils import BaseStdIn, StdIn, BaseInterpreterInterface
# Try to wire command execution through Eclipse/Java (Jython inside the IDE):
# commands may then optionally run on the UI thread via RunInUiThread.
# If the Java-side classes are unavailable, fall back to plain execution.
try:
    class ExecState:
        # Flipped to False after the first command prints the console banner.
        FIRST_CALL = True
        PYDEV_CONSOLE_RUN_IN_UI = False # Defines if we should run commands in the UI thread.
    from org.python.pydev.core.uiutils import RunInUiThread # @UnresolvedImport
    from java.lang import Runnable # @UnresolvedImport
    class Command(Runnable):
        # Wraps one console line; run() pushes it into the interpreter and
        # records in self.more whether more input is expected.
        def __init__(self, interpreter, line):
            self.interpreter = interpreter
            self.line = line
        def run(self):
            if ExecState.FIRST_CALL:
                ExecState.FIRST_CALL = False
                sys.stdout.write('\nYou are now in a console within Eclipse.\nUse it with care as it can halt the VM.\n')
                sys.stdout.write('Typing a line with "PYDEV_CONSOLE_TOGGLE_RUN_IN_UI"\nwill start executing all the commands in the UI thread.\n\n')
            # magic input line toggles UI-thread execution instead of being executed
            if self.line == 'PYDEV_CONSOLE_TOGGLE_RUN_IN_UI':
                ExecState.PYDEV_CONSOLE_RUN_IN_UI = not ExecState.PYDEV_CONSOLE_RUN_IN_UI
                if ExecState.PYDEV_CONSOLE_RUN_IN_UI:
                    sys.stdout.write('Running commands in UI mode. WARNING: using sys.stdin (i.e.: calling raw_input()) WILL HALT ECLIPSE.\n')
                else:
                    sys.stdout.write('No longer running commands in UI mode.\n')
                self.more = False
            else:
                self.more = self.interpreter.push(self.line)
    def Sync(runnable):
        # Run on the Eclipse UI thread only when the toggle is on.
        if ExecState.PYDEV_CONSOLE_RUN_IN_UI:
            return RunInUiThread.sync(runnable)
        else:
            return runnable.run()
except:
    # If things are not there, define a way in which there's no 'real' sync, only the default execution.
    class Command:
        # Plain (non-Runnable) fallback with the same interface as above.
        def __init__(self, interpreter, line):
            self.interpreter = interpreter
            self.line = line
        def run(self):
            self.more = self.interpreter.push(self.line)
    def Sync(runnable):
        runnable.run()
# Make execfile available as a builtin on Python 3 (where it was removed),
# so console users can keep calling it; best-effort, failures are ignored.
try:
    try:
        execfile # Not in Py3k
    except NameError:
        from pydev_imports import execfile
        import builtins # @UnresolvedImport -- only Py3K
        builtins.execfile = execfile
except:
    pass
# Pull in runfile, the interface to UMD that wraps execfile
from pydev_umd import runfile, _set_globals_function
# Expose runfile as a builtin on both Python 3 (builtins) and Python 2 (__builtin__).
try:
    import builtins
    builtins.runfile = runfile
except:
    import __builtin__
    __builtin__.runfile = runfile
#=======================================================================================================================
# InterpreterInterface
#=======================================================================================================================
class InterpreterInterface(BaseInterpreterInterface):
    '''
    The methods in this class should be registered in the xml-rpc server.
    '''
    def __init__(self, host, client_port, server):
        # host/client_port identify the Eclipse-side client to report back to.
        BaseInterpreterInterface.__init__(self, server)
        self.client_port = client_port
        self.host = host
        try:
            import pydevd # @UnresolvedImport
            if pydevd.GetGlobalDebugger() is None:
                raise RuntimeError() # Work as if the debugger does not exist as it's not connected.
        except:
            # no connected debugger: evaluate in this module's globals
            self.namespace = globals()
        else:
            # Adapted from the code in pydevd
            # patch provided by: Scott Schlesier - when script is run, it does not
            # pretend pydevconsole is not the main module, and
            # convince the file to be debugged that it was loaded as main
            sys.modules['pydevconsole'] = sys.modules['__main__']
            sys.modules['pydevconsole'].__name__ = 'pydevconsole'
            from imp import new_module
            m = new_module('__main__')
            sys.modules['__main__'] = m
            ns = m.__dict__
            try:
                ns['__builtins__'] = __builtins__
            except NameError:
                pass # Not there on Jython...
            self.namespace = ns
        self.interpreter = InteractiveConsole(self.namespace)
        self._input_error_printed = False
    def doAddExec(self, line):
        """Execute one console line (possibly on the UI thread); return whether more input is expected."""
        command = Command(self.interpreter, line)
        Sync(command)
        return command.more
    def getNamespace(self):
        """Return the dict used as the console's execution namespace."""
        return self.namespace
    def getCompletions(self, text, act_tok):
        """Return code completions for act_tok; empty list on any failure."""
        try:
            from _pydev_completer import Completer
            completer = Completer(self.namespace, None)
            return completer.complete(act_tok)
        except:
            import traceback;traceback.print_exc()
            return []
    def close(self):
        # NOTE: sys.exit is replaced by _DoExit while serving, so this
        # terminates the whole process, not just the current thread.
        sys.exit(0)
# Prefer the IPython-backed console when available: this rebinds the
# InterpreterInterface name defined above to the IPython implementation.
try:
    from pydev_ipython_console import InterpreterInterface
except:
    sys.stderr.write('PyDev console: using default backend (IPython not available).\n')
    pass # IPython not available, proceed as usual.
#=======================================================================================================================
# _DoExit
#=======================================================================================================================
def _DoExit(*args):
    '''
    Replacement for sys.exit: a plain sys.exit only terminates the calling
    thread, and while an Xml-rpc server is running that is not enough, so
    the whole process is brought down instead.
    '''
    try:
        # Jython: go through the JVM to terminate the entire process.
        import java.lang.System
        java.lang.System.exit(1)
    except ImportError:
        # CPython: hard-exit with the given status code (default 0).
        exit_code = args[0] if len(args) == 1 else 0
        os._exit(exit_code)
#=======================================================================================================================
# StartServer
#=======================================================================================================================
def StartServer(host, port, client_port):
    '''
    Run the console XML-RPC server forever on (host, port), exposing an
    InterpreterInterface that reports back to the client on client_port.
    This call blocks (serve_forever).
    '''
    # replace exit (see comments on method)
    # note that this does not work in jython!!! (sys method can't be replaced).
    sys.exit = _DoExit
    from _pydev_xmlrpc_hook import InputHookedXMLRPCServer
    try:
        server = InputHookedXMLRPCServer((host, port), logRequests=False)
        interpreter = InterpreterInterface(host, client_port, server)
    except:
        sys.stderr.write('Error starting server with host: %s, port: %s, client_port: %s\n' % (host, port, client_port))
        raise
    # Tell UMD the proper default namespace
    _set_globals_function(interpreter.getNamespace)
    # Functions for basic protocol
    server.register_function(interpreter.addExec)
    server.register_function(interpreter.getCompletions)
    server.register_function(interpreter.getDescription)
    server.register_function(interpreter.close)
    # Functions so that the console can work as a debugger (i.e.: variables view, expressions...)
    server.register_function(interpreter.connectToDebugger)
    server.register_function(interpreter.hello)
    # Functions for GUI main loop integration
    server.register_function(interpreter.enableGui)
    server.serve_forever()
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
    # replace stdin so reads go through the pydev console protocol
    sys.stdin = BaseStdIn()
    # usage: pydevconsole.py <server_port> <client_port>
    port, client_port = sys.argv[1:3]
    import pydev_localhost
    # blocks serving the console protocol until the process exits
    StartServer(pydev_localhost.get_localhost(), int(port), int(client_port))
| aptana/Pydev | bundles/org.python.pydev/pysrc/pydevconsole.py | Python | epl-1.0 | 7,910 |
# #
# Copyright 2012-2019 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Module with various utility functions
:author: Kenneth Hoste (Ghent University)
"""
import datetime
import glob
import os
import re
import sys
from string import digits
from easybuild.base import fancylogger
from easybuild.tools.build_log import EasyBuildError, print_msg
from easybuild.tools.config import build_option
from easybuild.tools.py2vs3 import ascii_letters, string_type
# module-level logger for this utilities module
_log = fancylogger.getLogger('tools.utilities')
# common indentation units used when rendering text (e.g., rst tables)
INDENT_2SPACES = ' ' * 2
INDENT_4SPACES = ' ' * 4
def flatten(lst):
    """Flatten a list of lists (one level) into a single list."""
    return [item for sublist in lst for item in sublist]
def quote_str(val, escape_newline=False, prefer_single_quotes=False, tcl=False):
    """
    Obtain a new value to be used in string replacement context.

    For non-string values, it just returns the exact same value.
    For string values, it tries to escape the string in quotes, e.g.,
    foo becomes 'foo', foo'bar becomes "foo'bar",
    foo'bar"baz becomes \"\"\"foo'bar"baz\"\"\", etc.

    :param escape_newline: wrap strings that include a newline in triple quotes
    :param prefer_single_quotes: if possible use single quotes
    :param tcl: Boolean for whether we are quoting for Tcl syntax
    """
    # non-strings pass through untouched
    if not isinstance(val, string_type):
        return val
    has_single = "'" in val
    has_double = '"' in val
    # both quote styles present, or a newline that must survive: triple-quote
    if (has_single and has_double) or (escape_newline and '\n' in val):
        return '"""%s"""' % val
    # double quote(s) in the value: escape them for Tcl, otherwise single-quote
    if has_double:
        if tcl:
            return '"%s"' % val.replace('"', '\\"')
        return "'%s'" % val
    # single quotes preferred and safe (no single quote or space in the value)
    if prefer_single_quotes and not has_single and ' ' not in val:
        return "'%s'" % val
    # fallback on double quotes (required in tcl syntax)
    return '"%s"' % val
def quote_py_str(val):
    """Version of quote_str specific for generating use in Python context (e.g., easyconfig parameters)."""
    # Python context: keep embedded newlines intact and favour single quotes.
    return quote_str(val, escape_newline=True, prefer_single_quotes=True)
def shell_quote(token):
    """
    Wrap provided token in single quotes (to escape space and characters with special meaning in a shell),
    so it can be used in a shell command. This results in token that is not expanded/interpolated by the shell.
    """
    # strip a layer of wrapping double quotes first, so the result is not
    # a single-quoted value that still carries its old double quotes
    stripped = str(token).strip('"')
    # backslash-escape any single quote that is not escaped yet
    escaped = re.sub(r"(?<!\\)'", r"\'", stripped)
    return "'%s'" % escaped
def remove_unwanted_chars(inputstring):
    """Return a copy of ``inputstring`` keeping only letters, digits and underscores.

    All other characters are considered unwanted and dropped.
    """
    wanted = ascii_letters + digits + '_'
    return ''.join([char for char in inputstring if char in wanted])
def import_available_modules(namespace):
    """
    Import all available module in the specified namespace.

    :param namespace: The namespace to import modules from.
    :return: list of imported module objects (duplicates skipped), in sys.path order
    :raise EasyBuildError: when a candidate module fails to import
    """
    modules = []
    # scan every sys.path entry for candidate <namespace>/*.py files
    for path in sys.path:
        cand_modpath_glob = os.path.sep.join([path] + namespace.split('.') + ['*.py'])
        # if sys.path entry being considered is the empty string
        # (which corresponds to Python packages/modules in current working directory being considered),
        # we need to strip off / from the start of the path
        if path == '' and cand_modpath_glob.startswith(os.path.sep):
            cand_modpath_glob = cand_modpath_glob.lstrip(os.path.sep)
        for module in sorted(glob.glob(cand_modpath_glob)):
            # package __init__ files are not modules of the namespace themselves
            if not module.endswith('__init__.py'):
                mod_name = module.split(os.path.sep)[-1].split('.')[0]
                modpath = '.'.join([namespace, mod_name])
                _log.debug("importing module %s", modpath)
                try:
                    mod = __import__(modpath, globals(), locals(), [''])
                except ImportError as err:
                    raise EasyBuildError("import_available_modules: Failed to import %s: %s", modpath, err)
                if mod not in modules:
                    modules.append(mod)
    return modules
def only_if_module_is_available(modnames, pkgname=None, url=None):
    """Decorator to guard functions/methods against missing required module with specified name.

    :param modnames: module name, or tuple of alternative module names (any one suffices)
    :param pkgname: Python package that provides the module(s), used to derive a PyPI URL
    :param url: URL where the required package can be obtained
    """
    if pkgname and url is None:
        url = 'https://pypi.python.org/pypi/%s' % pkgname
    # normalize a single module name to a 1-tuple of alternatives
    if isinstance(modnames, string_type):
        modnames = (modnames,)
    def wrap(orig):
        """Decorated function, raises ImportError if specified module is not available."""
        try:
            imported = None
            for modname in modnames:
                try:
                    __import__(modname)
                    imported = modname
                    break
                except ImportError:
                    pass
            if imported is None:
                raise ImportError("None of the specified modules %s is available" % ', '.join(modnames))
            else:
                # module is available: return the original callable untouched
                return orig
        except ImportError as err:
            # need to pass down 'err' via named argument to ensure it's in scope when using Python 3.x
            # NOTE: 'modname' below is the last alternative tried in the loop above
            def error(err=err, *args, **kwargs):
                msg = "%s; required module '%s' is not available" % (err, modname)
                if pkgname:
                    msg += " (provided by Python package %s, available from %s)" % (pkgname, url)
                elif url:
                    msg += " (available from %s)" % url
                raise EasyBuildError("ImportError: %s", msg)
            return error
    return wrap
def trace_msg(message, silent=False):
    """Print trace message."""
    # NOTE: 'silent' is accepted but never used here; output is gated only
    # by the 'trace' build option.
    if build_option('trace'):
        print_msg(' >> ' + message, prefix=False)
def nub(list_):
    """Return the unique hashable items of ``list_``, preserving the order of
    the original list: only the first occurrence of each element is kept.

    @type list_: a list :-)
    :return: a new list with each element from `list_` appearing only once.
    """
    seen = set()
    uniques = []
    for item in list_:
        if item not in seen:
            seen.add(item)
            uniques.append(item)
    return uniques
def get_class_for(modulepath, class_name):
    """
    Get class for a given Python class name and Python module path.

    :param modulepath: Python module path (e.g., 'easybuild.base.generaloption')
    :param class_name: Python class name (e.g., 'GeneralOption')
    :raise ImportError: if the module cannot be imported or lacks the class
    """
    # import the specified module; re-raise a failure as ImportError for the caller
    try:
        module = __import__(modulepath, globals(), locals(), [''])
    except ImportError as err:
        raise ImportError(err)
    # a missing attribute is reported as an ImportError as well
    try:
        return getattr(module, class_name)
    except AttributeError as err:
        raise ImportError("Failed to import %s from %s: %s" % (class_name, modulepath, err))
def get_subclasses_dict(klass, include_base_class=False):
    """Get dict mapping each class to its direct subclasses, walking the class
    tree from the specified base class; the base class itself is only part of
    the result when ``include_base_class`` is set."""
    res = {}
    # iterative worklist traversal; descendants are always included
    pending = [(klass, include_base_class)]
    while pending:
        current, include = pending.pop()
        subclasses = current.__subclasses__()
        if include:
            res[current] = subclasses
        pending.extend((subclass, True) for subclass in subclasses)
    return res
def get_subclasses(klass, include_base_class=False):
    """Get list of all subclasses, recursively from the specified base class."""
    # NOTE: under Python 3 this returns a dict keys view, not a true list.
    return get_subclasses_dict(klass, include_base_class=include_base_class).keys()
def mk_rst_table(titles, columns):
    """
    Returns an rst table with given titles and columns (a nested list of string columns for each column).

    :param titles: list of column titles
    :param columns: nested list; one list of cell strings per column
    :return: list of lines making up the rst table (ending with an empty line)
    :raise ValueError: if the number of titles and columns differ
    """
    title_cnt, col_cnt = len(titles), len(columns)
    if title_cnt != col_cnt:
        msg = "Number of titles/columns should be equal, found %d titles and %d columns" % (title_cnt, col_cnt)
        raise ValueError(msg)
    table = []
    tmpl = []
    # figure out column widths and build a per-column format template;
    # '{i:{c}<w}' left-pads field i to width w using fill character c
    for i, title in enumerate(titles):
        width = max(map(len, columns[i] + [title]))
        tmpl.append('{%s:{c}<%s}' % (i, width))
    # the old code rebuilt this loop-invariant list on every iteration
    # (and had a dead `line = []` before the loop); compute it once here
    line = [''] * col_cnt
    line_tmpl = INDENT_4SPACES.join(tmpl)
    table_line = line_tmpl.format(*line, c='=')
    table.append(table_line)
    table.append(line_tmpl.format(*titles, c=' '))
    table.append(table_line)
    # one table row per tuple of column cells
    for row in map(list, zip(*columns)):
        table.append(line_tmpl.format(*row, c=' '))
    table.extend([table_line, ''])
    return table
def time2str(delta):
    """Return string representing provided datetime.timedelta value in human-readable form."""
    if not isinstance(delta, datetime.timedelta):
        raise EasyBuildError("Incorrect value type provided to time2str, should be datetime.timedelta: %s", type(delta))
    # total duration in seconds (possibly fractional via microseconds)
    delta_secs = delta.days * 3600 * 24 + delta.seconds + delta.microseconds / 10**6
    # under a minute: seconds only
    if delta_secs < 60:
        return '%d sec' % int(delta_secs)
    # under an hour: minutes + seconds
    if delta_secs < 3600:
        mins = int(delta_secs / 60)
        return '%d min %d sec' % (mins, int(delta_secs - mins * 60))
    # an hour or more: hours (singular/plural) + minutes + seconds
    hours = int(delta_secs / 3600)
    mins = int((delta_secs - hours * 3600) / 60)
    secs = int(delta_secs - (hours * 3600) - (mins * 60))
    hours_str = 'hours' if hours > 1 else 'hour'
    return '%d %s %d min %d sec' % (hours, hours_str, mins, secs)
| gppezzi/easybuild-framework | easybuild/tools/utilities.py | Python | gpl-2.0 | 11,107 |
# Rewrite of the original calTimer to use QThreads instead of native Python threads.
# QThreads are needed so the timer can make UI changes (impossible from a native thread);
# this also aims to remove the need for SIGTERM to stop the permanent polling loop.
from PyQt4 import QtCore
import time,os,ctypes
import sys
class calTimer(QtCore.QThread):
    """Qt worker thread that polls ./data/data.xml until its size changes.

    Fixes over the previous version:
    - the class attribute used to be `os.stat(xml_file)` evaluated at import
      time, which stored a whole stat_result (not a size, inconsistent with
      initFileSize) and raised OSError on import when the file was missing;
    - run() compared two values that were never updated, with the sleep
      commented out, i.e. an infinite busy loop; it now re-stats the file
      each round, sleeps between polls, and returns once the size changes.
    """

    # path of the watched XML file (relative to the working directory)
    xml_file = './data/data.xml'
    # last observed size in bytes; set properly by initFileSize()
    fileSize = 0

    def initFileSize(self):
        """Record the current size (in bytes) of the watched file."""
        print("initfilesize run")
        self.fileSize = os.stat(self.xml_file).st_size

    def run(self):
        """Poll the watched file every 3 seconds; return when its size changes."""
        self.initFileSize()
        baseline = self.fileSize
        while True:
            current = os.stat(self.xml_file).st_size
            if current != baseline:
                break
            print("No change - sleep 3")
            time.sleep(3)
| CPSC491FileMaker/project | calTimer.QThread.py | Python | gpl-2.0 | 717 |
from time import localtime, time, strftime, mktime
from enigma import eServiceReference, eTimer, eServiceCenter, ePoint
from Screen import Screen
from Screens.HelpMenu import HelpableScreen
from Components.About import about
from Components.ActionMap import HelpableActionMap, HelpableNumberActionMap
from Components.Button import Button
from Components.config import config, configfile, ConfigClock
from Components.EpgList import EPGList, EPGBouquetList, TimelineText, EPG_TYPE_SINGLE, EPG_TYPE_SIMILAR, EPG_TYPE_MULTI, EPG_TYPE_ENHANCED, EPG_TYPE_INFOBAR, EPG_TYPE_INFOBARGRAPH, EPG_TYPE_GRAPH, MAX_TIMELINES
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.Sources.ServiceEvent import ServiceEvent
from Components.Sources.Event import Event
from Components.UsageConfig import preferredTimerPath
from Screens.TimerEdit import TimerSanityConflict
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.ChoiceBox import ChoiceBox
from Screens.MessageBox import MessageBox
from Screens.PictureInPicture import PictureInPicture
from Screens.Setup import Setup
from TimeDateInput import TimeDateInput
from RecordTimer import RecordTimerEntry, parseEvent, AFTEREVENT
from TimerEntry import TimerEntry, InstantRecordTimerEntry
from ServiceReference import ServiceReference
# lazily set to True once the multi-EPG time config has been created (see enterDateTime)
mepg_config_initialized = False
# PiPServiceRelation installed?
# Probe for the optional PiPServiceRelation plugin; the bare except is a
# deliberate best-effort check -- any failure just means "not installed".
try:
    from Plugins.SystemPlugins.PiPServiceRelation.plugin import getRelationDict
    plugin_PiPServiceRelation_installed = True
except:
    plugin_PiPServiceRelation_installed = False
class EPGSelection(Screen, HelpableScreen):
    """EPG screen supporting several layouts (single, multi, graphical, infobar, ...)."""
    # states for the green-button timer choice (see key_green_choice)
    EMPTY = 0
    ADD_TIMER = 1
    REMOVE_TIMER = 2
    # constant used for zap handling
    ZAP = 1
def __init__(self, session, service = None, zapFunc = None, eventid = None, bouquetChangeCB=None, serviceChangeCB = None, EPGtype = None, StartBouquet = None, StartRef = None, bouquets = None):
    """Build the EPG screen: choose the EPG variant from `EPGtype`
    ('single', 'infobar', 'enhanced', 'graph', 'infobargraph', 'multi';
    anything else falls back to the similar-events list), then create
    the widgets, action maps and timers that variant needs.

    NOTE(review): bouquetChangeCB is accepted but never stored or used in
    this constructor -- confirm whether any caller still relies on it.
    """
    Screen.__init__(self, session)
    HelpableScreen.__init__(self)
    self.zapFunc = zapFunc
    self.serviceChangeCB = serviceChangeCB
    self.bouquets = bouquets
    graphic = False
    # translate the EPGtype string into an EPG_TYPE_* constant
    if EPGtype == 'single':
        self.type = EPG_TYPE_SINGLE
    elif EPGtype == 'infobar':
        self.type = EPG_TYPE_INFOBAR
    elif EPGtype == 'enhanced':
        self.type = EPG_TYPE_ENHANCED
    elif EPGtype == 'graph':
        self.type = EPG_TYPE_GRAPH
        if config.epgselection.graph_type_mode.value == "graphics":
            graphic = True
    elif EPGtype == 'infobargraph':
        self.type = EPG_TYPE_INFOBARGRAPH
        if config.epgselection.infobar_type_mode.value == "graphics":
            graphic = True
    elif EPGtype == 'multi':
        self.type = EPG_TYPE_MULTI
    else:
        self.type = EPG_TYPE_SIMILAR
    # the single-service EPG never zaps back, so it needs no start state
    if not self.type == EPG_TYPE_SINGLE:
        self.StartBouquet = StartBouquet
        self.StartRef = StartRef
    # state shared by all variants
    self.servicelist = None
    self.longbuttonpressed = False
    self.ChoiceBoxDialog = None
    self.ask_time = -1
    self.closeRecursive = False
    self.eventviewDialog = None
    self.eventviewWasShown = False
    self.currch = None
    self.session.pipshown = False
    self.cureventindex = None
    if plugin_PiPServiceRelation_installed:
        self.pipServiceRelation = getRelationDict()
    else:
        self.pipServiceRelation = {}
    # number-zap support (digit keys jump to a channel number)
    self.zapnumberstarted = False
    self.NumberZapTimer = eTimer()
    self.NumberZapTimer.callback.append(self.dozumberzap)
    self.NumberZapField = None
    self.CurrBouquet = None
    self.CurrService = None
    self["number"] = Label()
    self["number"].hide()
    self['Service'] = ServiceEvent()
    self['Event'] = Event()
    self['lab1'] = Label(_('Please wait while gathering data...'))
    self.key_green_choice = self.EMPTY
    self['key_red'] = Button(_('IMDb Search'))
    self['key_green'] = Button(_('Add Timer'))
    self['key_yellow'] = Button(_('EPG Search'))
    self['key_blue'] = Button(_('Add AutoTimer'))
    # action maps shared by every variant
    self['dialogactions'] = HelpableActionMap(self, 'WizardActions',
        {
            'back': (self.closeChoiceBoxDialog, _('Close dialog')),
        }, -1)
    self['dialogactions'].csel = self
    self["dialogactions"].setEnabled(False)
    self['okactions'] = HelpableActionMap(self, 'OkCancelActions',
        {
            'cancel': (self.closeScreen, _('Exit EPG')),
            'OK': (self.OK, _('Zap to channel (setup in menu)')),
            'OKLong': (self.OKLong, _('Zap to channel and close (setup in menu)'))
        }, -1)
    self['okactions'].csel = self
    self['colouractions'] = HelpableActionMap(self, 'ColorActions',
        {
            'red': (self.redButtonPressed, _('IMDB search for current event')),
            'redlong': (self.redlongButtonPressed, _('Sort EPG List')),
            'green': (self.greenButtonPressed, _('Add/Remove timer for current event')),
            'yellow': (self.yellowButtonPressed, _('Search for similar events')),
            'greenlong': (self.showTimerList, _('Show Timer List')),
            'blue': (self.blueButtonPressed, _('Add a auto timer for current event')),
            'bluelong': (self.blueButtonPressedLong, _('Show AutoTimer List'))
        }, -1)
    self['colouractions'].csel = self
    self['recordingactions'] = HelpableActionMap(self, 'InfobarInstantRecord',
        {
            'ShortRecord': (self.recButtonPressed, _('Add a record timer for current event')),
            'LongRecord': (self.reclongButtonPressed, _('Add a zap timer for current event'))
        }, -1)
    self['recordingactions'].csel = self
    # variant-specific widgets and key bindings
    if self.type == EPG_TYPE_SIMILAR:
        self.currentService = service
        self.eventid = eventid
        self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
            {
                'info': (self.Info, _('Show detailed event info')),
                'infolong': (self.InfoLong, _('Show single epg for current channel')),
                'menu': (self.createSetup, _('Setup menu'))
            }, -1)
        self['epgactions'].csel = self
    elif self.type == EPG_TYPE_SINGLE:
        self.currentService = ServiceReference(service)
        self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
            {
                'info': (self.Info, _('Show detailed event info')),
                'epg': (self.Info, _('Show detailed event info')),
                'menu': (self.createSetup, _('Setup menu'))
            }, -1)
        self['epgactions'].csel = self
        self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
            {
                'left': (self.prevPage, _('Move up a page')),
                'right': (self.nextPage, _('Move down a page')),
                'up': (self.moveUp, _('Goto previous channel')),
                'down': (self.moveDown, _('Goto next channel'))
            }, -1)
        self['epgcursoractions'].csel = self
    elif self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_ENHANCED:
        if self.type == EPG_TYPE_INFOBAR:
            self.skinName = 'QuickEPG'
            self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
                {
                    'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
                    'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
                    'nextService': (self.nextPage, _('Move down a page')),
                    'prevService': (self.prevPage, _('Move up a page')),
                    'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
                    'info': (self.Info, _('Show detailed event info')),
                    'infolong': (self.InfoLong, _('Show single epg for current channel')),
                    'menu': (self.createSetup, _('Setup menu'))
                }, -1)
            self['epgactions'].csel = self
            self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
                {
                    'left': (self.prevService, _('Goto previous channel')),
                    'right': (self.nextService, _('Goto next channel')),
                    'up': (self.moveUp, _('Goto previous channel')),
                    'down': (self.moveDown, _('Goto next channel'))
                }, -1)
            self['epgcursoractions'].csel = self
        elif self.type == EPG_TYPE_ENHANCED:
            self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
                {
                    'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
                    'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
                    'nextService': (self.nextService, _('Goto next channel')),
                    'prevService': (self.prevService, _('Goto previous channel')),
                    'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
                    'info': (self.Info, _('Show detailed event info')),
                    'infolong': (self.InfoLong, _('Show single epg for current channel')),
                    'menu': (self.createSetup, _('Setup menu'))
                }, -1)
            self['epgactions'].csel = self
            self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
                {
                    'left': (self.prevPage, _('Move up a page')),
                    'right': (self.nextPage, _('Move down a page')),
                    'up': (self.moveUp, _('Goto previous channel')),
                    'down': (self.moveDown, _('Goto next channel'))
                }, -1)
            self['epgcursoractions'].csel = self
        # digit keys: jump directly to a channel number
        self['input_actions'] = HelpableNumberActionMap(self, 'NumberActions',
            {
                '0': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                '1': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                '2': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                '3': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                '4': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                '5': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                '6': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                '7': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                '8': (self.keyNumberGlobal, _('enter number to jump to channel.')),
                '9': (self.keyNumberGlobal, _('enter number to jump to channel.'))
            }, -1)
        self['input_actions'].csel = self
        self.list = []
        self.servicelist = service
        self.currentService = self.session.nav.getCurrentlyPlayingServiceOrGroup()
    elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        if self.type == EPG_TYPE_GRAPH:
            if not config.epgselection.graph_pig.value:
                self.skinName = 'GraphicalEPG'
            else:
                self.skinName = 'GraphicalEPGPIG'
        elif self.type == EPG_TYPE_INFOBARGRAPH:
            self.skinName = 'GraphicalInfoBarEPG'
        # start time: now minus the configured history window, rounded down
        # to the configured grid granularity
        now = time() - int(config.epg.histminutes.value) * 60
        if self.type == EPG_TYPE_GRAPH:
            # NOTE(review): redundant chained assignment (kept byte-identical here)
            self.ask_time = self.ask_time = now - now % (int(config.epgselection.graph_roundto.value) * 60)
        elif self.type == EPG_TYPE_INFOBARGRAPH:
            self.ask_time = self.ask_time = now - now % (int(config.epgselection.infobar_roundto.value) * 60)
        self.closeRecursive = False
        self.bouquetlist_active = False
        self['bouquetlist'] = EPGBouquetList(graphic=graphic)
        self['bouquetlist'].hide()
        self['timeline_text'] = TimelineText(type=self.type,graphic=graphic)
        self['Event'] = Event()
        self['primetime'] = Label(_('PRIMETIME'))
        self['change_bouquet'] = Label(_('CHANGE BOUQUET'))
        self['jump'] = Label(_('JUMP 24 HOURS'))
        self['page'] = Label(_('PAGE UP/DOWN'))
        # one pixmap per timeline tick mark
        self.time_lines = []
        for x in range(0, MAX_TIMELINES):
            pm = Pixmap()
            self.time_lines.append(pm)
            self['timeline%d' % x] = pm
        self['timeline_now'] = Pixmap()
        # keep the "now" marker moving once a minute
        self.updateTimelineTimer = eTimer()
        self.updateTimelineTimer.callback.append(self.moveTimeLines)
        self.updateTimelineTimer.start(60000)
        self['bouquetokactions'] = HelpableActionMap(self, 'OkCancelActions',
            {
                'cancel': (self.BouquetlistHide, _('Close bouquet list.')),
                'OK': (self.BouquetOK, _('Change to bouquet')),
            }, -1)
        self['bouquetokactions'].csel = self
        self["bouquetokactions"].setEnabled(False)
        self['bouquetcursoractions'] = HelpableActionMap(self, 'DirectionActions',
            {
                'left': (self.moveBouquetPageUp, _('Goto previous event')),
                'right': (self.moveBouquetPageDown, _('Goto next event')),
                'up': (self.moveBouquetUp, _('Goto previous channel')),
                'down': (self.moveBouquetDown, _('Goto next channel'))
            }, -1)
        self['bouquetcursoractions'].csel = self
        self["bouquetcursoractions"].setEnabled(False)
        self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
            {
                'left': (self.leftPressed, _('Goto previous event')),
                'right': (self.rightPressed, _('Goto next event')),
                'up': (self.moveUp, _('Goto previous channel')),
                'down': (self.moveDown, _('Goto next channel'))
            }, -1)
        self['epgcursoractions'].csel = self
        self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
            {
                'nextService': (self.nextService, _('Jump forward 24 hours')),
                'prevService': (self.prevService, _('Jump back 24 hours')),
                'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
                'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
                'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
                'info': (self.Info, _('Show detailed event info')),
                'infolong': (self.InfoLong, _('Show single epg for current channel')),
                'tv': (self.Bouquetlist, _('Toggle between bouquet/epg lists')),
                'tvlong': (self.togglePIG, _('Toggle Picture In Graphics')),
                'menu': (self.createSetup, _('Setup menu'))
            }, -1)
        self['epgactions'].csel = self
        # digit keys: navigation shortcuts within the graphical grid
        self['input_actions'] = HelpableNumberActionMap(self, 'NumberActions',
            {
                '1': (self.keyNumberGlobal, _('Reduce time scale')),
                '2': (self.keyNumberGlobal, _('Page up')),
                '3': (self.keyNumberGlobal, _('Increase time scale')),
                '4': (self.keyNumberGlobal, _('page left')),
                '5': (self.keyNumberGlobal, _('Jump to current time')),
                '6': (self.keyNumberGlobal, _('Page right')),
                '7': (self.keyNumberGlobal, _('No of items switch (increase or reduced)')),
                '8': (self.keyNumberGlobal, _('Page down')),
                '9': (self.keyNumberGlobal, _('Jump to prime time')),
                '0': (self.keyNumberGlobal, _('Move to home of list'))
            }, -1)
        self['input_actions'].csel = self
    elif self.type == EPG_TYPE_MULTI:
        self.skinName = 'EPGSelectionMulti'
        self['bouquetlist'] = EPGBouquetList(graphic=graphic)
        self['bouquetlist'].hide()
        self['now_button'] = Pixmap()
        self['next_button'] = Pixmap()
        self['more_button'] = Pixmap()
        self['now_button_sel'] = Pixmap()
        self['next_button_sel'] = Pixmap()
        self['more_button_sel'] = Pixmap()
        self['now_text'] = Label()
        self['next_text'] = Label()
        self['more_text'] = Label()
        self['date'] = Label()
        self.bouquetlist_active = False
        self['bouquetokactions'] = HelpableActionMap(self, 'OkCancelActions',
            {
                'OK': (self.BouquetOK, _('Change to bouquet')),
            }, -1)
        self['bouquetokactions'].csel = self
        self["bouquetokactions"].setEnabled(False)
        self['bouquetcursoractions'] = HelpableActionMap(self, 'DirectionActions',
            {
                'left': (self.moveBouquetPageUp, _('Goto previous event')),
                'right': (self.moveBouquetPageDown, _('Goto next event')),
                'up': (self.moveBouquetUp, _('Goto previous channel')),
                'down': (self.moveBouquetDown, _('Goto next channel'))
            }, -1)
        self['bouquetcursoractions'].csel = self
        self['bouquetcursoractions'].setEnabled(False)
        self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
            {
                'left': (self.leftPressed, _('Goto previous event')),
                'right': (self.rightPressed, _('Goto next event')),
                'up': (self.moveUp, _('Goto previous channel')),
                'down': (self.moveDown, _('Goto next channel'))
            }, -1)
        self['epgcursoractions'].csel = self
        self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
            {
                'nextService': (self.nextPage, _('Move down a page')),
                'prevService': (self.prevPage, _('Move up a page')),
                'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
                'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
                'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
                'info': (self.Info, _('Show detailed event info')),
                'infolong': (self.InfoLong, _('Show single epg for current channel')),
                'tv': (self.Bouquetlist, _('Toggle between bouquet/epg lists')),
                'menu': (self.createSetup, _('Setup menu'))
            }, -1)
        self['epgactions'].csel = self
    # visible time span for the graphical variants; None elsewhere
    if self.type == EPG_TYPE_GRAPH:
        time_epoch=int(config.epgselection.graph_prevtimeperiod.value)
    elif self.type == EPG_TYPE_INFOBARGRAPH:
        time_epoch=int(config.epgselection.infobar_prevtimeperiod.value)
    else:
        time_epoch=None
    self['list'] = EPGList(type=self.type, selChangedCB=self.onSelectionChanged, timer=session.nav.RecordTimer, time_epoch=time_epoch, overjump_empty=config.epgselection.overjump.value, graphic=graphic)
    self.refreshTimer = eTimer()
    self.refreshTimer.timeout.get().append(self.refreshlist)
    self.listTimer = eTimer()
    self.listTimer.callback.append(self.hidewaitingtext)
    # on most CPUs, defer the (expensive) fill via a timer so the screen
    # paints first; on the listed Broadcom chips fill immediately
    if about.getCPUString() != 'BCM7346B2' and about.getCPUString() != 'BCM7425B2':
        self.createTimer = eTimer()
        self.createTimer.callback.append(self.onCreate)
        self.onLayoutFinish.append(self.LayoutFinish)
    else:
        self.onLayoutFinish.append(self.onCreate)
def createSetup(self):
    """Open the setup screen matching the current EPG variant."""
    self.closeEventViewDialog()
    # map each EPG variant to its setup section key; EPG_TYPE_SIMILAR has
    # no setup screen and yields None
    setup_keys = {
        EPG_TYPE_SINGLE: 'epgsingle',
        EPG_TYPE_MULTI: 'epgmulti',
        EPG_TYPE_ENHANCED: 'epgenhanced',
        EPG_TYPE_INFOBAR: 'epginfobar',
        EPG_TYPE_GRAPH: 'epggraphical',
        EPG_TYPE_INFOBARGRAPH: 'epginfobargraphical',
    }
    key = setup_keys.get(self.type)
    if key:
        self.session.openWithCallback(self.onSetupClose, Setup, key)
def onSetupClose(self, test = None):
    """After setup closes, reopen the graph/infobar variants so changed
    settings (skin, sizes) take effect; other variants stay open."""
    if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        if self.type == EPG_TYPE_GRAPH:
            self.close('reopengraph')
        elif self.type == EPG_TYPE_INFOBARGRAPH:
            self.close('reopeninfobargraph')
    else:
        if self.type == EPG_TYPE_INFOBAR:
            self.close('reopeninfobar')
def togglePIG(self):
    """Flip the 'picture in graphics' setting for the graphical EPG,
    persist it, and reopen the screen so the new skin is applied."""
    new_value = not config.epgselection.graph_pig.value
    config.epgselection.graph_pig.setValue(new_value)
    config.epgselection.graph_pig.save()
    configfile.save()
    self.close('reopengraph')
def hidewaitingtext(self):
    """Hide the 'Please wait...' label once the list has been filled
    (triggered by listTimer); multi-EPG also jumps to the playing service."""
    self.listTimer.stop()
    if self.type == EPG_TYPE_MULTI:
        self['list'].moveToService(self.session.nav.getCurrentlyPlayingServiceOrGroup())
    self['lab1'].hide()
def getBouquetServices(self, bouquet):
    """Return the playable services of *bouquet* as ServiceReference objects.

    Directory and marker entries are skipped; an unreadable bouquet yields
    an empty list.
    """
    services = []
    content = eServiceCenter.getInstance().list(bouquet)
    if content is not None:
        entry = content.getNext()
        while entry.valid():
            # ignore non-playable entries (directories and markers)
            if not (entry.flags & (eServiceReference.isDirectory | eServiceReference.isMarker)):
                services.append(ServiceReference(entry))
            entry = content.getNext()
    return services
def LayoutFinish(self):
    """Show the waiting label, then fill the list shortly after via
    createTimer (which fires onCreate) so the screen paints first."""
    self['lab1'].show()
    self.createTimer.start(800)
def onCreate(self):
    """Fill the EPG list for the current variant and set the screen title."""
    # stop the deferred-fill timer if one was started (see __init__)
    if about.getCPUString() != 'BCM7346B2' and about.getCPUString() != 'BCM7425B2':
        self.createTimer.stop()
    serviceref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
    title = None
    self['list'].recalcEntrySize()
    self.BouquetRoot = False
    if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        # '1:7:0' marks the bouquet root (all-services entry)
        if self.StartBouquet.toString().startswith('1:7:0'):
            self.BouquetRoot = True
        self.services = self.getBouquetServices(self.StartBouquet)
        self['list'].fillGraphEPG(self.services, self.ask_time)
        self['list'].moveToService(serviceref)
        self['list'].setCurrentlyPlaying(serviceref)
        self['bouquetlist'].recalcEntrySize()
        self['bouquetlist'].fillBouquetList(self.bouquets)
        self['bouquetlist'].moveToService(self.StartBouquet)
        self['bouquetlist'].setCurrentBouquet(self.StartBouquet)
        self.setTitle(self['bouquetlist'].getCurrentBouquet())
        if self.type == EPG_TYPE_GRAPH:
            self['list'].setShowServiceMode(config.epgselection.graph_servicetitle_mode.value)
            self.moveTimeLines()
            # optionally start with the first channel selected
            if config.epgselection.graph_channel1.value:
                self['list'].instance.moveSelectionTo(0)
        elif self.type == EPG_TYPE_INFOBARGRAPH:
            self['list'].setShowServiceMode(config.epgselection.infobar_servicetitle_mode.value)
            self.moveTimeLines()
    elif self.type == EPG_TYPE_MULTI:
        self['bouquetlist'].recalcEntrySize()
        self['bouquetlist'].fillBouquetList(self.bouquets)
        self['bouquetlist'].moveToService(self.StartBouquet)
        # NOTE(review): fillBouquetList is called twice in this branch --
        # looks redundant; confirm before removing
        self['bouquetlist'].fillBouquetList(self.bouquets)
        self.services = self.getBouquetServices(self.StartBouquet)
        self['list'].fillMultiEPG(self.services, self.ask_time)
        self['list'].setCurrentlyPlaying(serviceref)
        self.setTitle(self['bouquetlist'].getCurrentBouquet())
    elif self.type == EPG_TYPE_SINGLE or self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
        if self.type == EPG_TYPE_SINGLE:
            service = self.currentService
        elif self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
            service = ServiceReference(self.servicelist.getCurrentSelection())
            title = ServiceReference(self.servicelist.getRoot()).getServiceName()
        self['Service'].newService(service.ref)
        # title: "<bouquet> - <service>" when a bouquet name is known
        if title:
            title = title + ' - ' + service.getServiceName()
        else:
            title = service.getServiceName()
        self.setTitle(title)
        self['list'].fillSingleEPG(service)
        self['list'].sortSingleEPG(int(config.epgselection.sort.value))
    else:
        self['list'].fillSimilarList(self.currentService, self.eventid)
    self.listTimer.start(10)
def refreshlist(self):
    """Refill the EPG list in place, keeping the current selection."""
    self.refreshTimer.stop()
    if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        self['list'].fillGraphEPG(None, self.ask_time)
        self.moveTimeLines()
    elif self.type == EPG_TYPE_MULTI:
        self['list'].fillMultiEPG(self.services, self.ask_time)
    elif self.type == EPG_TYPE_SINGLE or self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
        # deliberate best-effort refresh: any failure leaves the old list
        # visible rather than crashing the screen
        try:
            if self.type == EPG_TYPE_SINGLE:
                service = self.currentService
            elif self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
                service = ServiceReference(self.servicelist.getCurrentSelection())
            # restore either the live cursor position or a previously
            # remembered event index (cureventindex is one-shot)
            if not self.cureventindex:
                index = self['list'].getCurrentIndex()
            else:
                index = self.cureventindex
                self.cureventindex = None
            self['list'].fillSingleEPG(service)
            self['list'].sortSingleEPG(int(config.epgselection.sort.value))
            self['list'].setCurrentIndex(index)
        except:
            pass
def moveUp(self):
    """Move the EPG list selection up one entry."""
    self['list'].moveTo(self['list'].instance.moveUp)
def moveDown(self):
    """Move the EPG list selection down one entry."""
    self['list'].moveTo(self['list'].instance.moveDown)
def updEvent(self, dir, visible = True):
    """Move the selected event by *dir* (graph list semantics); refresh the
    timeline markers when the visible range actually changed."""
    ret = self['list'].selEntry(dir, visible)
    if ret:
        self.moveTimeLines(True)
def nextPage(self):
    """Move the EPG list selection down one page."""
    self['list'].moveTo(self['list'].instance.pageDown)
def prevPage(self):
    """Move the EPG list selection up one page."""
    self['list'].moveTo(self['list'].instance.pageUp)
def toTop(self):
    """Move the EPG list selection to the first entry."""
    self['list'].moveTo(self['list'].instance.moveTop)
def toEnd(self):
    """Move the EPG list selection to the last entry."""
    self['list'].moveTo(self['list'].instance.moveEnd)
def leftPressed(self):
    """Left key: previous time slot (multi) or previous event (graph)."""
    if self.type == EPG_TYPE_MULTI:
        self['list'].updateMultiEPG(-1)
    else:
        self.updEvent(-1)
def rightPressed(self):
    """Right key: next time slot (multi) or next event (graph)."""
    if self.type == EPG_TYPE_MULTI:
        self['list'].updateMultiEPG(1)
    else:
        self.updEvent(+1)
def Bouquetlist(self):
    """Toggle the bouquet-list overlay on/off."""
    if not self.bouquetlist_active:
        self.BouquetlistShow()
    else:
        self.BouquetlistHide()
def BouquetlistShow(self):
    """Show the bouquet-list overlay, remembering the current index so a
    cancel can restore it, and swap the active action maps."""
    self.curindex = self['bouquetlist'].l.getCurrentSelectionIndex()
    self["epgcursoractions"].setEnabled(False)
    self["okactions"].setEnabled(False)
    self['bouquetlist'].show()
    self["bouquetokactions"].setEnabled(True)
    self["bouquetcursoractions"].setEnabled(True)
    self.bouquetlist_active = True
def BouquetlistHide(self, cancel=True):
    """Hide the bouquet-list overlay and restore the EPG action maps.

    :param cancel: when True, restore the bouquet selection remembered by
                   BouquetlistShow (i.e. the user backed out)
    """
    self["bouquetokactions"].setEnabled(False)
    self["bouquetcursoractions"].setEnabled(False)
    self['bouquetlist'].hide()
    if cancel:
        self['bouquetlist'].setCurrentIndex(self.curindex)
    self["okactions"].setEnabled(True)
    self["epgcursoractions"].setEnabled(True)
    self.bouquetlist_active = False
def getCurrentBouquet(self):
    """Return the bouquet reference currently in effect: the start bouquet
    when at the bouquet root, else the bouquet-list selection, else the
    service list's root."""
    if self.BouquetRoot:
        return self.StartBouquet
    elif self.has_key('bouquetlist'):
        cur = self["bouquetlist"].l.getCurrentSelection()
        # cur is None when the list is empty; entry format puts the ref at [1]
        return cur and cur[1]
    else:
        return self.servicelist.getRoot()
def BouquetOK(self):
    """Switch to the bouquet selected in the bouquet-list overlay, refill
    the EPG list for it, and hide the overlay keeping the new selection."""
    self.BouquetRoot = False
    now = time() - int(config.epg.histminutes.value) * 60
    self.services = self.getBouquetServices(self.getCurrentBouquet())
    if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        # round the start time down to the configured grid granularity
        # (the duplicated chained assignment `self.ask_time = self.ask_time = ...`
        # from the old code was redundant and has been removed)
        if self.type == EPG_TYPE_GRAPH:
            self.ask_time = now - now % (int(config.epgselection.graph_roundto.value) * 60)
        elif self.type == EPG_TYPE_INFOBARGRAPH:
            self.ask_time = now - now % (int(config.epgselection.infobar_roundto.value) * 60)
        self['list'].resetOffset()
        self['list'].fillGraphEPG(self.services, self.ask_time)
        self.moveTimeLines(True)
    elif self.type == EPG_TYPE_MULTI:
        self['list'].fillMultiEPG(self.services, self.ask_time)
        self['list'].instance.moveSelectionTo(0)
    self.setTitle(self['bouquetlist'].getCurrentBouquet())
    # keep the new bouquet selected (cancel=False)
    self.BouquetlistHide(False)
def moveBouquetUp(self):
    """Move the bouquet-list selection up one entry and redraw the list."""
    self['bouquetlist'].moveTo(self['bouquetlist'].instance.moveUp)
    self['bouquetlist'].fillBouquetList(self.bouquets)
def moveBouquetDown(self):
    """Move the bouquet-list selection down one entry and redraw the list."""
    self['bouquetlist'].moveTo(self['bouquetlist'].instance.moveDown)
    self['bouquetlist'].fillBouquetList(self.bouquets)
def moveBouquetPageUp(self):
    """Move the bouquet-list selection up one page and redraw the list."""
    self['bouquetlist'].moveTo(self['bouquetlist'].instance.pageUp)
    self['bouquetlist'].fillBouquetList(self.bouquets)
def moveBouquetPageDown(self):
    """Move the bouquet-list selection down one page and redraw the list."""
    self['bouquetlist'].moveTo(self['bouquetlist'].instance.pageDown)
    self['bouquetlist'].fillBouquetList(self.bouquets)
def nextBouquet(self):
    """Switch to the next bouquet: via the bouquet list for multi/graph
    variants, via the channel-selection service list for enhanced/infobar
    (only when multi-bouquet mode is enabled)."""
    if self.type == EPG_TYPE_MULTI or self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        self.moveBouquetDown()
        self.BouquetOK()
    elif (self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR) and config.usage.multibouquet.value:
        # remember where we came from so closeScreen can zap back
        self.CurrBouquet = self.servicelist.getCurrentSelection()
        self.CurrService = self.servicelist.getRoot()
        self.servicelist.nextBouquet()
        self.onCreate()
def prevBouquet(self):
    """Switch to the previous bouquet (mirror image of nextBouquet)."""
    if self.type == EPG_TYPE_MULTI or self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        self.moveBouquetUp()
        self.BouquetOK()
    elif (self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR) and config.usage.multibouquet.value:
        # remember where we came from so closeScreen can zap back
        self.CurrBouquet = self.servicelist.getCurrentSelection()
        self.CurrService = self.servicelist.getRoot()
        self.servicelist.prevBouquet()
        self.onCreate()
def nextService(self):
    """Advance to the next channel (enhanced/infobar), jump 24 h forward
    (graph variants), or delegate to the service-change callback."""
    if self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
        self.CurrBouquet = self.servicelist.getCurrentSelection()
        self.CurrService = self.servicelist.getRoot()
        self['list'].instance.moveSelectionTo(0)
        if self.servicelist.inBouquet():
            prev = self.servicelist.getCurrentSelection()
            if prev:
                prev = prev.toString()
                # advance until a selectable service is found (flag 64 marks
                # non-selectable entries) or we wrapped back to the start
                while True:
                    if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
                        self.servicelist.nextBouquet()
                    else:
                        self.servicelist.moveDown()
                    cur = self.servicelist.getCurrentSelection()
                    if not cur or (not (cur.flags & 64)) or cur.toString() == prev:
                        break
        else:
            self.servicelist.moveDown()
        if self.isPlayable():
            self.onCreate()
            # skip channels without EPG data when overjump is enabled
            if not self['list'].getCurrent()[1] and config.epgselection.overjump.value:
                self.nextService()
        else:
            self.nextService()
    elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        self.updEvent(+24)
    elif self.serviceChangeCB:
        self.serviceChangeCB(1, self)
def prevService(self):
    """Go to the previous channel (enhanced/infobar), jump 24 h back
    (graph variants), or delegate to the service-change callback."""
    if self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
        self.CurrBouquet = self.servicelist.getCurrentSelection()
        self.CurrService = self.servicelist.getRoot()
        self['list'].instance.moveSelectionTo(0)
        if self.servicelist.inBouquet():
            prev = self.servicelist.getCurrentSelection()
            if prev:
                prev = prev.toString()
                # step back until a selectable service is found (flag 64 marks
                # non-selectable entries) or we wrapped back to the start
                while True:
                    if config.usage.quickzap_bouquet_change.value:
                        if self.servicelist.atBegin():
                            self.servicelist.prevBouquet()
                    self.servicelist.moveUp()
                    cur = self.servicelist.getCurrentSelection()
                    if not cur or (not (cur.flags & 64)) or cur.toString() == prev:
                        break
        else:
            self.servicelist.moveUp()
        if self.isPlayable():
            self.onCreate()
            # skip channels without EPG data when overjump is enabled
            if not self['list'].getCurrent()[1] and config.epgselection.overjump.value:
                self.prevService()
        else:
            self.prevService()
    elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        self.updEvent(-24)
    elif self.serviceChangeCB:
        self.serviceChangeCB(-1, self)
def enterDateTime(self):
    """Open the date/time input dialog to jump the EPG to a specific time
    (multi and graph variants only)."""
    global mepg_config_initialized
    if self.type == EPG_TYPE_MULTI:
        # the multi-EPG time config is created on first use only
        if not mepg_config_initialized:
            config.misc.prev_mepg_time = ConfigClock(default=time())
            mepg_config_initialized = True
        self.session.openWithCallback(self.onDateTimeInputClosed, TimeDateInput, config.misc.prev_mepg_time)
    elif self.type == EPG_TYPE_GRAPH:
        self.session.openWithCallback(self.onDateTimeInputClosed, TimeDateInput, config.epgselection.graph_prevtime)
    elif self.type == EPG_TYPE_INFOBARGRAPH:
        self.session.openWithCallback(self.onDateTimeInputClosed, TimeDateInput, config.epgselection.infobar_prevtime)
def onDateTimeInputClosed(self, ret):
    """Callback from the date/time dialog; ret is (confirmed, timestamp)."""
    if len(ret) > 1:
        if ret[0]:
            if self.type == EPG_TYPE_MULTI:
                self.ask_time = ret[1]
                self['list'].fillMultiEPG(self.services, ret[1])
            elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
                # NOTE(review): `now` is computed but unused, and ret[1] is
                # never assigned here -- ask_time is only re-rounded to the
                # grid, so the chosen time appears to be ignored in the
                # graph variants; confirm against upstream before changing.
                now = time() - int(config.epg.histminutes.value) * 60
                if self.type == EPG_TYPE_GRAPH:
                    self.ask_time -= self.ask_time % (int(config.epgselection.graph_roundto.value) * 60)
                elif self.type == EPG_TYPE_INFOBARGRAPH:
                    self.ask_time -= self.ask_time % (int(config.epgselection.infobar_roundto.value) * 60)
                l = self['list']
                l.resetOffset()
                l.fillGraphEPG(None, self.ask_time)
                self.moveTimeLines(True)
    # re-open the event-detail dialog that the input dialog covered
    if self.eventviewDialog and (self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH):
        self.infoKeyPressed(True)
def closeScreen(self):
    """Close the EPG screen, zapping back to the service/bouquet that was
    active when the screen opened (when a preview zap happened), and clean
    up any PiP window and event-detail dialog."""
    # identity check for None (was `== None`); also guards the SINGLE
    # variant, which keeps no start state to restore
    if self.type is None:
        self.close()
        return
    if self.type == EPG_TYPE_SINGLE:
        self.close()
        return  # stop and do not continue.
    # restore the original bouquet/service if navigation changed them
    if self.CurrBouquet and self.CurrService and (self.CurrBouquet != self.StartBouquet or self.CurrService != self.StartRef):
        self.zapToNumber(self.StartRef, self.StartBouquet)
    if self.session.nav.getCurrentlyPlayingServiceOrGroup() and self.StartRef and self.session.nav.getCurrentlyPlayingServiceOrGroup().toString() != self.StartRef.toString():
        # NOTE(review): the mixed and/or chain below relies on Python
        # precedence (and binds tighter than or); the infobar part in
        # particular may not group as intended -- confirm before touching.
        if self.zapFunc and ((self.type == EPG_TYPE_GRAPH and config.epgselection.graph_preview_mode.value) or (self.type == EPG_TYPE_MULTI and config.epgselection.multi_preview_mode.value) or (
            self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH and config.epgselection.infobar_preview_mode.value == '1' or config.epgselection.infobar_preview_mode.value == '2') or (self.type == EPG_TYPE_ENHANCED and config.epgselection.enhanced_preview_mode.value)) and self.StartRef and self.StartBouquet:
            # '0:0:0:0:0:0:0:0:0' marks a special (e.g. stream) reference
            if '0:0:0:0:0:0:0:0:0' not in self.StartRef.toString():
                self.zapFunc(None, zapback = True)
            elif '0:0:0:0:0:0:0:0:0' in self.StartRef.toString():
                self.session.nav.playService(self.StartRef)
    # tear down picture-in-picture if this screen opened one
    if self.session.pipshown:
        self.session.pipshown = False
        del self.session.pip
    self.closeEventViewDialog()
    self.close(True)
def infoKeyPressed(self, eventviewopen=False):
    """Show, refresh, or hide the event-detail view for the selected event.

    :param eventviewopen: True when re-invoked to refresh an already-open
                          lightweight dialog (see onDateTimeInputClosed)
    """
    cur = self['list'].getCurrent()
    event = cur[0]
    service = cur[1]
    if event is not None and not self.eventviewDialog and not eventviewopen:
        if self.type != EPG_TYPE_SIMILAR:
            if self.type == EPG_TYPE_INFOBARGRAPH:
                # infobar-graph uses a lightweight dialog instead of a full screen
                self.eventviewDialog = self.session.instantiateDialog(EventViewSimple,event, service, skin='InfoBarEventView')
                self.eventviewDialog.show()
            else:
                self.session.open(EventViewEPGSelect, event, service, callback=self.eventViewCallback, similarEPGCB=self.openSimilarList)
    elif self.eventviewDialog and not eventviewopen:
        # second press closes the lightweight dialog
        self.eventviewDialog.hide()
        del self.eventviewDialog
        self.eventviewDialog = None
    elif event is not None and self.eventviewDialog and eventviewopen:
        # refresh the open dialog with the newly selected event
        if self.type != EPG_TYPE_SIMILAR:
            if self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH:
                self.eventviewDialog.hide()
                self.eventviewDialog = self.session.instantiateDialog(EventViewSimple,event, service, skin='InfoBarEventView')
                self.eventviewDialog.show()
	def redButtonPressed(self):
		"""Red key: open the IMDb lookup unless this press ends a long-press."""
		self.closeEventViewDialog()
		if not self.longbuttonpressed:
			self.openIMDb()
		else:
			# Second event of a long-press pair; just reset the flag.
			self.longbuttonpressed = False
	def redlongButtonPressed(self):
		"""Long red key: toggle the single-EPG sort order."""
		self.closeEventViewDialog()
		self.longbuttonpressed = True
		self.sortEpg()
	def greenButtonPressed(self):
		"""Green key: open the add/edit timer question for the selection."""
		self.closeEventViewDialog()
		if not self.longbuttonpressed:
			self.RecordTimerQuestion(True)
		else:
			self.longbuttonpressed = False
	def greenlongButtonPressed(self):
		"""Long green key: open the AutoTimer overview."""
		self.closeEventViewDialog()
		self.longbuttonpressed = True
		self.showAutoTimerList()
	def yellowButtonPressed(self):
		"""Yellow key: open the EPGSearch plugin for the selection."""
		self.closeEventViewDialog()
		if not self.longbuttonpressed:
			self.openEPGSearch()
		else:
			self.longbuttonpressed = False
	def blueButtonPressed(self):
		"""Blue key: silently add an AutoTimer for the selected event."""
		self.closeEventViewDialog()
		if not self.longbuttonpressed:
			self.addAutoTimer()
		else:
			self.longbuttonpressed = False
	def bluelongButtonPressed(self):
		"""Long blue key: open the AutoTimer overview."""
		self.closeEventViewDialog()
		self.longbuttonpressed = True
		self.showAutoTimerList()
	def blueButtonPressedLong(self):
		"""Alternative long-blue handler driven by InfoBar long-press tracking.

		NOTE(review): overlaps with bluelongButtonPressed(); which one fires
		depends on the key map - confirm before consolidating.
		"""
		self.closeEventViewDialog()
		from InfoBar import InfoBar
		InfoBarInstance = InfoBar.instance
		if InfoBarInstance.LongButtonPressed:
			self.showAutoTimerList()
	def openSimilarList(self, eventid, refstr):
		"""Open an EPG list showing events similar to the given one."""
		self.session.open(EPGSelection, refstr, None, eventid)
	def setServices(self, services):
		"""Replace the displayed service list and rebuild the screen."""
		self.services = services
		self.onCreate()
	def setService(self, service):
		"""Switch to a single service and rebuild the screen."""
		self.currentService = service
		self.onCreate()
	def eventViewCallback(self, setEvent, setService, val):
		"""EventView prev/next paging callback (``val`` is -1 or +1).

		Moves the list selection in the requested direction and feeds the
		new event/service back into the event view.  In multi/graph modes,
		service rows without event data are skipped by recursing.
		"""
		l = self['list']
		old = l.getCurrent()
		if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
			self.updEvent(val, False)
		elif val == -1:
			self.moveUp()
		elif val == +1:
			self.moveDown()
		cur = l.getCurrent()
		if (self.type == EPG_TYPE_MULTI or self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH) and cur[0] is None and cur[1].ref != old[1].ref:
			# Landed on a row without an event: keep moving the same way.
			self.eventViewCallback(setEvent, setService, val)
		else:
			setService(cur[1])
			setEvent(cur[0])
	def eventSelected(self):
		"""OK on an event: show its details."""
		self.infoKeyPressed()
def sortEpg(self):
if self.type == EPG_TYPE_SINGLE or self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
if config.epgselection.sort.value == '0':
config.epgselection.sort.setValue('1')
else:
config.epgselection.sort.setValue('0')
config.epgselection.sort.save()
configfile.save()
self['list'].sortSingleEPG(int(config.epgselection.sort.value))
def OpenSingleEPG(self):
cur = self['list'].getCurrent()
event = cur[0]
serviceref = cur[1].ref
if serviceref is not None:
self.session.open(SingleEPG, serviceref)
def openIMDb(self):
try:
from Plugins.Extensions.IMDb.plugin import IMDB, IMDBEPGSelection
try:
cur = self['list'].getCurrent()
event = cur[0]
name = event.getEventName()
except:
name = ''
self.session.open(IMDB, name, False)
except ImportError:
self.session.open(MessageBox, _('The IMDb plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)
def openEPGSearch(self):
try:
from Plugins.Extensions.EPGSearch.EPGSearch import EPGSearch
try:
cur = self['list'].getCurrent()
event = cur[0]
name = event.getEventName()
except:
name = ''
self.session.open(EPGSearch, name, False)
except ImportError:
self.session.open(MessageBox, _('The EPGSearch plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)
	def addAutoTimer(self):
		"""Create an AutoTimer from the selected event via the editor dialog.

		Shows an info box when the AutoTimer plugin is missing; the list is
		refreshed shortly afterwards so new timer markers appear.
		"""
		try:
			from Plugins.Extensions.AutoTimer.AutoTimerEditor import addAutotimerFromEvent
			cur = self['list'].getCurrent()
			event = cur[0]
			if not event:
				return
			serviceref = cur[1]
			addAutotimerFromEvent(self.session, evt=event, service=serviceref)
			# Give the AutoTimer a moment to materialise before refreshing.
			self.refreshTimer.start(3000)
		except ImportError:
			self.session.open(MessageBox, _('The AutoTimer plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)
	def addAutoTimerSilent(self):
		"""Create an AutoTimer from the selected event without the editor."""
		try:
			from Plugins.Extensions.AutoTimer.AutoTimerEditor import addAutotimerFromEventSilent
			cur = self['list'].getCurrent()
			event = cur[0]
			if not event:
				return
			serviceref = cur[1]
			addAutotimerFromEventSilent(self.session, evt=event, service=serviceref)
			self.refreshTimer.start(3000)
		except ImportError:
			self.session.open(MessageBox, _('The AutoTimer plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)
	def showTimerList(self):
		"""Open the standard timer list editor."""
		from Screens.TimerEdit import TimerEditList
		self.session.open(TimerEditList)
	def showAutoTimerList(self):
		"""Open the AutoTimer overview, pausing the poller while it is shown."""
		global autopoller
		global autotimer
		try:
			# main/autostart are not used directly; importing the plugin
			# module is what matters (it initialises the plugin).
			from Plugins.Extensions.AutoTimer.plugin import main, autostart
			from Plugins.Extensions.AutoTimer.AutoTimer import AutoTimer
			from Plugins.Extensions.AutoTimer.AutoPoller import AutoPoller
			autopoller = AutoPoller()
			autotimer = AutoTimer()
			try:
				autotimer.readXml()
			except SyntaxError as se:
				self.session.open(MessageBox, _('Your config file is not well-formed:\n%s') % str(se), type=MessageBox.TYPE_ERROR, timeout=10)
				return
			# Pause polling while the user edits the AutoTimer list.
			if autopoller is not None:
				autopoller.stop()
			from Plugins.Extensions.AutoTimer.AutoTimerOverview import AutoTimerOverview
			self.session.openWithCallback(self.editCallback, AutoTimerOverview, autotimer)
		except ImportError:
			self.session.open(MessageBox, _('The AutoTimer plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)
	def editCallback(self, session):
		"""Called when the AutoTimer overview closes.

		Persists the AutoTimer definitions and re-parses the EPG, then
		restarts the poller when polling is enabled; otherwise drops the
		module-level poller/timer references.
		"""
		global autopoller
		global autotimer
		if session is not None:
			autotimer.writeXml()
			autotimer.parseEPG()
		if config.plugins.autotimer.autopoll.value:
			if autopoller is None:
				from Plugins.Extensions.AutoTimer.AutoPoller import AutoPoller
				autopoller = AutoPoller()
			autopoller.start()
		else:
			autopoller = None
			autotimer = None
	def timerAdd(self):
		"""Keymap hook: open the timer question for the selection."""
		self.RecordTimerQuestion(True)
	def editTimer(self, timer):
		"""Open the timer entry editor for an existing timer."""
		self.session.open(TimerEntry, timer)
	def removeTimer(self, timer):
		"""Delete the given record timer and reset the green key label."""
		self.closeChoiceBoxDialog()
		# Suppress any configured after-record action before removal.
		timer.afterEvent = AFTEREVENT.NONE
		self.session.nav.RecordTimer.removeEntry(timer)
		self['key_green'].setText(_('Add Timer'))
		self.key_green_choice = self.ADD_TIMER
		self.refreshlist()
	def disableTimer(self, timer):
		"""Disable the given record timer and reset the green key label."""
		self.closeChoiceBoxDialog()
		timer.disable()
		self.session.nav.RecordTimer.timeChanged(timer)
		self['key_green'].setText(_('Add Timer'))
		self.key_green_choice = self.ADD_TIMER
		self.refreshlist()
	def RecordTimerQuestion(self, manual=False):
		"""Offer timer actions for the selected event.

		If a record timer already exists for the event, a choice box with
		delete/edit/disable is shown.  Otherwise either a quick-add menu
		(timer / AutoTimer) pops up, or - when ``manual`` is set - the full
		timer editor is opened directly.
		"""
		cur = self['list'].getCurrent()
		event = cur[0]
		serviceref = cur[1]
		if event is None:
			return
		eventid = event.getEventId()
		# Compare only the first 11 reference fields; the tail may differ.
		refstr = ':'.join(serviceref.ref.toString().split(':')[:11])
		title = None
		for timer in self.session.nav.RecordTimer.timer_list:
			if timer.eit == eventid and ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr:
				cb_func1 = lambda ret: self.removeTimer(timer)
				cb_func2 = lambda ret: self.editTimer(timer)
				cb_func3 = lambda ret: self.disableTimer(timer)
				menu = [(_("Delete timer"), 'CALLFUNC', self.RemoveChoiceBoxCB, cb_func1), (_("Edit timer"), 'CALLFUNC', self.RemoveChoiceBoxCB, cb_func2), (_("Disable timer"), 'CALLFUNC', self.RemoveChoiceBoxCB, cb_func3)]
				title = _("Select action for timer %s:") % event.getEventName()
				break
		else:
			# No existing timer matched.
			if not manual:
				menu = [(_("Add Timer"), 'CALLFUNC', self.ChoiceBoxCB, self.doRecordTimer), (_("Add AutoTimer"), 'CALLFUNC', self.ChoiceBoxCB, self.addAutoTimerSilent)]
				title = "%s?" % event.getEventName()
			else:
				newEntry = RecordTimerEntry(serviceref, checkOldTimers=True, dirname=preferredTimerPath(), *parseEvent(event))
				self.session.openWithCallback(self.finishedAdd, TimerEntry, newEntry)
		if title:
			self.ChoiceBoxDialog = self.session.instantiateDialog(ChoiceBox, title=title, list=menu, keys=['green', 'blue'], skin_name="RecordTimerQuestion")
			# Position the dialog next to the selected row.
			serviceref = eServiceReference(str(self['list'].getCurrent()[1]))
			posy = self['list'].getSelectionPosition(serviceref)
			self.ChoiceBoxDialog.instance.move(ePoint(posy[0]-self.ChoiceBoxDialog.instance.size().width(),self.instance.position().y()+posy[1]))
			self.showChoiceBoxDialog()
	def recButtonPressed(self):
		"""REC key: open the timer question unless ending a long-press."""
		if not self.longbuttonpressed:
			self.RecordTimerQuestion()
		else:
			# Second event of a long-press pair; just reset the flag.
			self.longbuttonpressed = False
	def reclongButtonPressed(self):
		"""Long REC key: create an instant zap timer."""
		self.longbuttonpressed = True
		self.doZapTimer()
	def RemoveChoiceBoxCB(self, choice):
		"""Choice-box callback for existing-timer actions (callable expected)."""
		self.closeChoiceBoxDialog()
		if choice:
			choice(self)
def ChoiceBoxCB(self, choice):
self.closeChoiceBoxDialog()
if choice:
try:
choice()
except:
choice
	def showChoiceBoxDialog(self):
		"""Display the prepared choice box and route all input to it.

		The screen's own action maps are disabled so only the dialog reacts
		to keys; closeChoiceBoxDialog() restores them.
		"""
		self['okactions'].setEnabled(False)
		# Optional action maps exist only for some EPG types/skins.
		if self.has_key('epgcursoractions'):
			self['epgcursoractions'].setEnabled(False)
		self['colouractions'].setEnabled(False)
		self['recordingactions'].setEnabled(False)
		self['epgactions'].setEnabled(False)
		self["dialogactions"].setEnabled(True)
		self.ChoiceBoxDialog['actions'].execBegin()
		self.ChoiceBoxDialog.show()
		if self.has_key('input_actions'):
			self['input_actions'].setEnabled(False)
	def closeChoiceBoxDialog(self):
		"""Tear down the choice box and re-enable the screen's action maps."""
		self["dialogactions"].setEnabled(False)
		if self.ChoiceBoxDialog:
			self.ChoiceBoxDialog['actions'].execEnd()
			self.session.deleteDialog(self.ChoiceBoxDialog)
		self['okactions'].setEnabled(True)
		if self.has_key('epgcursoractions'):
			self['epgcursoractions'].setEnabled(True)
		self['colouractions'].setEnabled(True)
		self['recordingactions'].setEnabled(True)
		self['epgactions'].setEnabled(True)
		if self.has_key('input_actions'):
			self['input_actions'].setEnabled(True)
	def doRecordTimer(self):
		"""Create an instant record timer for the selection."""
		self.doInstantTimer(0)
	def doZapTimer(self):
		"""Create an instant zap timer for the selection."""
		self.doInstantTimer(1)
def doInstantTimer(self, zap):
cur = self['list'].getCurrent()
event = cur[0]
serviceref = cur[1]
if event is None:
return
eventid = event.getEventId()
refstr = serviceref.ref.toString()
newEntry = RecordTimerEntry(serviceref, checkOldTimers=True, *parseEvent(event))
self.InstantRecordDialog = self.session.instantiateDialog(InstantRecordTimerEntry, newEntry, zap)
retval = [True, self.InstantRecordDialog.retval()]
self.session.deleteDialogWithCallback(self.finishedAdd, self.InstantRecordDialog, retval)
	def finishedAdd(self, answer):
		"""Completion callback of the timer editor / instant-timer dialog.

		``answer`` is (accepted, entry).  On acceptance the entry is handed
		to the RecordTimer; detected conflicts are resolved in stages:
		first by auto-increasing the ends of colliding timers, then by
		shifting adjacent boundaries by 30 seconds, and finally by opening
		the TimerSanityConflict screen for the user.
		"""
		if answer[0]:
			entry = answer[1]
			# record() returns a list of conflicting timers, or None on success.
			simulTimerList = self.session.nav.RecordTimer.record(entry)
			if simulTimerList is not None:
				for x in simulTimerList:
					if x.setAutoincreaseEnd(entry):
						self.session.nav.RecordTimer.timeChanged(x)
				simulTimerList = self.session.nav.RecordTimer.record(entry)
				if simulTimerList is not None:
					if not entry.repeated and not config.recording.margin_before.value and not config.recording.margin_after.value and len(simulTimerList) > 1:
						change_time = False
						conflict_begin = simulTimerList[1].begin
						conflict_end = simulTimerList[1].end
						# Back-to-back timers: trade 30 seconds at the boundary.
						if conflict_begin == entry.end:
							entry.end -= 30
							change_time = True
						elif entry.begin == conflict_end:
							entry.begin += 30
							change_time = True
						if change_time:
							simulTimerList = self.session.nav.RecordTimer.record(entry)
					if simulTimerList is not None:
						self.session.openWithCallback(self.finishSanityCorrection, TimerSanityConflict, simulTimerList)
			self["key_green"].setText(_("Change timer"))
			self.key_green_choice = self.REMOVE_TIMER
		else:
			self['key_green'].setText(_('Add Timer'))
			self.key_green_choice = self.ADD_TIMER
		self.refreshlist()
	def finishSanityCorrection(self, answer):
		"""Completion callback of the conflict screen; same handling as add."""
		self.finishedAdd(answer)
	def OK(self):
		"""OK key: commit a pending number zap, otherwise act per config.

		NOTE(review): the *_ok settings of all EPG types are OR-ed together
		instead of checking only the active type - confirm this is intended.
		"""
		if self.zapnumberstarted:
			self.dozumberzap()
		else:
			if config.epgselection.graph_ok.value == 'Zap' or config.epgselection.enhanced_ok.value == 'Zap' or config.epgselection.infobar_ok.value == 'Zap' or config.epgselection.multi_ok.value == 'Zap':
				self.zapTo()
			if config.epgselection.graph_ok.value == 'Zap + Exit' or config.epgselection.enhanced_ok.value == 'Zap + Exit' or config.epgselection.infobar_ok.value == 'Zap + Exit' or config.epgselection.multi_ok.value == 'Zap + Exit':
				self.zap()
	def OKLong(self):
		"""Long OK key: like OK() but driven by the *_oklong settings."""
		if self.zapnumberstarted:
			self.dozumberzap()
		else:
			if config.epgselection.graph_oklong.value == 'Zap' or config.epgselection.enhanced_oklong.value == 'Zap' or config.epgselection.infobar_oklong.value == 'Zap' or config.epgselection.multi_oklong.value == 'Zap':
				self.zapTo()
			if config.epgselection.graph_oklong.value == 'Zap + Exit' or config.epgselection.enhanced_oklong.value == 'Zap + Exit' or config.epgselection.infobar_oklong.value == 'Zap + Exit' or config.epgselection.multi_oklong.value == 'Zap + Exit':
				self.zap()
	def Info(self):
		"""INFO key: event details; graph EPG may be configured for single EPG."""
		if self.type == EPG_TYPE_GRAPH and config.epgselection.graph_info.value == 'Channel Info':
			self.infoKeyPressed()
		elif self.type == EPG_TYPE_GRAPH and config.epgselection.graph_info.value == 'Single EPG':
			self.OpenSingleEPG()
		else:
			self.infoKeyPressed()
	def InfoLong(self):
		"""Long INFO key: single EPG; graph EPG may be configured for details."""
		if self.type == EPG_TYPE_GRAPH and config.epgselection.graph_infolong.value == 'Channel Info':
			self.infoKeyPressed()
		elif self.type == EPG_TYPE_GRAPH and config.epgselection.graph_infolong.value == 'Single EPG':
			self.OpenSingleEPG()
		else:
			self.OpenSingleEPG()
def applyButtonState(self, state):
if state == 0:
self['now_button'].hide()
self['now_button_sel'].hide()
self['next_button'].hide()
self['next_button_sel'].hide()
self['more_button'].hide()
self['more_button_sel'].hide()
self['now_text'].hide()
self['next_text'].hide()
self['more_text'].hide()
self['key_red'].setText('')
else:
if state == 1:
self['now_button_sel'].show()
self['now_button'].hide()
else:
self['now_button'].show()
self['now_button_sel'].hide()
if state == 2:
self['next_button_sel'].show()
self['next_button'].hide()
else:
self['next_button'].show()
self['next_button_sel'].hide()
if state == 3:
self['more_button_sel'].show()
self['more_button'].hide()
else:
self['more_button'].show()
self['more_button_sel'].hide()
	def onSelectionChanged(self):
		"""List-selection hook: refresh event/service info and button state.

		Updates the Event and Service summaries, the now/next/more buttons
		and date label (multi EPG), and the green key text depending on
		whether a record timer already exists for the selection.
		"""
		cur = self['list'].getCurrent()
		event = cur[0]
		self['Event'].newEvent(event)
		if cur[1] is None:
			self['Service'].newService(None)
		else:
			self['Service'].newService(cur[1].ref)
		if self.type == EPG_TYPE_MULTI:
			count = self['list'].getCurrentChangeCount()
			if self.ask_time != -1:
				self.applyButtonState(0)
			elif count > 1:
				self.applyButtonState(3)
			elif count > 0:
				self.applyButtonState(2)
			else:
				self.applyButtonState(1)
			datestr = ''
			if event is not None:
				now = time()
				beg = event.getBeginTime()
				nowTime = localtime(now)
				begTime = localtime(beg)
				# Show a full date only when the event starts on another day.
				if nowTime[2] != begTime[2]:
					datestr = strftime(_('%A %e %b'), begTime)
				else:
					datestr = '%s' % _('Today')
			self['date'].setText(datestr)
		if cur[1] is None or cur[1].getServiceName() == '':
			if self.key_green_choice != self.EMPTY:
				self['key_green'].setText('')
				self.key_green_choice = self.EMPTY
			return
		if event is None:
			if self.key_green_choice != self.EMPTY:
				self['key_green'].setText('')
				self.key_green_choice = self.EMPTY
			return
		serviceref = cur[1]
		eventid = event.getEventId()
		# Compare only the first 11 reference fields; the tail may differ.
		refstr = ':'.join(serviceref.ref.toString().split(':')[:11])
		isRecordEvent = False
		for timer in self.session.nav.RecordTimer.timer_list:
			if timer.eit == eventid and ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr:
				isRecordEvent = True
				break
		if isRecordEvent and self.key_green_choice != self.REMOVE_TIMER:
			self["key_green"].setText(_("Change timer"))
			self.key_green_choice = self.REMOVE_TIMER
		elif not isRecordEvent and self.key_green_choice != self.ADD_TIMER:
			self['key_green'].setText(_('Add Timer'))
			self.key_green_choice = self.ADD_TIMER
		if self.eventviewDialog and (self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH):
			# Keep the lightweight event dialog in sync with the selection.
			self.infoKeyPressed(True)
	def moveTimeLines(self, force = False):
		"""Redraw the timeline header and re-arm the per-minute update timer."""
		self.updateTimelineTimer.start((60 - int(time()) % 60) * 1000)
		self['timeline_text'].setEntries(self['list'], self['timeline_now'], self.time_lines, force)
		# Repaint the list so the "now" marker moves.
		self['list'].l.invalidate()
	def isPlayable(self):
		"""Return True when the channel-list selection is a playable service."""
		current = ServiceReference(self.servicelist.getCurrentSelection())
		return not current.ref.flags & (eServiceReference.isMarker | eServiceReference.isDirectory)
	def setServicelistSelection(self, bouquet, service):
		"""Point the channel list at ``service``, entering ``bouquet`` first."""
		if self.servicelist:
			if self.servicelist.getRoot() != bouquet:
				self.servicelist.clearPath()
				self.servicelist.enterPath(self.servicelist.bouquet_root)
				self.servicelist.enterPath(bouquet)
			self.servicelist.setCurrentSelection(service)
	def closeEventViewDialog(self):
		"""Dismiss the lightweight event-detail dialog if present."""
		if self.eventviewDialog:
			self.eventviewDialog.hide()
			del self.eventviewDialog
			self.eventviewDialog = None
def zap(self):
if self.zapFunc:
self.zapSelectedService()
self.closeEventViewDialog()
self.close(True)
else:
self.closeEventViewDialog()
self.close()
	def zapSelectedService(self, prev=False):
		"""Zap to (or preview) the service under the cursor.

		Remembers the previously playing service in self.prevch so zapTo()
		can detect "no change".  In infobar preview mode '2' the service is
		shown in a PiP window instead; picking the same service again
		closes the PiP and zaps for real.
		"""
		if self.session.pipshown:
			self.prevch = self.session.pip.getCurrentService() and str(self.session.pip.getCurrentService().toString()) or None
		else:
			self.prevch = self.session.nav.getCurrentlyPlayingServiceReference() and str(self.session.nav.getCurrentlyPlayingServiceReference().toString()) or None
		lst = self["list"]
		count = lst.getCurrentChangeCount()
		if count == 0:
			ref = lst.getCurrent()[1]
			if ref is not None:
				if (self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH) and config.epgselection.infobar_preview_mode.value == '2':
					if not self.session.pipshown:
						self.session.pip = self.session.instantiateDialog(PictureInPicture)
						self.session.pip.show()
						self.session.pipshown = True
					# Allow a configured PiP substitute service for this channel.
					n_service = self.pipServiceRelation.get(str(ref.ref), None)
					if n_service is not None:
						service = eServiceReference(n_service)
					else:
						service = ref.ref
					if self.session.pipshown and self.currch == service.toString():
						# Same service picked twice: close PiP and really zap.
						self.session.pipshown = False
						del self.session.pip
						self.zapFunc(ref.ref, bouquet = self.getCurrentBouquet(), preview = False)
						return
					self.session.pip.playService(service)
					self.currch = self.session.pip.getCurrentService() and str(self.session.pip.getCurrentService().toString())
				else:
					self.zapFunc(ref.ref, bouquet = self.getCurrentBouquet(), preview = prev)
					self.currch = self.session.nav.getCurrentlyPlayingServiceReference() and str(self.session.nav.getCurrentlyPlayingServiceReference().toString())
		self['list'].setCurrentlyPlaying(self.session.nav.getCurrentlyPlayingServiceOrGroup())
	def zapTo(self):
		"""Preview-zap to the selected service, or close when nothing changed.

		For a playing media file (reference containing 0:0:0:...:0) a
		resume point is stored first.  If the zap did not change the
		channel, the preview is reverted and the screen closes.
		"""
		if self.session.nav.getCurrentlyPlayingServiceOrGroup() and '0:0:0:0:0:0:0:0:0' in self.session.nav.getCurrentlyPlayingServiceOrGroup().toString():
			from Screens.InfoBarGenerics import setResumePoint
			setResumePoint(self.session)
		if self.zapFunc:
			self.zapSelectedService(True)
			self.refreshTimer.start(2000)
		if not self.currch or self.currch == self.prevch:
			if self.zapFunc:
				# NOTE(review): False is passed positionally here, while other
				# call sites use zapback=True by keyword - confirm intent.
				self.zapFunc(None, False)
				self.closeEventViewDialog()
				self.close('close')
		else:
			self.closeEventViewDialog()
			self.close()
	def keyNumberGlobal(self, number):
		"""Digit-key handler.

		In the graph EPG types the digits are shortcuts (zoom, page,
		prime time, jump to now, ...); in all other types digits accumulate
		into a channel-number zap, auto-committed after 4 digits or when
		the 5-second NumberZapTimer fires.
		"""
		if self.type == EPG_TYPE_GRAPH:
			if number == 1:
				# Zoom in: show a shorter time span (minimum 60 minutes).
				timeperiod = int(config.epgselection.graph_prevtimeperiod.value)
				if timeperiod > 60:
					timeperiod -= 60
					self['list'].setEpoch(timeperiod)
					config.epgselection.graph_prevtimeperiod.setValue(timeperiod)
					self.moveTimeLines()
			elif number == 2:
				self.prevPage()
			elif number == 3:
				# Zoom out: show a longer time span (maximum 300 minutes).
				timeperiod = int(config.epgselection.graph_prevtimeperiod.value)
				if timeperiod < 300:
					timeperiod += 60
					self['list'].setEpoch(timeperiod)
					config.epgselection.graph_prevtimeperiod.setValue(timeperiod)
					self.moveTimeLines()
			elif number == 4:
				self.updEvent(-2)
			elif number == 5:
				# Jump to "now", rounded down to the configured grid.
				now = time() - int(config.epg.histminutes.value) * 60
				self.ask_time = now - now % (int(config.epgselection.graph_roundto.value) * 60)
				self['list'].resetOffset()
				self['list'].fillGraphEPG(None, self.ask_time)
				self.moveTimeLines(True)
			elif number == 6:
				self.updEvent(+2)
			elif number == 7:
				# Toggle between the two row-height presets.
				if config.epgselection.graph_heightswitch.value:
					config.epgselection.graph_heightswitch.setValue(False)
				else:
					config.epgselection.graph_heightswitch.setValue(True)
				self['list'].setItemsPerPage()
				self['list'].fillGraphEPG(None)
				self.moveTimeLines()
			elif number == 8:
				self.nextPage()
			elif number == 9:
				# Jump to the configured prime-time slot (today, or tomorrow
				# when that slot is already more than an hour in the past).
				basetime = localtime(self['list'].getTimeBase())
				basetime = (basetime[0], basetime[1], basetime[2], int(config.epgselection.graph_primetimehour.value), int(config.epgselection.graph_primetimemins.value), 0, basetime[6], basetime[7], basetime[8])
				self.ask_time = mktime(basetime)
				if self.ask_time + 3600 < time():
					self.ask_time += 86400
				self['list'].resetOffset()
				self['list'].fillGraphEPG(None, self.ask_time)
				self.moveTimeLines(True)
			elif number == 0:
				# Back to the top of the list and to "now".
				self.toTop()
				now = time() - int(config.epg.histminutes.value) * 60
				self.ask_time = now - now % (int(config.epgselection.graph_roundto.value) * 60)
				self['list'].resetOffset()
				self['list'].fillGraphEPG(None, self.ask_time)
				self.moveTimeLines()
		elif self.type == EPG_TYPE_INFOBARGRAPH:
			# Same shortcuts as the graph EPG but reading the infobar_*
			# settings; digit 7 (height toggle) has no equivalent here.
			if number == 1:
				timeperiod = int(config.epgselection.infobar_prevtimeperiod.value)
				if timeperiod > 60:
					timeperiod -= 60
					self['list'].setEpoch(timeperiod)
					config.epgselection.infobar_prevtimeperiod.setValue(timeperiod)
					self.moveTimeLines()
			elif number == 2:
				self.prevPage()
			elif number == 3:
				timeperiod = int(config.epgselection.infobar_prevtimeperiod.value)
				if timeperiod < 300:
					timeperiod += 60
					self['list'].setEpoch(timeperiod)
					config.epgselection.infobar_prevtimeperiod.setValue(timeperiod)
					self.moveTimeLines()
			elif number == 4:
				self.updEvent(-2)
			elif number == 5:
				now = time() - int(config.epg.histminutes.value) * 60
				self.ask_time = now - now % (int(config.epgselection.infobar_roundto.value) * 60)
				self['list'].resetOffset()
				self['list'].fillGraphEPG(None, self.ask_time)
				self.moveTimeLines(True)
			elif number == 6:
				self.updEvent(+2)
			elif number == 8:
				self.nextPage()
			elif number == 9:
				basetime = localtime(self['list'].getTimeBase())
				basetime = (basetime[0], basetime[1], basetime[2], int(config.epgselection.infobar_primetimehour.value), int(config.epgselection.infobar_primetimemins.value), 0, basetime[6], basetime[7], basetime[8])
				self.ask_time = mktime(basetime)
				if self.ask_time + 3600 < time():
					self.ask_time += 86400
				self['list'].resetOffset()
				self['list'].fillGraphEPG(None, self.ask_time)
				self.moveTimeLines(True)
			elif number == 0:
				self.toTop()
				now = time() - int(config.epg.histminutes.value) * 60
				self.ask_time = now - now % (int(config.epgselection.infobar_roundto.value) * 60)
				self['list'].resetOffset()
				self['list'].fillGraphEPG(None, self.ask_time)
				self.moveTimeLines()
		else:
			# Accumulate digits for a channel-number zap.
			self.zapnumberstarted = True
			self.NumberZapTimer.start(5000, True)
			if not self.NumberZapField:
				self.NumberZapField = str(number)
			else:
				self.NumberZapField += str(number)
			self.handleServiceName()
			self["number"].setText(self.zaptoservicename+'\n'+self.NumberZapField)
			self["number"].show()
			if len(self.NumberZapField) >= 4:
				self.dozumberzap()
	def dozumberzap(self):
		"""Commit the collected channel digits and zap to that number."""
		self.zapnumberstarted = False
		self.numberEntered(self.service, self.bouquet)
	def handleServiceName(self):
		"""Resolve the digits typed so far to a service name for display."""
		if self.searchNumber:
			self.service, self.bouquet = self.searchNumber(int(self.NumberZapField))
			self.zaptoservicename = ServiceReference(self.service).getServiceName()
	def numberEntered(self, service = None, bouquet = None):
		"""Zap to the resolved service; unresolvable numbers are ignored."""
		if service is not None:
			self.zapToNumber(service, bouquet)
def searchNumberHelper(self, serviceHandler, num, bouquet):
servicelist = serviceHandler.list(bouquet)
if servicelist is not None:
serviceIterator = servicelist.getNext()
while serviceIterator.valid():
if num == serviceIterator.getChannelNum():
return serviceIterator
serviceIterator = servicelist.getNext()
return None
def searchNumber(self, number):
bouquet = self.servicelist.getRoot()
service = None
serviceHandler = eServiceCenter.getInstance()
service = self.searchNumberHelper(serviceHandler, number, bouquet)
if config.usage.multibouquet.value:
service = self.searchNumberHelper(serviceHandler, number, bouquet)
if service is None:
bouquet = self.servicelist.bouquet_root
bouquetlist = serviceHandler.list(bouquet)
if bouquetlist is not None:
bouquet = bouquetlist.getNext()
while bouquet.valid():
if bouquet.flags & eServiceReference.isDirectory:
service = self.searchNumberHelper(serviceHandler, number, bouquet)
if service is not None:
playable = not service.flags & (eServiceReference.isMarker | eServiceReference.isDirectory) or service.flags & eServiceReference.isNumberedMarker
if not playable:
service = None
break
if config.usage.alternative_number_mode.value:
break
bouquet = bouquetlist.getNext()
return service, bouquet
	def zapToNumber(self, service, bouquet):
		"""Select the service found by number zap and rebuild the screen."""
		self["number"].hide()
		self.NumberZapField = None
		self.CurrBouquet = bouquet
		self.CurrService = service
		if service is not None:
			self.setServicelistSelection(bouquet, service)
		self.onCreate()
class SingleEPG(EPGSelection):
	"""Convenience wrapper opening EPGSelection in single-channel mode."""
	def __init__(self, session, service, EPGtype="single"):
		EPGSelection.__init__(self, session, service=service, EPGtype=EPGtype)
		# Reuse the stock EPGSelection skin for this subclass.
		self.skinName = 'EPGSelection'
| schleichdi2/OpenNfr_E2_Gui-6.0 | lib/python/Screens/EpgSelection.py | Python | gpl-2.0 | 58,072 |
# Get weather data from various online sources
# -*- coding: utf-8 -*-
import requests
from wrappers import *
@plugin
class yweather:
    """Weather commands backed by the Yahoo! Weather YQL service.

    NOTE(review): the public YQL endpoint (query.yahooapis.com) has been
    retired by Yahoo, so these commands need a replacement data source.
    """
    @command("weather")
    def weather(self, message):
        """Get the current condition in a given location, from the Yahoo! Weather Service
        """
        w = self.get_yahoo_weather(message.data)
        if isinstance(w, dict):
            return message.reply(data=w,
                                 text="Weather for {0[city]}, {0[country]}: {0[condition]}, {0[temperature]}. Wind Speed: {0[wind_speed]} ({0[wind_direction]}), Wind Chill: {0[wind_chill]}. Visibility {0[visibility]}. High Temp: {0[high]}°C, Low Temp: {0[low]}°C. Sunrise: {0[sunrise]}, Sunset: {0[sunset]}.".format(w)
                                 )
        else:
            # w is an error string from get_yahoo_weather; relay it as-is.
            return message.reply(data=w, text=w)
    @command("forecast")
    def forecast(self, message):
        """Get the 5 day forecast for a given location, from the Yahoo! Weather Service
        """
        w = self.get_yahoo_weather(message.data)
        if isinstance(w, dict):
            return message.reply(data=w['forecast'], text="; ".join(["{0[day]}: {0[condition]}. High: {0[high]}, Low: {0[low]}.".format(x) for x in w['forecast']]))
        else:
            return message.reply(data=w, text=w)
    def get_yahoo_weather(self, place):
        """Fetch and normalise Yahoo! weather data for *place*.

        Returns a dict of location/condition/astronomy/forecast fields, or
        an error string when the place cannot be resolved.  Raises when
        *place* is empty.
        """
        if not place:
            raise Exception("You must provide a place name.")
        # Use Yahoo's yql to build the query
        # NOTE(review): *place* is interpolated unescaped into the YQL
        # string; quote characters in the input would break the query.
        url = 'https://query.yahooapis.com/v1/public/yql?q=select * from weather.forecast where woeid in(select woeid from geo.places(1) where text="' + place + '") and u="c"&format=json'
        # Fetch the results
        r = requests.get(url)
        json = r.json()
        result = json['query']['results']
        if not result:
            return "No weather could be found for " + place + "."
        # Read the pertinent parts of the result, and format them nicely.
        channel = result['channel']
        city = channel['location']['city']
        country = channel['location']['country']
        region = channel['location']['region']
        high = channel['item']['forecast'][0]['high']
        low = channel['item']['forecast'][0]['low']
        # There's a bug in the weather API where windchill is reported as "feels like" in farenheight.
        feelsLike = (float(channel['wind']['chill']) - 32) / 1.8
        chill = feelsLike - float(channel['item']['condition']['temp'])
        windChill = "{0:.2f}°{1}".format(chill, channel['units']['temperature'])
        windDir = "{0:03d}deg".format(int(channel['wind']['direction']))
        windSpeed = "{0} {1}".format(channel['wind']['speed'], channel['units']['speed'])
        humidity = "{0}%".format(channel['atmosphere']['humidity'])
        pressure = "{0}{1}".format(channel['atmosphere']['pressure'], channel['units']['pressure'])
        rising = channel['atmosphere']['rising']
        visibility = "{0}{1}".format(channel['atmosphere']['visibility'], channel['units']['distance'])
        sunrise = channel['astronomy']['sunrise']
        sunset = channel['astronomy']['sunset']
        condition = channel['item']['condition']['text']
        temperature = "{0}°{1}".format(channel['item']['condition']['temp'], channel['units']['temperature'])
        # Build a simplified 5-day forecast list from the raw predictions.
        forecast = []
        for pred in channel['item']['forecast']:
            c = {"day": pred['day'],
                 "condition": pred['text'],
                 "high": "{0}°{1}".format(pred['high'], channel['units']['temperature']),
                 "low": "{0}°{1}".format(pred['low'], channel['units']['temperature'])}
            forecast.append(c)
        return {"city":city,
                "country":country,
                "region":region,
                "high":high,
                "low":low,
                "temperature": temperature,
                "wind_chill":windChill,
                "wind_direction":windDir,
                "wind_speed":windSpeed,
                "humidity":humidity,
                "pressure":pressure,
                "rising":rising,
                "visibility":visibility,
                "sunrise":sunrise,
                "sunset":sunset,
                "condition":condition,
                "forecast":forecast
                }
@plugin
class pollen:
    @command("pollen")
    def pollen(self, message):
        """Get the pollen index for a given location.

        Resolves the place name to a WOEID via Yahoo's YQL geo lookup and
        queries the pollencheck Mashape API for the current forecast.
        """
        # Validate the actual place string (the message object itself is
        # always truthy, so the original "if not message" never fired).
        if not message.data:
            raise Exception("You must provide a place name.")
        # Use Yahoo's yql to build the query
        yurl = 'https://query.yahooapis.com/v1/public/yql?q=select woeid from geo.places(1) where text = "' + message.data + '"&format=json'
        # Fetch the results
        r = requests.get(yurl)
        json = r.json()
        if not json['query']['results']:
            # Fixed NameError: this path referenced an undefined "place".
            return "Could not find " + message.data + "."
        woeid = json['query']['results']['place']['woeid']
        purl = "https://pollencheck.p.mashape.com/api/1/forecasts/" + woeid
        # SECURITY(review): hard-coded API key checked into source control;
        # move it to configuration and revoke this one.
        headers = {
            "X-Mashape-Key": "O6cwEp209Jmsh614NhNE6DpXIUKhp1npOMrjsnvWzdpgHYgzob",
            "Accept": "application/json"
        }
        pollen_data = requests.get(purl, headers=headers)
        p_json = pollen_data.json()
        if not p_json:
            raise Exception("Could not get data for '" + message.data + "', try a large city.")
        return message.reply(data=p_json, text="Total pollen count: {0[maxLevel]}".format(p_json['periods'][0]['combined']))
@plugin
class forecast_io:
    @command("whereis")
    def whereis(self, message):
        """Get the latitude and longitude of a given place."""
        # Validate the place string itself (the message object is always
        # truthy, so the original "if not message" check never fired).
        if not message.data:
            raise Exception("You must provide a place name.")
        ll = self.latlong(message.data)
        if isinstance(ll, dict):
            return message.reply(data=ll, text="Latitude: {}, Longitude: {}".format(ll['latitude'], ll['longitude']))
        else:
            # latlong() returned an error string; relay it verbatim.
            return message.reply(data=ll, text=ll)
    @command("condition")
    def condition(self, message):
        """Get the current weather using the https://developer.forecast.io/docs/v2 API.
        """
        if not message.data:
            raise Exception("You must provide a place name.")
        w = self.get_forecast_io_weather(message.data)
        if isinstance(w, dict):
            return message.reply(data=w,
                                 text="Current condition for {1}: {0[summary]} P({0[precipProbability]}) probability of precipitation. \
            {0[temperature]}°C, feels like {0[apparentTemperature]}°C. Dew Point: {0[dewPoint]}°C. \
            Humidity: {0[humidity]}. Wind Speed: {0[windSpeed]}mph bearing {0[windBearing]:03d}. \
            Cloud Cover: {0[cloudCover]}. Pressure: {0[pressure]}mb. Ozone: {0[ozone]}.".format(w['currently'], message.data))
        else:
            return message.reply(data=w, text=w)
    def latlong(self, place):
        """Resolve *place* to its centroid via Yahoo's YQL geo lookup.

        Returns a dict with 'latitude'/'longitude' strings, or an error
        string when the place is unknown.
        """
        # Use Yahoo's yql to build the query
        if not place:
            raise Exception("You must provide a place name.")
        url = 'https://query.yahooapis.com/v1/public/yql?q=select centroid from geo.places(1) where text = "' + place + '"&format=json'
        # Fetch the results
        r = requests.get(url)
        json = r.json()
        if not json['query']['results']:
            return "Could not find " + place + "."
        return json['query']['results']['place']['centroid']
    def get_forecast_io_weather(self, place):
        """Fetch the forecast.io JSON blob for *place* (UK units).

        Returns the parsed response dict, or passes through the error
        string produced by latlong() when geocoding failed.
        """
        if not place:
            raise Exception("You must provide a place name.")
        ll = self.latlong(place)
        # Geocoding failed: yield the error string instead of a dict.
        if not isinstance(ll, dict):
            return ll
        # SECURITY(review): hard-coded forecast.io API key; the original
        # TODO to remove and regenerate it still stands.
        url = 'https://api.forecast.io/forecast/da05193c059f48ff118de841ccb7cd92/' + ll['latitude'] + "," + ll['longitude'] + "?units=uk"
        # Fetch the results
        r = requests.get(url)
        json = r.json()
        return json
| ellxc/piperbot | plugins/weather.py | Python | gpl-2.0 | 8,113 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
    """Add created_at and ip columns to the DocumentClassification model."""
    dependencies = [
        ('mainapp', '0007_auto_20151023_1012'),
    ]
    operations = [
        migrations.AddField(
            model_name='documentclassification',
            name='created_at',
            # Callable default: existing and new rows get the current time.
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        migrations.AddField(
            model_name='documentclassification',
            name='ip',
            # NOTE(review): bytes default (b'') on a CharField - works on
            # Python 2 / Django 1.x where this migration was generated.
            field=models.CharField(default=b'', max_length=100),
        ),
    ]
| heolin123/day_or_night | mainapp/migrations/0008_auto_20151023_1317.py | Python | gpl-2.0 | 650 |
#
# A PyGtk-based Python Trace Collector window
#
# Copyright (C) 2007 TK Soh <teekaysoh@gmail.com>
#
import pygtk
pygtk.require("2.0")
import gtk
import gobject
import pango
import threading
import Queue
import win32trace
try:
    from gitgtk.gitlib import toutf
except ImportError:
    # Fallback when the gitgtk helpers are unavailable: convert byte
    # strings from the user's preferred locale encoding to UTF-8
    # (Python 2 str semantics; undecodable bytes are replaced).
    import locale
    _encoding = locale.getpreferredencoding()
    def toutf(s):
        return s.decode(_encoding, 'replace').encode('utf-8')
class TraceLog():
    """Top-level PyGTK window that collects output written through the
    win32trace API (by this or other processes) and displays it live.

    A background thread polls win32trace and pushes messages onto a queue;
    a GTK timeout drains the queue on the UI thread.
    """

    def __init__(self):
        self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.window.set_title("Python Trace Collector")

        # construct window
        self.window.set_default_size(700, 400)
        self.main_area = gtk.VBox()
        self.window.add(self.main_area)

        # mimic standard dialog widgets
        self.action_area = gtk.HBox()
        self.main_area.pack_end(self.action_area, False, False, 5)
        sep = gtk.HSeparator()
        self.main_area.pack_end(sep, False, False, 0)
        self.vbox = gtk.VBox()
        self.main_area.pack_end(self.vbox)

        # add python trace output window (read-only, monospace)
        scrolledwindow = gtk.ScrolledWindow()
        scrolledwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
        self.textview = gtk.TextView(buffer=None)
        self.textview.set_editable(False)
        self.textview.modify_font(pango.FontDescription("Monospace"))
        scrolledwindow.add(self.textview)
        self.textview.set_editable(False)
        self.textbuffer = self.textview.get_buffer()
        self.vbox.pack_start(scrolledwindow, True, True)
        self.vbox.show_all()

        # add buttons
        self._button_quit = gtk.Button("Quit")
        self._button_quit.connect('clicked', self._on_ok_clicked)
        self.action_area.pack_end(self._button_quit, False, False, 5)

        self._button_clear = gtk.Button("Clear")
        self._button_clear.connect('clicked', self._on_clear_clicked)
        self.action_area.pack_end(self._button_clear, False, False, 5)

        # add assorted window event handlers
        self.window.connect('map_event', self._on_window_map_event)
        self.window.connect('delete_event', self._on_window_close_clicked)

    def _on_ok_clicked(self, button):
        # Stop the reader thread before leaving the GTK main loop.
        self._stop_read_thread()
        gtk.main_quit()

    def _on_clear_clicked(self, button):
        # Replace the buffer contents with an empty string.
        self.write("", False)

    def _on_window_close_clicked(self, event, param):
        self._stop_read_thread()
        gtk.main_quit()

    def _on_window_map_event(self, event, param):
        # Start collecting only once the window is actually shown.
        self._begin_trace()

    def _begin_trace(self):
        self.queue = Queue.Queue()
        win32trace.InitRead()
        self.write("Collecting Python Trace Output...\n")
        # Poll the queue from the GTK main loop every 10 ms.
        gobject.timeout_add(10, self._process_queue)
        self._start_read_thread()

    def _start_read_thread(self):
        self._read_trace = True
        self.thread1 = threading.Thread(target=self._do_read_trace)
        self.thread1.start()

    def _stop_read_thread(self):
        self._read_trace = False

        # wait for the worker thread to exit, to avoid an unhandled
        # exception in the thread at interpreter shutdown
        self.thread1.join()

    def _process_queue(self):
        """
        Handle all the messages currently in the queue (if any).
        Runs on the GTK main loop; returning True keeps the timeout alive.
        """
        while self.queue.qsize():
            try:
                msg = self.queue.get(0)
                self.write(msg)
            except Queue.Empty:
                pass

        return True

    def _do_read_trace(self):
        """
        Worker thread: forward buffer collected in win32trace to the queue.
        """
        while self._read_trace:
            msg = win32trace.read()
            if msg:
                self.queue.put(msg)

    def write(self, msg, append=True):
        """Append (or replace) *msg* in the text buffer, converted to UTF-8."""
        msg = toutf(msg)
        if append:
            enditer = self.textbuffer.get_end_iter()
            self.textbuffer.insert(enditer, msg)
        else:
            self.textbuffer.set_text(msg)

    def main(self):
        self.window.show_all()
        gtk.main()
def run():
    """Create the trace-collector window and enter the GTK main loop."""
    TraceLog().main()

if __name__ == "__main__":
    run()
"""URL config for business tool"""
from django.conf.urls import url, patterns
from nav.web.business import views
# Route the business-tool pages to their class-based views.
urlpatterns = patterns('',
                       url(r'^$', views.BusinessView.as_view(),
                           name='business-index'),
                       url('^device_availability/$', views.DeviceAvailabilityReport.as_view(),
                           name='business-report-device-availability'),
                       url('^link_availability/$', views.LinkAvailabilityReport.as_view(),
                           name='business-report-link-availability')
                       )
| sigmunau/nav | python/nav/web/business/urls.py | Python | gpl-2.0 | 472 |
# encoding: utf-8
from yast import import_module
import_module('UI')
from yast import *
class HCenter3Client:
    """YaST UI demo: three vertically-centered, stretchable push buttons."""

    def main(self):
        # Full-size dialog; each button is wrapped in VCenter so it floats
        # in the middle of its share of the vertical space.
        UI.OpenDialog(
            Opt("defaultsize"),
            VBox(
                VCenter(PushButton(Opt("vstretch"), "Button 1")),
                VCenter(PushButton(Opt("vstretch"), "Button 2")),
                VCenter(PushButton(Opt("vstretch"), "Button 3"))
            )
        )
        # Block until any user input, then tear the dialog down.
        UI.UserInput()
        UI.CloseDialog()

HCenter3Client().main()
| yast/yast-python-bindings | examples/HCenter3.py | Python | gpl-2.0 | 462 |
import traceback
import sys
from gribapi import *
INPUT = 'rap_130_20120822_2200_001.grb2'  # sample GRIB2 file read by example()
VERBOSE = 1 # verbose error reporting
def example():
    """Print every key in the 'ls' namespace for each GRIB message in INPUT."""
    f = open(INPUT)

    while 1:
        gid = grib_new_from_file(f)
        if gid is None: break

        # Iterate only keys in the 'ls' (listing) namespace.
        iterid = grib_keys_iterator_new(gid, 'ls')

        # Different types of keys can be skipped
        # grib_skip_computed(iterid)
        # grib_skip_coded(iterid)
        # grib_skip_edition_specific(iterid)
        # grib_skip_duplicates(iterid)
        # grib_skip_read_only(iterid)
        # grib_skip_function(iterid)

        while grib_keys_iterator_next(iterid):
            keyname = grib_keys_iterator_get_name(iterid)
            keyval = grib_get_string(iterid, keyname)
            print "%s = %s" % (keyname, keyval)

        # Release the iterator and the message handle.
        grib_keys_iterator_delete(iterid)
        grib_release(gid)

    f.close()
def main():
    """Run example(); report GRIB API errors and return a process exit code."""
    try:
        example()
    except GribInternalError, err:
        if VERBOSE:
            traceback.print_exc(file=sys.stderr)
        else:
            print >> sys.stderr, err.msg

        return 1

if __name__ == "__main__":
    sys.exit(main())
# **********************************************************************
#
# Copyright (c) 2003-2017 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import Test
class AI(Test.A):
    # Servant for Slice interface A; each callX reports its interface letter.
    def callA(self, current=None):
        return "A"
class BI(Test.B, AI):
    # Servant for interface B, inheriting AI's callA implementation.
    def callB(self, current=None):
        return "B"
class CI(Test.C, AI):
    # Servant for interface C, inheriting AI's callA implementation.
    def callC(self, current=None):
        return "C"
class DI(Test.D, BI, CI):
    # Diamond-inheritance servant: provides callA/callB/callC/callD.
    def callD(self, current=None):
        return "D"
class EI(Test.E):
    # Servant for interface E.
    def callE(self, current=None):
        return "E"
class FI(Test.F, EI):
    # Servant for interface F, inheriting EI's callE implementation.
    def callF(self, current=None):
        return "F"
class GI(Test.G):
    # Servant for interface G; also owns the communicator so it can
    # shut the test server down on request.
    def __init__(self, communicator):
        self._communicator = communicator

    def shutdown(self, current=None):
        # Stop the Ice communicator, ending the server's dispatch loop.
        self._communicator.shutdown()

    def callG(self, current=None):
        return "G"
class HI(Test.H, GI):
    # Servant for interface H; forwards communicator handling to GI.
    def __init__(self, communicator):
        GI.__init__(self, communicator)

    def callH(self, current=None):
        return "H"
| ljx0305/ice | python/test/Ice/facets/TestI.py | Python | gpl-2.0 | 1,190 |
__author__ = 'en0'
from http import context
from uuid import uuid4
from redis import Redis
from gevent import spawn
from functools import wraps
class AsyncJob(object):
    """Decorator that runs *target* on a gevent greenlet and tracks job
    state in Redis.

    The decorated function must return the positional-argument tuple to
    pass to *target*. The wrapper returns ``{'job': <uuid>}`` immediately;
    ``jobs:<uuid>:status`` holds an HTTP-like status (202 while running,
    200 on success, 500 on failure) and ``jobs:<uuid>`` holds the result.
    """

    def __init__(self, target):
        # context.db must be a live Redis connection; fail fast otherwise.
        assert isinstance(context.db, Redis)
        self._target = target
        self._db = context.db

    def __call__(self, fn):
        # BUG FIX: the original called wraps(fn) without applying it, so
        # the wrapper lost fn's name/docstring; apply it as a decorator.
        @wraps(fn)
        def _wrapper(*args, **kwargs):
            _args = fn(*args, **kwargs)
            _job_id = str(uuid4())
            _key = "jobs:{0}".format(_job_id)
            _status_key = "jobs:{0}:status".format(_job_id)
            _expire_time = 3600

            # Mark the job accepted/in-progress before spawning the work.
            self._db.set(_status_key, 202)
            self._db.expire(_status_key, _expire_time)

            def task():
                # Catch Exception (not BaseException) so greenlet-control
                # exceptions such as GreenletExit still propagate.
                try:
                    data = self._target(*_args)
                except Exception:
                    self._db.set(_status_key, 500)
                else:
                    self._db.set(_key, data)
                    self._db.set(_status_key, 200)
                    self._db.expire(_key, _expire_time)
                    self._db.expire(_status_key, _expire_time)

            spawn(task)
            return dict(job=_job_id)

        return _wrapper
| en0/PivotalPoker | src/utils/async_job.py | Python | gpl-2.0 | 1,245 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import bibauthorid_config as bconfig
from bibauthorid_comparison import compare_bibrefrecs
from bibauthorid_comparison import clear_all_caches as clear_comparison_caches
from bibauthorid_backinterface import bib_matrix
from bibauthorid_backinterface import get_sql_time
from bibauthorid_backinterface import filter_modified_record_ids
from bibauthorid_general_utils import update_status \
, update_status_final
if bconfig.DEBUG_CHECKS:
    # Debug-only helpers: fuzzy equality for floats, and for the cached
    # matrix values (strings or (value, certainty) tuples).
    def _debug_is_eq(v1, v2):
        eps = 1e-2
        return v1 + eps > v2 and v2 + eps > v1

    def _debug_is_eq_v(vl1, vl2):
        if isinstance(vl1, str) and isinstance(vl2, str):
            return vl1 == vl2
        if isinstance(vl1, tuple) and isinstance(vl2, tuple):
            return _debug_is_eq(vl1[0], vl2[0]) and _debug_is_eq(vl1[1], vl2[1])
        return False
class probability_matrix:
    '''
    This class contains and maintains the comparison
    between all virtual authors. It is able to write
    and read from the database and update the results.
    '''

    def __init__(self, cluster_set, use_cache=False, save_cache=False):
        '''
        Constructs probability matrix. If use_cache is true, it will
        try to load old computations from the database. If save cache
        is true it will save the current results into the database.

        @param cluster_set: A cluster set object, used to initialize
        the matrix.
        '''
        def check_for_cleaning(cur_calc):
            # Periodically drop the comparison caches to bound memory use.
            if cur_calc % 10000000 == 0:
                clear_comparison_caches()

        self._bib_matrix = bib_matrix(cluster_set)

        old_matrix = bib_matrix()

        # Expected number of pairwise comparisons (used for progress only).
        ncl = sum(len(cl.bibs) for cl in cluster_set.clusters)
        expected = ((ncl * (ncl - 1)) / 2)
        if expected == 0:
            expected = 1

        # Reuse cached comparisons only for records unmodified since the
        # cached matrix was created.
        if use_cache and old_matrix.load(cluster_set.last_name):
            cached_bibs = set(filter_modified_record_ids(
                                  old_matrix.get_keys(),
                                  old_matrix.creation_time))
        else:
            cached_bibs = set()

        if save_cache:
            creation_time = get_sql_time()

        cur_calc, opti = 0, 0
        for cl1 in cluster_set.clusters:
            update_status((float(opti) + cur_calc) / expected, "Prob matrix: calc %d, opti %d." % (cur_calc, opti))
            for cl2 in cluster_set.clusters:
                # compare each pair of clusters exactly once (id ordering),
                # skipping pairs already known to be different ("hates")
                if id(cl1) < id(cl2) and not cl1.hates(cl2):
                    for bib1 in cl1.bibs:
                        for bib2 in cl2.bibs:
                            if bib1 in cached_bibs and bib2 in cached_bibs:
                                val = old_matrix[bib1, bib2]
                                if not val:
                                    cur_calc += 1
                                    check_for_cleaning(cur_calc)
                                    val = compare_bibrefrecs(bib1, bib2)
                                else:
                                    opti += 1
                                    if bconfig.DEBUG_CHECKS:
                                        assert _debug_is_eq_v(val, compare_bibrefrecs(bib1, bib2))
                            else:
                                cur_calc += 1
                                check_for_cleaning(cur_calc)
                                val = compare_bibrefrecs(bib1, bib2)

                            self._bib_matrix[bib1, bib2] = val

        clear_comparison_caches()

        if save_cache:
            update_status(1., "saving...")
            self._bib_matrix.store(cluster_set.last_name, creation_time)

        update_status_final("Matrix done. %d calc, %d opt." % (cur_calc, opti))

    def __getitem__(self, bibs):
        return self._bib_matrix[bibs[0], bibs[1]]
| jrbl/invenio | modules/bibauthorid/lib/bibauthorid_prob_matrix.py | Python | gpl-2.0 | 4,586 |
#!/usr/bin/env python
from canari.maltego.message import Entity, EntityField, EntityFieldType, MatchingRule
__author__ = 'jaegeral'
__copyright__ = 'Copyright 2014, Viper Project'
__credits__ = []
__license__ = 'GPL'
__version__ = '0.1'
__maintainer__ = 'jaegeral'
__email__ = 'mail@alexanderjaeger.de'
__status__ = 'Development'

# Names exported by this module (the Maltego entity classes below).
__all__ = [
    'viperentity',
    'viperhash',
    'vipertag',
    'vipername',
    'viperfile',
    'MyViperEntity',  # BUG FIX: was 'myViperEntity', which matches no class
]
class viperentity(Entity):
    # Base Maltego entity for the Viper namespace.
    _namespace_ = 'viper'
# Hash entity; carries the hash algorithm in its 'hashtype' field.
@EntityField(name='hashtype', propname='hashtype', displayname='Hash Type')
class viperhash(viperentity):
    pass
#TODO: Remove
class MyViperEntity(viperentity):
    pass
class viperfile(viperentity):
    # Entity representing a file stored in Viper.
    pass
class vipername(viperentity):
    # Entity representing a sample name.
    pass
class vipertag(viperentity):
    # Entity representing a Viper tag.
    pass
| deralexxx/maltego-viper | src/viper/transforms/common/entities.py | Python | gpl-2.0 | 804 |
from struct import unpack, pack, calcsize
from mobi_languages import LANGUAGES
from lz77 import uncompress
def LOG(*args):
    # No-op logging stub; replace the body to enable debug output.
    pass
# (field name, byte offset within record 0, struct format) for the MOBI header.
MOBI_HDR_FIELDS = (
    ("id", 16, "4s"),
    ("header_len", 20, "I"),
    ("mobi_type", 24, "I"),
    ("encoding", 28, "I"),
    ("UID", 32, "I"),
    ("generator_version", 36, "I"),
    ("reserved", 40, "40s"),
    ("first_nonbook_idx", 80, "I"),
    ("full_name_offs", 84, "I"),
    ("full_name_len", 88, "I"),
    ("locale_highbytes", 92, "H"),
    ("locale_country", 94, "B"),
    ("locale_language", 95, "B"),
    ("input_lang", 96, "I"),
    ("output_lang", 100, "I"),
    ("format_version", 104, "I"),
    ("first_image_idx", 108, "I"),
    ("huff/cdic_record", 112, "I"),
    ("huff/cdic_count", 116, "I"),
    ("datp_record", 120, "I"),
    ("datp_count", 124, "I"),
    ("exth_flags", 128, "I"),
    ("unknowni@132", 132, "32s"),
    ("unknown@164", 164, "I"),
    ("drm_offs", 168, "I"),
    ("drm_count", 172, "I"),
    ("drm_size", 176, "I"),
    ("drm_flags", 180, "I"),
    ("unknown@184", 184, "I"),
    ("unknown@188", 188, "I"),
    ("unknown@192", 192, "H"),
    ("last_image_record", 194, "H"),
    ("unknown@196", 196, "I"),
    ("fcis_record", 200, "I"),
    ("unknown@204", 204, "I"),
    ("flis_record", 208, "I"),
    ("unknown@212", 212, "I"),
    ("extra_data_flags", 242, "H")
)

# struct format of the fixed EXTH header (after the 4-byte signature).
EXTH_FMT = ">4x2I"
'''4x = "EXTH", I = hlen, I = record count'''

# Maps numeric EXTH record types to human-readable metadata names.
EXTH_RECORD_TYPES = {
    1: 'drm server id',
    2: 'drm commerce id',
    3: 'drm ebookbase book id',
    100: 'author',  # list
    101: 'publisher',  # list
    102: 'imprint',
    103: 'description',
    104: 'isbn',  # list
    105: 'subject',  # list
    106: 'publication date',
    107: 'review',
    108: 'contributor',  # list
    109: 'rights',
    110: 'subjectcode',  # list
    111: 'type',
    112: 'source',
    113: 'asin',
    114: 'version number',  # int
    115: 'sample',  # int (or bool)?
    116: 'start reading',
    117: 'adult',
    118: 'retail price',
    119: 'retail price currency',
    201: 'cover offset',  # int
    202: 'thumbnail offset',  # int
    203: 'has fake cover',  # bool?
    208: 'watermark',
    209: 'tamper proof keys',
    401: 'clipping limit',  # int
    402: 'publisher limit',
    404: 'ttsflag',
    501: 'cde type',
    502: 'last update time',
    503: 'updated title'
}

# PalmDoc (TEXtREAd) header layout.
PRC_HDRFMT = '>H2xIHHI'  # Compression,unused,Len,Count,Size,Pos
def parse_palmdb(filename):
    """Open *filename* as a PalmDB and return the palm.Database object."""
    import palm
    db = palm.Database(filename)
    return db
class Book:
    """Metadata reader for Mobipocket ('BOOKMOBI') and older PalmDoc
    ('TEXtREAd') e-book files.

    After construction, is_a_book tells whether parsing succeeded;
    title/author/language/description hold best-effort metadata.
    Written for Python 2 byte-string semantics.
    """

    def __init__(self, fn):
        self.filename = fn
        # Set some fields to defaults
        self.title = fn
        self.author = "??"
        self.language = "??"
        # Rob Addition: Description
        self.description = ""
        self.is_a_book = False
        # NOTE(review): file opened in text mode and never closed on the
        # early-return paths below only after close(); binary mode ('rb')
        # would be required on Python 3 -- confirm target interpreter.
        f = open(fn)
        d = f.read(68)
        f.close()
        # MOBI encoding codes -> Python codec names.
        encodings = {
            1252: 'cp1252',
            65001: 'utf-8'
        }
        supported_types = ('BOOKMOBI', 'TEXtREAd')
        self.type = d[60:68]
        if self.type not in supported_types:
            LOG(1, "Unsupported file type %s" % (self.type))
            return None
        try:
            db = parse_palmdb(fn)
        except:
            return None
        self.is_a_book = True
        # now we have a better guess at the title, use it for now
        self.title = db.name
        self.records = db.records
        rec0 = self.records[0].data
        #LOG(5,repr(rec0))
        if self.type == 'BOOKMOBI':
            LOG(3, "This is a MOBI book")
            self.mobi = {}
            # Unpack each known header field that fits in the record.
            for field, pos, fmt in MOBI_HDR_FIELDS:
                end = pos + calcsize(fmt)
                if (end > len(rec0) or
                    ("header_len" in self.mobi
                     and end > self.mobi["header_len"])):
                    continue
                LOG(4, "field: %s, fmt: %s, @ [%d:%d], data: %s" % (
                    field, fmt, pos, end, repr(rec0[pos:end])))
                (self.mobi[field], ) = unpack(">%s" % fmt, rec0[pos:end])
            LOG(3, "self.mobi: %s" % repr(self.mobi))
            # Get and decode the book name
            if self.mobi['locale_language'] in LANGUAGES:
                lang = LANGUAGES[self.mobi['locale_language']]
                if self.mobi['locale_country'] == 0:
                    LOG(2, "Book language: %s" % lang[0][1])
                    self.language = "%s (%s)" % (lang[0][1], lang[0][0])
                elif self.mobi['locale_country'] in lang:
                    country = lang[self.mobi['locale_country']]
                    LOG(2, "Book language is %s (%s)" % (
                        lang[0][1], country[1]))
                    self.language = "%s (%s-%s)" % (
                        lang[0][1],
                        lang[0][0],
                        country[0]
                    )
            # Full book name lives elsewhere in record 0.
            pos = self.mobi['full_name_offs']
            end = pos + self.mobi['full_name_len']
            self.title = rec0[pos:end].decode(encodings[self.mobi['encoding']])
            LOG(2, "Book name: %s" % self.title)
            if self.mobi['id'] != 'MOBI':
                LOG(0, "Mobi header missing!")
                return None
            if (0x40 & self.mobi['exth_flags']): # check for EXTH
                self.exth = parse_exth(rec0, self.mobi['header_len'] + 16)
                LOG(3, "EXTH header: %s" % repr(self.exth))
                if 'author' in self.exth:
                    self.author = ' & '.join(self.exth['author'])
                else:
                    self.author = "n/a"
                self.rawdata = d
                if (('updated title' in self.exth) and
                    (type(self.exth['updated title']) is str)):
                    self.title = ' '.join(self.exth['updated title'])
                if 'description' in self.exth:
                    self.description = ' <P> '.join(self.exth['description'])
        elif self.type == 'TEXtREAd':
            LOG(2, "This is an older MOBI book")
            self.rawdata = d
            compression, data_len, rec_count, rec_size, pos = unpack(
                PRC_HDRFMT, rec0[:calcsize(PRC_HDRFMT)])
            LOG(3, "compression %d, data_len %d, rec_count %d, rec_size %d" %
                (compression, data_len, rec_count, rec_size))
            # compression == 2 means PalmDoc LZ77.
            if compression == 2:
                data = uncompress(self.records[1].data)
            else:
                data = self.records[1].data
            # Older books keep Dublin Core metadata in the first text record.
            from BeautifulSoup import BeautifulSoup
            soup = BeautifulSoup(data)
            self.metadata = soup.fetch("dc-metadata")
            try:
                self.title = soup.fetch("dc:title")[0].getText()
                self.author = soup.fetch("dc:creator")[0].getText()
                self.language = soup.fetch("dc:language")[0].getText()
            except:
                self.title, self.author, self.language = ("Unknown", "Unknown",
                                                          "en-us")
            try:
                self.description = soup.fetch("dc:description")[0].getText()
            except:
                pass

    def to_html(self):
        """Decompress and join the text records up to the first image record."""
        last_idx = (
            self.mobi['first_image_idx'] if 'mobi' in self.__dict__ else -1)
        return ''.join([uncompress(x.data) for x in self.records[1:last_idx]])
def parse_exth(data, pos):
    """Parse the EXTH metadata block starting at *pos* in *data*.

    Returns a dict mapping record-type names (see EXTH_RECORD_TYPES) to
    lists of values, or None when no EXTH signature is found at *pos*.
    Four-byte payloads are decoded as big-endian unsigned integers.
    """
    if data.find('EXTH') != pos:
        LOG(0, "EXTH header not found where it should be @%d" % pos)
        return None

    hdr_end = pos + calcsize(EXTH_FMT)
    hlen, count = unpack(EXTH_FMT, data[pos:hdr_end])
    LOG(4, "pos: %d, EXTH header len: %d, record count: %d" % (
        pos, hlen, count))

    records = {}
    pos = hdr_end
    for _ in range(count):
        body_start = pos + calcsize(">2I")
        rec_type, rec_len = unpack(">2I", data[pos:body_start])
        value = data[body_start:pos + rec_len]
        if rec_len - 8 == 4:
            # Exactly four payload bytes: interpret as an integer.
            value = unpack(">I", value)[0]
        if rec_type in EXTH_RECORD_TYPES:
            name = EXTH_RECORD_TYPES[rec_type]
            LOG(4, "EXTH record '%s' @%d+%d: '%s'" % (
                name, pos, rec_len - 8, value))
            records.setdefault(name, []).append(value)
        else:
            LOG(4, "Found an unknown EXTH record type %d @%d+%d: '%s'" %
                (rec_type, pos, rec_len - 8, repr(value)))
        pos += rec_len
    return records
| robwebset/script.ebooks | resources/lib/kiehinen/ebook.py | Python | gpl-2.0 | 8,604 |
import time, os, logging
from autotest.client import utils
from autotest.client.shared import error
from virttest import remote, utils_misc
@error.context_aware
def run_multi_vms_file_transfer(test, params, env):
    """
    Transfer a file back and forth between multi VMs for long time.

    1) Boot up two VMs .
    2) Create a large file by dd on host.
    3) Copy this file to VM1.
    4) Compare copied file's md5 with original file.
    5) Copy this file from VM1 to VM2.
    6) Compare copied file's md5 with original file.
    7) Copy this file from VM2 to VM1.
    8) Compare copied file's md5 with original file.
    9) Repeat step 5-8

    @param test: KVM test object.
    @param params: Dictionary with the test parameters.
    @param env: Dictionary with test environment.
    """
    def md5_check(session, orig_md5):
        # Verify the copied file inside the guest matches the original.
        msg = "Compare copied file's md5 with original file."
        error.context(msg, logging.info)
        md5_cmd = "md5sum %s | awk '{print $1}'" % guest_path
        s, o = session.cmd_status_output(md5_cmd)
        if s:
            msg = "Fail to get md5 value from guest. Output is %s" % o
            raise error.TestError(msg)
        new_md5 = o.splitlines()[-1]
        if new_md5 != orig_md5:
            msg = "File changed after transfer host -> VM1. Original md5 value"
            msg += " is %s. Current md5 value is %s" % (orig_md5, new_md5)
            raise error.TestFail(msg)

    vm1 = env.get_vm(params["main_vm"])
    vm1.verify_alive()
    login_timeout = int(params.get("login_timeout", 360))
    vm2 = env.get_vm(params["vms"].split()[-1])
    vm2.verify_alive()
    session_vm1 = vm1.wait_for_login(timeout=login_timeout)
    session_vm2 = vm2.wait_for_login(timeout=login_timeout)

    transfer_timeout = int(params.get("transfer_timeout", 1000))
    username = params.get("username")
    password = params.get("password")
    port = int(params.get("file_transfer_port"))
    if (not port) or (not username) or (not password):
        raise error.TestError("Please set file_transfer_port, username,"
                              " password paramters for guest")
    tmp_dir = params.get("tmp_dir", "/tmp/")
    repeat_time = int(params.get("repeat_time", "10"))
    clean_cmd = params.get("clean_cmd", "rm -f")
    filesize = int(params.get("filesize", 4000))
    # dd writes 10M blocks; ensure at least one block for tiny sizes.
    count = int(filesize / 10)
    if count == 0:
        count = 1

    host_path = os.path.join(tmp_dir, "tmp-%s" %
                             utils_misc.generate_random_string(8))
    cmd = "dd if=/dev/zero of=%s bs=10M count=%d" % (host_path, count)
    guest_path = (tmp_dir + "file_transfer-%s" %
                  utils_misc.generate_random_string(8))

    try:
        error.context("Creating %dMB file on host" % filesize, logging.info)
        utils.run(cmd)
        orig_md5 = utils.hash_file(host_path, method="md5")
        error.context("Transfering file host -> VM1, timeout: %ss" % \
                       transfer_timeout, logging.info)
        t_begin = time.time()
        vm1.copy_files_to(host_path, guest_path, timeout=transfer_timeout)
        t_end = time.time()
        throughput = filesize / (t_end - t_begin)
        logging.info("File transfer host -> VM1 succeed, "
                     "estimated throughput: %.2fMB/s", throughput)
        md5_check(session_vm1, orig_md5)

        ip_vm1 = vm1.get_address()
        ip_vm2 = vm2.get_address()
        for i in range(repeat_time):
            log_vm1 = os.path.join(test.debugdir, "remote_scp_to_vm1_%s.log" %i)
            log_vm2 = os.path.join(test.debugdir, "remote_scp_to_vm2_%s.log" %i)

            msg = "Transfering file VM1 -> VM2, timeout: %ss." % transfer_timeout
            msg += " Repeat: %s/%s" % (i + 1, repeat_time)
            error.context(msg, logging.info)
            t_begin = time.time()
            # BUG FIX: this direction copies *to* VM2, so log to log_vm2
            # (previously both directions logged to log_vm1, and the
            # return value was bound to an unused variable).
            remote.scp_between_remotes(src=ip_vm1, dst=ip_vm2, port=port,
                                       s_passwd=password, d_passwd=password,
                                       s_name=username, d_name=username,
                                       s_path=guest_path, d_path=guest_path,
                                       timeout=transfer_timeout,
                                       log_filename=log_vm2)
            t_end = time.time()
            throughput = filesize / (t_end - t_begin)
            logging.info("File transfer VM1 -> VM2 succeed, "
                         "estimated throughput: %.2fMB/s", throughput)
            md5_check(session_vm2, orig_md5)
            session_vm1.cmd("rm -rf %s" % guest_path)

            msg = "Transfering file VM2 -> VM1, timeout: %ss." % transfer_timeout
            msg += " Repeat: %s/%s" % (i + 1, repeat_time)

            error.context(msg, logging.info)
            t_begin = time.time()
            remote.scp_between_remotes(src=ip_vm2, dst=ip_vm1, port=port,
                                       s_passwd=password, d_passwd=password,
                                       s_name=username, d_name=username,
                                       s_path=guest_path, d_path=guest_path,
                                       timeout=transfer_timeout,
                                       log_filename=log_vm1)
            t_end = time.time()
            throughput = filesize / (t_end - t_begin)
            logging.info("File transfer VM2 -> VM1 succeed, "
                         "estimated throughput: %.2fMB/s", throughput)
            md5_check(session_vm1, orig_md5)
            session_vm2.cmd("%s %s" % (clean_cmd, guest_path))
    finally:
        # Best-effort cleanup of guest/host temp files and sessions.
        try:
            session_vm1.cmd("%s %s" % (clean_cmd, guest_path))
        except Exception:
            pass
        try:
            session_vm2.cmd("%s %s" % (clean_cmd, guest_path))
        except Exception:
            pass
        try:
            os.remove(host_path)
        except OSError:
            pass
        if session_vm1:
            session_vm1.close()
        if session_vm2:
            session_vm2.close()
| ehabkost/virt-test | qemu/tests/multi_vms_file_transfer.py | Python | gpl-2.0 | 6,006 |
# This code is licensed under The GNU General Public License version 2 (GPLv2)
# If you decide to fork this code please obey by the licensing rules.
#
# Thanks go to the-one who initially created the initial speedtest code in early 2014
# That code broke but it didn't take too much to fix it, if you get problems it's most likely
# down to the fact that you need to use another download link that plays nicely with XBMC/Kodi
import xbmc, xbmcplugin
import xbmcgui
import xbmcaddon
import urllib
import time
import os
import sys
import datetime
ADDON_ID = 'plugin.program.jogosEmuladores'
ADDON = xbmcaddon.Addon(id=ADDON_ID)
HOME = ADDON.getAddonInfo('path')  # addon install directory
addon_name="Speed Test"
AddonTitle="[COLOR ghostwhite]Project X[/COLOR] [COLOR lightsteelblue]Wizard[/COLOR]"
# Globals updated by _pbhook() while a test download is in progress.
max_Bps = 0.0
currently_downloaded_bytes = 0.0
#-----------------------------------------------------------------------------------------------------------------
def download(url, dest, dp = None):
    """Download *url* to *dest* with a progress dialog.

    Returns the elapsed wall-clock time in seconds. Errors (including the
    user cancelling via the dialog) are swallowed; the partial time is
    still returned.
    """
    if not dp:
        dp = xbmcgui.DialogProgress()
        dp.create(AddonTitle,"Connecting to server",'[COLOR slategray][I]Testing your internet speed...[/I][/COLOR]', 'Please wait...')
        dp.update(0)
    start_time=time.time()
    try:
        # _pbhook updates the dialog and the module-level speed globals.
        urllib.urlretrieve(url, dest, lambda nb, bs, fs: _pbhook(nb, bs, fs, dp, start_time))
    except:
        pass
    return ( time.time() - start_time )
#-----------------------------------------------------------------------------------------------------------------
def _pbhook(numblocks, blocksize, filesize, dp, start_time):
    """urlretrieve reporthook: track download speed and drive the dialog.

    Updates the globals max_Bps (peak bytes/sec) and
    currently_downloaded_bytes; raises to abort when the user cancels.
    """
    global max_Bps
    global currently_downloaded_bytes
    try:
        percent = min(numblocks * blocksize * 100 / filesize, 100)
        currently_downloaded_bytes = float(numblocks) * blocksize
        currently_downloaded = currently_downloaded_bytes / (1024 * 1024)
        Bps_speed = currently_downloaded_bytes / (time.time() - start_time)
        if Bps_speed > 0:
            eta = (filesize - numblocks * blocksize) / Bps_speed
            # Remember the fastest instantaneous speed seen so far.
            if Bps_speed > max_Bps: max_Bps = Bps_speed
        else:
            eta = 0
        kbps_speed = Bps_speed * 8 / 1024
        mbps_speed = kbps_speed / 1024
        total = float(filesize) / (1024 * 1024)
        mbs = '%.02f MB of %.02f MB' % (currently_downloaded, total)
        dp.update(percent)
    except:
        # filesize unknown/zero: treat the transfer as complete.
        currently_downloaded_bytes = float(filesize)
        percent = 100
        dp.update(percent)
    if dp.iscanceled():
        dp.close()
        raise Exception("Cancelled")
#-----------------------------------------------------------------------------------------------------------------
def make_dir(mypath, dirname):
    ''' Creates sub-directories if they are not found.

    Ensures both *mypath* and *mypath/dirname* exist and returns the
    full path of the subdirectory.
    '''
    import xbmcvfs
    if not xbmcvfs.exists(mypath):
        try:
            xbmcvfs.mkdirs(mypath)
        except:
            xbmcvfs.mkdir(mypath)
    subpath = os.path.join(mypath, dirname)
    if not xbmcvfs.exists(subpath):
        try:
            xbmcvfs.mkdirs(subpath)
        except:
            xbmcvfs.mkdir(subpath)
    return subpath
#-----------------------------------------------------------------------------------------------------------------
def GetEpochStr():
    """Return the current local epoch time, in milliseconds, as a digit string."""
    now = datetime.datetime.now()
    epoch_seconds = time.mktime(now.timetuple()) + (now.microsecond / 1000000.)
    # '%f' renders six decimal places; removing the dot and dropping the
    # last three digits leaves an integer number of milliseconds.
    digits = str('%f' % epoch_seconds).replace('.', '')
    return digits[:-3]
#-----------------------------------------------------------------------------------------------------------------
def runtest(url):
    """Download *url* to a temp file, then show a verdict dialog.

    The temp file is deleted after the run; speed stats come from the
    globals maintained by _pbhook().
    """
    addon_profile_path = xbmc.translatePath(ADDON.getAddonInfo('profile'))
    speed_test_files_dir = make_dir(addon_profile_path, 'speedtestfiles')
    speed_test_download_file = os.path.join(speed_test_files_dir, GetEpochStr() + '.speedtest')
    timetaken = download(url, speed_test_download_file)
    os.remove(speed_test_download_file)
    # Convert bytes/sec to megabits/sec for display.
    avgspeed = ((currently_downloaded_bytes / timetaken) * 8 / ( 1024 * 1024 ))
    maxspeed = (max_Bps * 8/(1024*1024))
    # Map the average speed to a human-readable verdict.
    if avgspeed < 2:
        livestreams = 'Very low quality streams may work.'
        onlinevids = 'Expect buffering, do not try HD.'
        rating = '[COLOR ghostwhite][B] Verdict: [I]Very Poor[/I] | Score: [COLOR slategray][I]1/10[/I][/B][/COLOR]'
    elif avgspeed < 2.5:
        livestreams = 'You should be ok for SD content only.'
        onlinevids = 'SD/DVD quality should be ok, do not try HD.'
        rating = '[COLOR ghostwhite][B][I]Poor[/I] | Score: [COLOR slategray][I]2/10[/I][/B][/COLOR]'
    elif avgspeed < 5:
        livestreams = 'Some HD streams may struggle, SD will be fine.'
        onlinevids = '720p will be fine but some 1080p may struggle.'
        rating = '[COLOR ghostwhite][B][I]OK[/I] | Score: [COLOR slategray][I]4/10[/I][/B][/COLOR]'
    elif avgspeed < 9:
        livestreams = 'All streams including HD should stream fine.'
        onlinevids = 'Movies (720p & 1080p) will stream fine but 3D and 4K will struggle.'
        rating = '[COLOR ghostwhite][B][I]Good[/I] | Score: [COLOR slategray][I]6/10[/I][/B][/COLOR]'
    elif avgspeed < 15:
        livestreams = 'All streams including HD should stream fine'
        onlinevids = 'Movies (720p & 1080p and 3D) will stream fine but 4K may struggle.'
        rating = '[COLOR ghostwhite][B][I]Very good[/I] | Score: [COLOR slategray][I]8/10[/I][/B][/COLOR]'
    else:
        livestreams = 'All streams including HD should stream fine'
        onlinevids = 'You can play all movies (720p, 1080p, 3D and 4K)'
        rating = '[COLOR ghostwhite][B][I]Excellent[/I] | Score: [COLOR slategray][I]10/10[/I][/B][/COLOR]'
    print "Average Speed: " + str(avgspeed)
    print "Max. Speed: " + str(maxspeed)
    dialog = xbmcgui.Dialog()
    ok = dialog.ok(
        '[COLOR lightsteelblue][B]Your Result:[/COLOR][/B] ' + rating,
#        '[COLOR blue]Duration:[/COLOR] %.02f secs' % timetaken,
        '[COLOR lightsteelblue][B]Live Streams:[/COLOR][/B] ' + livestreams,
        '[COLOR lightsteelblue][B]Movie Streams:[/COLOR][/B] ' + onlinevids,
        '[COLOR lightsteelblue][B]Duration:[/COLOR][/B] %.02f secs ' % timetaken + '[COLOR lightsteelblue][B]Average Speed:[/B][/COLOR] %.02f Mb/s ' % avgspeed + '[COLOR lightsteelblue][B]Max Speed:[/B][/COLOR] %.02f Mb/s ' % maxspeed,
#        '[COLOR blue]Maximum Speed:[/COLOR] %.02f Mb/s ' % maxspeed,
        )
) | repotvsupertuga/repo | plugin.program.jogosEmuladores/speedtest.py | Python | gpl-2.0 | 6,577 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
About
"""
__version__ = '0.2.2'
| ppizarror/korektor | bin/easyprocess/about.py | Python | gpl-2.0 | 83 |
#!/usr/bin/python
#
# Copyright (C) 2010, 2011 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Script for unittesting the RAPI client module"""
import unittest
import warnings
import pycurl
from ganeti import opcodes
from ganeti import constants
from ganeti import http
from ganeti import serializer
from ganeti import utils
from ganeti import query
from ganeti import objects
from ganeti import rapi
from ganeti import errors
import ganeti.rapi.testutils
from ganeti.rapi import connector
from ganeti.rapi import rlib2
from ganeti.rapi import client
import testutils
# List of resource handlers which aren't used by the RAPI client
# (the root resources have no client-side wrapper).
_KNOWN_UNUSED = set([
  rlib2.R_root,
  rlib2.R_2,
  ])

# Global variable for collecting used handlers; populated by RapiMock
# during the test run and checked for coverage afterwards.
_used_handlers = None
class RapiMock(object):
  """In-memory stand-in for a RAPI server: resolves paths through the
  real URL mapper, records which handler was hit, and replays canned
  (code, body) responses."""

  def __init__(self):
    self._mapper = connector.Mapper()
    self._responses = []
    self._last_handler = None
    self._last_req_data = None

  def ResetResponses(self):
    # Drop any queued canned responses.
    del self._responses[:]

  def AddResponse(self, response, code=200):
    # Insert at the front; FetchResponse pops from the back, so responses
    # are served in the order they were added.
    self._responses.insert(0, (code, response))

  def CountPending(self):
    return len(self._responses)

  def GetLastHandler(self):
    return self._last_handler

  def GetLastRequestData(self):
    return self._last_req_data

  def FetchResponse(self, path, method, headers, request_body):
    """Resolve *path* and return a (code, headers, body) tuple.

    HTTP errors from routing/dispatch become the response; otherwise the
    next canned response is popped.
    """
    self._last_req_data = request_body

    try:
      (handler_cls, items, args) = self._mapper.getController(path)

      # Record handler as used
      _used_handlers.add(handler_cls)

      self._last_handler = handler_cls(items, args, None)
      if not hasattr(self._last_handler, method.upper()):
        raise http.HttpNotImplemented(message="Method not implemented")

    except http.HttpException, ex:
      code = ex.code
      response = ex.message
    else:
      if not self._responses:
        raise Exception("No responses")

      (code, response) = self._responses.pop()

    return (code, NotImplemented, response)
class TestConstants(unittest.TestCase):
  """Checks that RAPI client constants mirror their server-side sources."""

  def test(self):
    self.assertEqual(client.GANETI_RAPI_PORT, constants.DEFAULT_RAPI_PORT)
    self.assertEqual(client.GANETI_RAPI_VERSION, constants.RAPI_VERSION)
    self.assertEqual(client.HTTP_APP_JSON, http.HTTP_APP_JSON)
    self.assertEqual(client._REQ_DATA_VERSION_FIELD, rlib2._REQ_DATA_VERSION)
    self.assertEqual(client.JOB_STATUS_QUEUED, constants.JOB_STATUS_QUEUED)
    self.assertEqual(client.JOB_STATUS_WAITING, constants.JOB_STATUS_WAITING)
    self.assertEqual(client.JOB_STATUS_CANCELING,
                     constants.JOB_STATUS_CANCELING)
    self.assertEqual(client.JOB_STATUS_RUNNING, constants.JOB_STATUS_RUNNING)
    self.assertEqual(client.JOB_STATUS_CANCELED, constants.JOB_STATUS_CANCELED)
    self.assertEqual(client.JOB_STATUS_SUCCESS, constants.JOB_STATUS_SUCCESS)
    self.assertEqual(client.JOB_STATUS_ERROR, constants.JOB_STATUS_ERROR)
    self.assertEqual(client.JOB_STATUS_PENDING, constants.JOBS_PENDING)
    self.assertEqual(client.JOB_STATUS_FINALIZED, constants.JOBS_FINALIZED)
    self.assertEqual(client.JOB_STATUS_ALL, constants.JOB_STATUS_ALL)

    # Node evacuation
    self.assertEqual(client.NODE_EVAC_PRI, constants.NODE_EVAC_PRI)
    self.assertEqual(client.NODE_EVAC_SEC, constants.NODE_EVAC_SEC)
    self.assertEqual(client.NODE_EVAC_ALL, constants.NODE_EVAC_ALL)

    # Legacy name
    self.assertEqual(client.JOB_STATUS_WAITLOCK, constants.JOB_STATUS_WAITING)

    # RAPI feature strings
    self.assertEqual(client._INST_CREATE_REQV1, rlib2._INST_CREATE_REQV1)
    self.assertEqual(client.INST_CREATE_REQV1, rlib2._INST_CREATE_REQV1)
    self.assertEqual(client._INST_REINSTALL_REQV1, rlib2._INST_REINSTALL_REQV1)
    self.assertEqual(client.INST_REINSTALL_REQV1, rlib2._INST_REINSTALL_REQV1)
    self.assertEqual(client._NODE_MIGRATE_REQV1, rlib2._NODE_MIGRATE_REQV1)
    self.assertEqual(client.NODE_MIGRATE_REQV1, rlib2._NODE_MIGRATE_REQV1)
    self.assertEqual(client._NODE_EVAC_RES1, rlib2._NODE_EVAC_RES1)
    self.assertEqual(client.NODE_EVAC_RES1, rlib2._NODE_EVAC_RES1)

  def testErrors(self):
    self.assertEqual(client.ECODE_ALL, errors.ECODE_ALL)

    # Make sure all error codes are in both RAPI client and errors module
    for name in filter(lambda s: (s.startswith("ECODE_") and s != "ECODE_ALL"),
                       dir(client)):
      value = getattr(client, name)
      self.assertEqual(value, getattr(errors, name))
      self.assertTrue(value in client.ECODE_ALL)
      self.assertTrue(value in errors.ECODE_ALL)
self.assertTrue(value in errors.ECODE_ALL)
class RapiMockTest(unittest.TestCase):
  """Exercises the dispatch behaviour of the L{RapiMock} helper."""

  def test404(self):
    """An unknown URI must yield HTTP 404 with no body."""
    (status, _, payload) = RapiMock().FetchResponse("/foo", "GET", None, None)
    self.assertEqual(status, 404)
    self.assertTrue(payload is None)

  def test501(self):
    """An unsupported method on a known URI must yield HTTP 501."""
    (status, _, payload) = \
      RapiMock().FetchResponse("/version", "POST", None, None)
    self.assertEqual(status, 501)
    self.assertEqual(payload, "Method not implemented")

  def test200(self):
    """A queued response must be served with HTTP 200 by the right handler."""
    mock = RapiMock()
    mock.AddResponse("2")
    (status, _, payload) = mock.FetchResponse("/version", "GET", None, None)
    self.assertEqual(200, status)
    self.assertEqual("2", payload)
    self.assertTrue(isinstance(mock.GetLastHandler(), rlib2.R_version))
def _FakeNoSslPycurlVersion():
# Note: incomplete version tuple
return (3, "7.16.0", 462848, "mysystem", 1581, None, 0)
def _FakeFancySslPycurlVersion():
# Note: incomplete version tuple
return (3, "7.16.0", 462848, "mysystem", 1581, "FancySSL/1.2.3", 0)
def _FakeOpenSslPycurlVersion():
# Note: incomplete version tuple
return (2, "7.15.5", 462597, "othersystem", 668, "OpenSSL/0.9.8c", 0)
def _FakeGnuTlsPycurlVersion():
# Note: incomplete version tuple
return (3, "7.18.0", 463360, "somesystem", 1581, "GnuTLS/2.0.4", 0)
class TestExtendedConfig(unittest.TestCase):
  """Tests for the curl configuration options of L{client.GanetiRapiClient}.

  Bug fix: the timeout test at the end of this class was previously named
  C{testCertVerifyCapath}, which duplicated (and therefore shadowed) the
  capath test above it, so the capath test never ran.  It is now named
  C{testTimeouts} so both tests are executed.

  """
  def testAuth(self):
    """Username/password must be forwarded as HTTP basic authentication."""
    cl = client.GanetiRapiClient("master.example.com",
                                 username="user", password="pw",
                                 curl_factory=lambda: rapi.testutils.FakeCurl(RapiMock()))
    curl = cl._CreateCurl()
    self.assertEqual(curl.getopt(pycurl.HTTPAUTH), pycurl.HTTPAUTH_BASIC)
    self.assertEqual(curl.getopt(pycurl.USERPWD), "user:pw")

  def testInvalidAuth(self):
    """Supplying only one of username/password must be rejected."""
    # No username
    self.assertRaises(client.Error, client.GanetiRapiClient,
                      "master-a.example.com", password="pw")
    # No password
    self.assertRaises(client.Error, client.GanetiRapiClient,
                      "master-b.example.com", username="user")

  def testCertVerifyInvalidCombinations(self):
    """The curl CA bundle must be mutually exclusive with cafile/capath."""
    self.assertRaises(client.Error, client.GenericCurlConfig,
                      use_curl_cabundle=True, cafile="cert1.pem")
    self.assertRaises(client.Error, client.GenericCurlConfig,
                      use_curl_cabundle=True, capath="certs/")
    self.assertRaises(client.Error, client.GenericCurlConfig,
                      use_curl_cabundle=True,
                      cafile="cert1.pem", capath="certs/")

  def testProxySignalVerifyHostname(self):
    """Proxy, signal and hostname-verification options must be translated
    into the corresponding curl options for both OpenSSL and GnuTLS builds.

    """
    for use_gnutls in [False, True]:
      if use_gnutls:
        pcverfn = _FakeGnuTlsPycurlVersion
      else:
        pcverfn = _FakeOpenSslPycurlVersion

      for proxy in ["", "http://127.0.0.1:1234"]:
        for use_signal in [False, True]:
          for verify_hostname in [False, True]:
            cfgfn = client.GenericCurlConfig(proxy=proxy, use_signal=use_signal,
                                             verify_hostname=verify_hostname,
                                             _pycurl_version_fn=pcverfn)

            curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
            cl = client.GanetiRapiClient("master.example.com",
                                         curl_config_fn=cfgfn,
                                         curl_factory=curl_factory)

            curl = cl._CreateCurl()
            self.assertEqual(curl.getopt(pycurl.PROXY), proxy)
            # NOSIGNAL is the inverse of use_signal
            self.assertEqual(curl.getopt(pycurl.NOSIGNAL), not use_signal)

            if verify_hostname:
              # 2 = verify that the certificate matches the host name
              self.assertEqual(curl.getopt(pycurl.SSL_VERIFYHOST), 2)
            else:
              self.assertEqual(curl.getopt(pycurl.SSL_VERIFYHOST), 0)

  def testNoCertVerify(self):
    """Without CA settings no peer verification must be configured."""
    cfgfn = client.GenericCurlConfig()

    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)

    curl = cl._CreateCurl()
    self.assertFalse(curl.getopt(pycurl.SSL_VERIFYPEER))
    self.assertFalse(curl.getopt(pycurl.CAINFO))
    self.assertFalse(curl.getopt(pycurl.CAPATH))

  def testCertVerifyCurlBundle(self):
    """Using curl's own CA bundle must enable peer verification only."""
    cfgfn = client.GenericCurlConfig(use_curl_cabundle=True)

    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)

    curl = cl._CreateCurl()
    self.assert_(curl.getopt(pycurl.SSL_VERIFYPEER))
    self.assertFalse(curl.getopt(pycurl.CAINFO))
    self.assertFalse(curl.getopt(pycurl.CAPATH))

  def testCertVerifyCafile(self):
    """A CA file must be passed via CAINFO with peer verification on."""
    mycert = "/tmp/some/UNUSED/cert/file.pem"
    cfgfn = client.GenericCurlConfig(cafile=mycert)

    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)

    curl = cl._CreateCurl()
    self.assert_(curl.getopt(pycurl.SSL_VERIFYPEER))
    self.assertEqual(curl.getopt(pycurl.CAINFO), mycert)
    self.assertFalse(curl.getopt(pycurl.CAPATH))

  def testCertVerifyCapath(self):
    """A CA directory must be passed via CAPATH on an OpenSSL build."""
    certdir = "/tmp/some/UNUSED/cert/directory"
    pcverfn = _FakeOpenSslPycurlVersion
    cfgfn = client.GenericCurlConfig(capath=certdir,
                                     _pycurl_version_fn=pcverfn)

    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)

    curl = cl._CreateCurl()
    self.assert_(curl.getopt(pycurl.SSL_VERIFYPEER))
    self.assertEqual(curl.getopt(pycurl.CAPATH), certdir)
    self.assertFalse(curl.getopt(pycurl.CAINFO))

  def testCertVerifyCapathGnuTls(self):
    """Using capath with a GnuTLS build of curl must raise an error."""
    certdir = "/tmp/some/UNUSED/cert/directory"
    pcverfn = _FakeGnuTlsPycurlVersion
    cfgfn = client.GenericCurlConfig(capath=certdir,
                                     _pycurl_version_fn=pcverfn)

    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)

    self.assertRaises(client.Error, cl._CreateCurl)

  def testCertVerifyNoSsl(self):
    """Requesting verification with an SSL-less curl must raise an error."""
    certdir = "/tmp/some/UNUSED/cert/directory"
    pcverfn = _FakeNoSslPycurlVersion
    cfgfn = client.GenericCurlConfig(capath=certdir,
                                     _pycurl_version_fn=pcverfn)

    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)

    self.assertRaises(client.Error, cl._CreateCurl)

  def testCertVerifyFancySsl(self):
    """An unrecognized SSL library must raise C{NotImplementedError}."""
    certdir = "/tmp/some/UNUSED/cert/directory"
    pcverfn = _FakeFancySslPycurlVersion
    cfgfn = client.GenericCurlConfig(capath=certdir,
                                     _pycurl_version_fn=pcverfn)

    curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
    cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                 curl_factory=curl_factory)

    self.assertRaises(NotImplementedError, cl._CreateCurl)

  def testTimeouts(self):
    """Connect and overall timeouts must be passed through to curl.

    Renamed from a duplicate C{testCertVerifyCapath} definition that used
    to shadow the real capath test above.

    """
    for connect_timeout in [None, 1, 5, 10, 30, 60, 300]:
      for timeout in [None, 1, 30, 60, 3600, 24 * 3600]:
        cfgfn = client.GenericCurlConfig(connect_timeout=connect_timeout,
                                         timeout=timeout)

        curl_factory = lambda: rapi.testutils.FakeCurl(RapiMock())
        cl = client.GanetiRapiClient("master.example.com", curl_config_fn=cfgfn,
                                     curl_factory=curl_factory)

        curl = cl._CreateCurl()
        self.assertEqual(curl.getopt(pycurl.CONNECTTIMEOUT), connect_timeout)
        self.assertEqual(curl.getopt(pycurl.TIMEOUT), timeout)
class GanetiRapiClientTests(testutils.GanetiTestCase):
def setUp(self):
testutils.GanetiTestCase.setUp(self)
self.rapi = RapiMock()
self.curl = rapi.testutils.FakeCurl(self.rapi)
self.client = client.GanetiRapiClient("master.example.com",
curl_factory=lambda: self.curl)
def assertHandler(self, handler_cls):
self.failUnless(isinstance(self.rapi.GetLastHandler(), handler_cls))
def assertQuery(self, key, value):
self.assertEqual(value, self.rapi.GetLastHandler().queryargs.get(key, None))
def assertItems(self, items):
self.assertEqual(items, self.rapi.GetLastHandler().items)
def assertBulk(self):
self.assertTrue(self.rapi.GetLastHandler().useBulk())
def assertDryRun(self):
self.assertTrue(self.rapi.GetLastHandler().dryRun())
def assertUseForce(self):
self.assertTrue(self.rapi.GetLastHandler().useForce())
def testEncodeQuery(self):
query = [
("a", None),
("b", 1),
("c", 2),
("d", "Foo"),
("e", True),
]
expected = [
("a", ""),
("b", 1),
("c", 2),
("d", "Foo"),
("e", 1),
]
self.assertEqualValues(self.client._EncodeQuery(query),
expected)
# invalid types
for i in [[1, 2, 3], {"moo": "boo"}, (1, 2, 3)]:
self.assertRaises(ValueError, self.client._EncodeQuery, [("x", i)])
def testCurlSettings(self):
self.rapi.AddResponse("2")
self.assertEqual(2, self.client.GetVersion())
self.assertHandler(rlib2.R_version)
# Signals should be disabled by default
self.assert_(self.curl.getopt(pycurl.NOSIGNAL))
# No auth and no proxy
self.assertFalse(self.curl.getopt(pycurl.USERPWD))
self.assert_(self.curl.getopt(pycurl.PROXY) is None)
# Content-type is required for requests
headers = self.curl.getopt(pycurl.HTTPHEADER)
self.assert_("Content-type: application/json" in headers)
def testHttpError(self):
self.rapi.AddResponse(None, code=404)
try:
self.client.GetJobStatus(15140)
except client.GanetiApiError, err:
self.assertEqual(err.code, 404)
else:
self.fail("Didn't raise exception")
def testGetVersion(self):
self.rapi.AddResponse("2")
self.assertEqual(2, self.client.GetVersion())
self.assertHandler(rlib2.R_version)
def testGetFeatures(self):
for features in [[], ["foo", "bar", "baz"]]:
self.rapi.AddResponse(serializer.DumpJson(features))
self.assertEqual(features, self.client.GetFeatures())
self.assertHandler(rlib2.R_2_features)
def testGetFeaturesNotFound(self):
self.rapi.AddResponse(None, code=404)
self.assertEqual([], self.client.GetFeatures())
def testGetOperatingSystems(self):
self.rapi.AddResponse("[\"beos\"]")
self.assertEqual(["beos"], self.client.GetOperatingSystems())
self.assertHandler(rlib2.R_2_os)
def testGetClusterTags(self):
self.rapi.AddResponse("[\"tag\"]")
self.assertEqual(["tag"], self.client.GetClusterTags())
self.assertHandler(rlib2.R_2_tags)
def testAddClusterTags(self):
self.rapi.AddResponse("1234")
self.assertEqual(1234,
self.client.AddClusterTags(["awesome"], dry_run=True))
self.assertHandler(rlib2.R_2_tags)
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testDeleteClusterTags(self):
self.rapi.AddResponse("5107")
self.assertEqual(5107, self.client.DeleteClusterTags(["awesome"],
dry_run=True))
self.assertHandler(rlib2.R_2_tags)
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testGetInfo(self):
self.rapi.AddResponse("{}")
self.assertEqual({}, self.client.GetInfo())
self.assertHandler(rlib2.R_2_info)
def testGetInstances(self):
self.rapi.AddResponse("[]")
self.assertEqual([], self.client.GetInstances(bulk=True))
self.assertHandler(rlib2.R_2_instances)
self.assertBulk()
def testGetInstance(self):
self.rapi.AddResponse("[]")
self.assertEqual([], self.client.GetInstance("instance"))
self.assertHandler(rlib2.R_2_instances_name)
self.assertItems(["instance"])
def testGetInstanceInfo(self):
self.rapi.AddResponse("21291")
self.assertEqual(21291, self.client.GetInstanceInfo("inst3"))
self.assertHandler(rlib2.R_2_instances_name_info)
self.assertItems(["inst3"])
self.assertQuery("static", None)
self.rapi.AddResponse("3428")
self.assertEqual(3428, self.client.GetInstanceInfo("inst31", static=False))
self.assertHandler(rlib2.R_2_instances_name_info)
self.assertItems(["inst31"])
self.assertQuery("static", ["0"])
self.rapi.AddResponse("15665")
self.assertEqual(15665, self.client.GetInstanceInfo("inst32", static=True))
self.assertHandler(rlib2.R_2_instances_name_info)
self.assertItems(["inst32"])
self.assertQuery("static", ["1"])
def testInstancesMultiAlloc(self):
response = {
constants.JOB_IDS_KEY: ["23423"],
constants.ALLOCATABLE_KEY: ["foobar"],
constants.FAILED_KEY: ["foobar2"],
}
self.rapi.AddResponse(serializer.DumpJson(response))
insts = [self.client.InstanceAllocation("create", "foobar",
"plain", [], []),
self.client.InstanceAllocation("create", "foobar2",
"drbd8", [{"size": 100}], [])]
resp = self.client.InstancesMultiAlloc(insts)
self.assertEqual(resp, response)
self.assertHandler(rlib2.R_2_instances_multi_alloc)
def testCreateInstanceOldVersion(self):
# The old request format, version 0, is no longer supported
self.rapi.AddResponse(None, code=404)
self.assertRaises(client.GanetiApiError, self.client.CreateInstance,
"create", "inst1.example.com", "plain", [], [])
self.assertEqual(self.rapi.CountPending(), 0)
def testCreateInstance(self):
self.rapi.AddResponse(serializer.DumpJson([rlib2._INST_CREATE_REQV1]))
self.rapi.AddResponse("23030")
job_id = self.client.CreateInstance("create", "inst1.example.com",
"plain", [], [], dry_run=True)
self.assertEqual(job_id, 23030)
self.assertHandler(rlib2.R_2_instances)
self.assertDryRun()
data = serializer.LoadJson(self.rapi.GetLastRequestData())
for field in ["dry_run", "beparams", "hvparams", "start"]:
self.assertFalse(field in data)
self.assertEqual(data["name"], "inst1.example.com")
self.assertEqual(data["disk_template"], "plain")
def testCreateInstance2(self):
self.rapi.AddResponse(serializer.DumpJson([rlib2._INST_CREATE_REQV1]))
self.rapi.AddResponse("24740")
job_id = self.client.CreateInstance("import", "inst2.example.com",
"drbd8", [{"size": 100,}],
[{}, {"bridge": "br1", }],
dry_run=False, start=True,
pnode="node1", snode="node9",
ip_check=False)
self.assertEqual(job_id, 24740)
self.assertHandler(rlib2.R_2_instances)
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(data[rlib2._REQ_DATA_VERSION], 1)
self.assertEqual(data["name"], "inst2.example.com")
self.assertEqual(data["disk_template"], "drbd8")
self.assertEqual(data["start"], True)
self.assertEqual(data["ip_check"], False)
self.assertEqualValues(data["disks"], [{"size": 100,}])
self.assertEqualValues(data["nics"], [{}, {"bridge": "br1", }])
def testDeleteInstance(self):
self.rapi.AddResponse("1234")
self.assertEqual(1234, self.client.DeleteInstance("instance", dry_run=True))
self.assertHandler(rlib2.R_2_instances_name)
self.assertItems(["instance"])
self.assertDryRun()
def testGetInstanceTags(self):
self.rapi.AddResponse("[]")
self.assertEqual([], self.client.GetInstanceTags("fooinstance"))
self.assertHandler(rlib2.R_2_instances_name_tags)
self.assertItems(["fooinstance"])
def testAddInstanceTags(self):
self.rapi.AddResponse("1234")
self.assertEqual(1234,
self.client.AddInstanceTags("fooinstance", ["awesome"], dry_run=True))
self.assertHandler(rlib2.R_2_instances_name_tags)
self.assertItems(["fooinstance"])
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testDeleteInstanceTags(self):
self.rapi.AddResponse("25826")
self.assertEqual(25826, self.client.DeleteInstanceTags("foo", ["awesome"],
dry_run=True))
self.assertHandler(rlib2.R_2_instances_name_tags)
self.assertItems(["foo"])
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testRebootInstance(self):
self.rapi.AddResponse("6146")
job_id = self.client.RebootInstance("i-bar", reboot_type="hard",
ignore_secondaries=True, dry_run=True,
reason="Updates")
self.assertEqual(6146, job_id)
self.assertHandler(rlib2.R_2_instances_name_reboot)
self.assertItems(["i-bar"])
self.assertDryRun()
self.assertQuery("type", ["hard"])
self.assertQuery("ignore_secondaries", ["1"])
self.assertQuery("reason", ["Updates"])
def testRebootInstanceDefaultReason(self):
self.rapi.AddResponse("6146")
job_id = self.client.RebootInstance("i-bar", reboot_type="hard",
ignore_secondaries=True, dry_run=True)
self.assertEqual(6146, job_id)
self.assertHandler(rlib2.R_2_instances_name_reboot)
self.assertItems(["i-bar"])
self.assertDryRun()
self.assertQuery("type", ["hard"])
self.assertQuery("ignore_secondaries", ["1"])
self.assertQuery("reason", None)
def testShutdownInstance(self):
self.rapi.AddResponse("1487")
self.assertEqual(1487, self.client.ShutdownInstance("foo-instance",
dry_run=True,
reason="NoMore"))
self.assertHandler(rlib2.R_2_instances_name_shutdown)
self.assertItems(["foo-instance"])
self.assertDryRun()
self.assertQuery("reason", ["NoMore"])
def testShutdownInstanceDefaultReason(self):
self.rapi.AddResponse("1487")
self.assertEqual(1487, self.client.ShutdownInstance("foo-instance",
dry_run=True))
self.assertHandler(rlib2.R_2_instances_name_shutdown)
self.assertItems(["foo-instance"])
self.assertDryRun()
self.assertQuery("reason", None)
def testStartupInstance(self):
self.rapi.AddResponse("27149")
self.assertEqual(27149, self.client.StartupInstance("bar-instance",
dry_run=True,
reason="New"))
self.assertHandler(rlib2.R_2_instances_name_startup)
self.assertItems(["bar-instance"])
self.assertDryRun()
self.assertQuery("reason", ["New"])
def testStartupInstanceDefaultReason(self):
self.rapi.AddResponse("27149")
self.assertEqual(27149, self.client.StartupInstance("bar-instance",
dry_run=True))
self.assertHandler(rlib2.R_2_instances_name_startup)
self.assertItems(["bar-instance"])
self.assertDryRun()
self.assertQuery("reason", None)
def testReinstallInstance(self):
self.rapi.AddResponse(serializer.DumpJson([]))
self.rapi.AddResponse("19119")
self.assertEqual(19119, self.client.ReinstallInstance("baz-instance",
os="DOS",
no_startup=True))
self.assertHandler(rlib2.R_2_instances_name_reinstall)
self.assertItems(["baz-instance"])
self.assertQuery("os", ["DOS"])
self.assertQuery("nostartup", ["1"])
self.assertEqual(self.rapi.CountPending(), 0)
def testReinstallInstanceNew(self):
self.rapi.AddResponse(serializer.DumpJson([rlib2._INST_REINSTALL_REQV1]))
self.rapi.AddResponse("25689")
self.assertEqual(25689, self.client.ReinstallInstance("moo-instance",
os="Debian",
no_startup=True))
self.assertHandler(rlib2.R_2_instances_name_reinstall)
self.assertItems(["moo-instance"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(len(data), 2)
self.assertEqual(data["os"], "Debian")
self.assertEqual(data["start"], False)
self.assertEqual(self.rapi.CountPending(), 0)
def testReinstallInstanceWithOsparams1(self):
self.rapi.AddResponse(serializer.DumpJson([]))
self.assertRaises(client.GanetiApiError, self.client.ReinstallInstance,
"doo-instance", osparams={"x": "y"})
self.assertEqual(self.rapi.CountPending(), 0)
def testReinstallInstanceWithOsparams2(self):
osparams = {
"Hello": "World",
"foo": "bar",
}
self.rapi.AddResponse(serializer.DumpJson([rlib2._INST_REINSTALL_REQV1]))
self.rapi.AddResponse("1717")
self.assertEqual(1717, self.client.ReinstallInstance("zoo-instance",
osparams=osparams))
self.assertHandler(rlib2.R_2_instances_name_reinstall)
self.assertItems(["zoo-instance"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(len(data), 2)
self.assertEqual(data["osparams"], osparams)
self.assertEqual(data["start"], True)
self.assertEqual(self.rapi.CountPending(), 0)
def testReplaceInstanceDisks(self):
self.rapi.AddResponse("999")
job_id = self.client.ReplaceInstanceDisks("instance-name",
disks=[0, 1], iallocator="hail")
self.assertEqual(999, job_id)
self.assertHandler(rlib2.R_2_instances_name_replace_disks)
self.assertItems(["instance-name"])
self.assertQuery("disks", ["0,1"])
self.assertQuery("mode", ["replace_auto"])
self.assertQuery("iallocator", ["hail"])
self.rapi.AddResponse("1000")
job_id = self.client.ReplaceInstanceDisks("instance-bar",
disks=[1], mode="replace_on_secondary", remote_node="foo-node")
self.assertEqual(1000, job_id)
self.assertItems(["instance-bar"])
self.assertQuery("disks", ["1"])
self.assertQuery("remote_node", ["foo-node"])
self.rapi.AddResponse("5175")
self.assertEqual(5175, self.client.ReplaceInstanceDisks("instance-moo"))
self.assertItems(["instance-moo"])
self.assertQuery("disks", None)
def testPrepareExport(self):
self.rapi.AddResponse("8326")
self.assertEqual(8326, self.client.PrepareExport("inst1", "local"))
self.assertHandler(rlib2.R_2_instances_name_prepare_export)
self.assertItems(["inst1"])
self.assertQuery("mode", ["local"])
def testExportInstance(self):
self.rapi.AddResponse("19695")
job_id = self.client.ExportInstance("inst2", "local", "nodeX",
shutdown=True)
self.assertEqual(job_id, 19695)
self.assertHandler(rlib2.R_2_instances_name_export)
self.assertItems(["inst2"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(data["mode"], "local")
self.assertEqual(data["destination"], "nodeX")
self.assertEqual(data["shutdown"], True)
def testMigrateInstanceDefaults(self):
self.rapi.AddResponse("24873")
job_id = self.client.MigrateInstance("inst91")
self.assertEqual(job_id, 24873)
self.assertHandler(rlib2.R_2_instances_name_migrate)
self.assertItems(["inst91"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertFalse(data)
def testMigrateInstance(self):
for mode in constants.HT_MIGRATION_MODES:
for cleanup in [False, True]:
self.rapi.AddResponse("31910")
job_id = self.client.MigrateInstance("inst289", mode=mode,
cleanup=cleanup)
self.assertEqual(job_id, 31910)
self.assertHandler(rlib2.R_2_instances_name_migrate)
self.assertItems(["inst289"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(len(data), 2)
self.assertEqual(data["mode"], mode)
self.assertEqual(data["cleanup"], cleanup)
def testFailoverInstanceDefaults(self):
self.rapi.AddResponse("7639")
job_id = self.client.FailoverInstance("inst13579")
self.assertEqual(job_id, 7639)
self.assertHandler(rlib2.R_2_instances_name_failover)
self.assertItems(["inst13579"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertFalse(data)
def testFailoverInstance(self):
for iallocator in ["dumb", "hail"]:
for ignore_consistency in [False, True]:
for target_node in ["node-a", "node2"]:
self.rapi.AddResponse("19161")
job_id = \
self.client.FailoverInstance("inst251", iallocator=iallocator,
ignore_consistency=ignore_consistency,
target_node=target_node)
self.assertEqual(job_id, 19161)
self.assertHandler(rlib2.R_2_instances_name_failover)
self.assertItems(["inst251"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(len(data), 3)
self.assertEqual(data["iallocator"], iallocator)
self.assertEqual(data["ignore_consistency"], ignore_consistency)
self.assertEqual(data["target_node"], target_node)
self.assertEqual(self.rapi.CountPending(), 0)
def testRenameInstanceDefaults(self):
new_name = "newnametha7euqu"
self.rapi.AddResponse("8791")
job_id = self.client.RenameInstance("inst18821", new_name)
self.assertEqual(job_id, 8791)
self.assertHandler(rlib2.R_2_instances_name_rename)
self.assertItems(["inst18821"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqualValues(data, {"new_name": new_name, })
def testRenameInstance(self):
new_name = "new-name-yiux1iin"
for ip_check in [False, True]:
for name_check in [False, True]:
self.rapi.AddResponse("24776")
job_id = self.client.RenameInstance("inst20967", new_name,
ip_check=ip_check,
name_check=name_check)
self.assertEqual(job_id, 24776)
self.assertHandler(rlib2.R_2_instances_name_rename)
self.assertItems(["inst20967"])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(len(data), 3)
self.assertEqual(data["new_name"], new_name)
self.assertEqual(data["ip_check"], ip_check)
self.assertEqual(data["name_check"], name_check)
def testGetJobs(self):
self.rapi.AddResponse('[ { "id": "123", "uri": "\\/2\\/jobs\\/123" },'
' { "id": "124", "uri": "\\/2\\/jobs\\/124" } ]')
self.assertEqual([123, 124], self.client.GetJobs())
self.assertHandler(rlib2.R_2_jobs)
self.rapi.AddResponse('[ { "id": "123", "uri": "\\/2\\/jobs\\/123" },'
' { "id": "124", "uri": "\\/2\\/jobs\\/124" } ]')
self.assertEqual([{"id": "123", "uri": "/2/jobs/123"},
{"id": "124", "uri": "/2/jobs/124"}],
self.client.GetJobs(bulk=True))
self.assertHandler(rlib2.R_2_jobs)
self.assertBulk()
def testGetJobStatus(self):
self.rapi.AddResponse("{\"foo\": \"bar\"}")
self.assertEqual({"foo": "bar"}, self.client.GetJobStatus(1234))
self.assertHandler(rlib2.R_2_jobs_id)
self.assertItems(["1234"])
def testWaitForJobChange(self):
fields = ["id", "summary"]
expected = {
"job_info": [123, "something"],
"log_entries": [],
}
self.rapi.AddResponse(serializer.DumpJson(expected))
result = self.client.WaitForJobChange(123, fields, [], -1)
self.assertEqualValues(expected, result)
self.assertHandler(rlib2.R_2_jobs_id_wait)
self.assertItems(["123"])
def testCancelJob(self):
self.rapi.AddResponse("[true, \"Job 123 will be canceled\"]")
self.assertEqual([True, "Job 123 will be canceled"],
self.client.CancelJob(999, dry_run=True))
self.assertHandler(rlib2.R_2_jobs_id)
self.assertItems(["999"])
self.assertDryRun()
def testGetNodes(self):
self.rapi.AddResponse("[ { \"id\": \"node1\", \"uri\": \"uri1\" },"
" { \"id\": \"node2\", \"uri\": \"uri2\" } ]")
self.assertEqual(["node1", "node2"], self.client.GetNodes())
self.assertHandler(rlib2.R_2_nodes)
self.rapi.AddResponse("[ { \"id\": \"node1\", \"uri\": \"uri1\" },"
" { \"id\": \"node2\", \"uri\": \"uri2\" } ]")
self.assertEqual([{"id": "node1", "uri": "uri1"},
{"id": "node2", "uri": "uri2"}],
self.client.GetNodes(bulk=True))
self.assertHandler(rlib2.R_2_nodes)
self.assertBulk()
def testGetNode(self):
self.rapi.AddResponse("{}")
self.assertEqual({}, self.client.GetNode("node-foo"))
self.assertHandler(rlib2.R_2_nodes_name)
self.assertItems(["node-foo"])
def testEvacuateNode(self):
self.rapi.AddResponse(serializer.DumpJson([rlib2._NODE_EVAC_RES1]))
self.rapi.AddResponse("9876")
job_id = self.client.EvacuateNode("node-1", remote_node="node-2")
self.assertEqual(9876, job_id)
self.assertHandler(rlib2.R_2_nodes_name_evacuate)
self.assertItems(["node-1"])
self.assertEqual(serializer.LoadJson(self.rapi.GetLastRequestData()),
{ "remote_node": "node-2", })
self.assertEqual(self.rapi.CountPending(), 0)
self.rapi.AddResponse(serializer.DumpJson([rlib2._NODE_EVAC_RES1]))
self.rapi.AddResponse("8888")
job_id = self.client.EvacuateNode("node-3", iallocator="hail", dry_run=True,
mode=constants.NODE_EVAC_ALL,
early_release=True)
self.assertEqual(8888, job_id)
self.assertItems(["node-3"])
self.assertEqual(serializer.LoadJson(self.rapi.GetLastRequestData()), {
"iallocator": "hail",
"mode": "all",
"early_release": True,
})
self.assertDryRun()
self.assertRaises(client.GanetiApiError,
self.client.EvacuateNode,
"node-4", iallocator="hail", remote_node="node-5")
self.assertEqual(self.rapi.CountPending(), 0)
def testEvacuateNodeOldResponse(self):
self.rapi.AddResponse(serializer.DumpJson([]))
self.assertRaises(client.GanetiApiError, self.client.EvacuateNode,
"node-4", accept_old=False)
self.assertEqual(self.rapi.CountPending(), 0)
for mode in [client.NODE_EVAC_PRI, client.NODE_EVAC_ALL]:
self.rapi.AddResponse(serializer.DumpJson([]))
self.assertRaises(client.GanetiApiError, self.client.EvacuateNode,
"node-4", accept_old=True, mode=mode)
self.assertEqual(self.rapi.CountPending(), 0)
self.rapi.AddResponse(serializer.DumpJson([]))
self.rapi.AddResponse(serializer.DumpJson("21533"))
result = self.client.EvacuateNode("node-3", iallocator="hail",
dry_run=True, accept_old=True,
mode=client.NODE_EVAC_SEC,
early_release=True)
self.assertEqual(result, "21533")
self.assertItems(["node-3"])
self.assertQuery("iallocator", ["hail"])
self.assertQuery("early_release", ["1"])
self.assertFalse(self.rapi.GetLastRequestData())
self.assertDryRun()
self.assertEqual(self.rapi.CountPending(), 0)
def testMigrateNode(self):
self.rapi.AddResponse(serializer.DumpJson([]))
self.rapi.AddResponse("1111")
self.assertEqual(1111, self.client.MigrateNode("node-a", dry_run=True))
self.assertHandler(rlib2.R_2_nodes_name_migrate)
self.assertItems(["node-a"])
self.assert_("mode" not in self.rapi.GetLastHandler().queryargs)
self.assertDryRun()
self.assertFalse(self.rapi.GetLastRequestData())
self.rapi.AddResponse(serializer.DumpJson([]))
self.rapi.AddResponse("1112")
self.assertEqual(1112, self.client.MigrateNode("node-a", dry_run=True,
mode="live"))
self.assertHandler(rlib2.R_2_nodes_name_migrate)
self.assertItems(["node-a"])
self.assertQuery("mode", ["live"])
self.assertDryRun()
self.assertFalse(self.rapi.GetLastRequestData())
self.rapi.AddResponse(serializer.DumpJson([]))
self.assertRaises(client.GanetiApiError, self.client.MigrateNode,
"node-c", target_node="foonode")
self.assertEqual(self.rapi.CountPending(), 0)
def testMigrateNodeBodyData(self):
self.rapi.AddResponse(serializer.DumpJson([rlib2._NODE_MIGRATE_REQV1]))
self.rapi.AddResponse("27539")
self.assertEqual(27539, self.client.MigrateNode("node-a", dry_run=False,
mode="live"))
self.assertHandler(rlib2.R_2_nodes_name_migrate)
self.assertItems(["node-a"])
self.assertFalse(self.rapi.GetLastHandler().queryargs)
self.assertEqual(serializer.LoadJson(self.rapi.GetLastRequestData()),
{ "mode": "live", })
self.rapi.AddResponse(serializer.DumpJson([rlib2._NODE_MIGRATE_REQV1]))
self.rapi.AddResponse("14219")
self.assertEqual(14219, self.client.MigrateNode("node-x", dry_run=True,
target_node="node9",
iallocator="ial"))
self.assertHandler(rlib2.R_2_nodes_name_migrate)
self.assertItems(["node-x"])
self.assertDryRun()
self.assertEqual(serializer.LoadJson(self.rapi.GetLastRequestData()),
{ "target_node": "node9", "iallocator": "ial", })
self.assertEqual(self.rapi.CountPending(), 0)
def testGetNodeRole(self):
self.rapi.AddResponse("\"master\"")
self.assertEqual("master", self.client.GetNodeRole("node-a"))
self.assertHandler(rlib2.R_2_nodes_name_role)
self.assertItems(["node-a"])
def testSetNodeRole(self):
self.rapi.AddResponse("789")
self.assertEqual(789,
self.client.SetNodeRole("node-foo", "master-candidate", force=True))
self.assertHandler(rlib2.R_2_nodes_name_role)
self.assertItems(["node-foo"])
self.assertQuery("force", ["1"])
self.assertEqual("\"master-candidate\"", self.rapi.GetLastRequestData())
def testPowercycleNode(self):
self.rapi.AddResponse("23051")
self.assertEqual(23051,
self.client.PowercycleNode("node5468", force=True))
self.assertHandler(rlib2.R_2_nodes_name_powercycle)
self.assertItems(["node5468"])
self.assertQuery("force", ["1"])
self.assertFalse(self.rapi.GetLastRequestData())
self.assertEqual(self.rapi.CountPending(), 0)
def testModifyNode(self):
self.rapi.AddResponse("3783")
job_id = self.client.ModifyNode("node16979.example.com", drained=True)
self.assertEqual(job_id, 3783)
self.assertHandler(rlib2.R_2_nodes_name_modify)
self.assertItems(["node16979.example.com"])
self.assertEqual(self.rapi.CountPending(), 0)
def testGetNodeStorageUnits(self):
self.rapi.AddResponse("42")
self.assertEqual(42,
self.client.GetNodeStorageUnits("node-x", "lvm-pv", "fields"))
self.assertHandler(rlib2.R_2_nodes_name_storage)
self.assertItems(["node-x"])
self.assertQuery("storage_type", ["lvm-pv"])
self.assertQuery("output_fields", ["fields"])
def testModifyNodeStorageUnits(self):
self.rapi.AddResponse("14")
self.assertEqual(14,
self.client.ModifyNodeStorageUnits("node-z", "lvm-pv", "hda"))
self.assertHandler(rlib2.R_2_nodes_name_storage_modify)
self.assertItems(["node-z"])
self.assertQuery("storage_type", ["lvm-pv"])
self.assertQuery("name", ["hda"])
self.assertQuery("allocatable", None)
for allocatable, query_allocatable in [(True, "1"), (False, "0")]:
self.rapi.AddResponse("7205")
job_id = self.client.ModifyNodeStorageUnits("node-z", "lvm-pv", "hda",
allocatable=allocatable)
self.assertEqual(7205, job_id)
self.assertHandler(rlib2.R_2_nodes_name_storage_modify)
self.assertItems(["node-z"])
self.assertQuery("storage_type", ["lvm-pv"])
self.assertQuery("name", ["hda"])
self.assertQuery("allocatable", [query_allocatable])
def testRepairNodeStorageUnits(self):
self.rapi.AddResponse("99")
self.assertEqual(99, self.client.RepairNodeStorageUnits("node-z", "lvm-pv",
"hda"))
self.assertHandler(rlib2.R_2_nodes_name_storage_repair)
self.assertItems(["node-z"])
self.assertQuery("storage_type", ["lvm-pv"])
self.assertQuery("name", ["hda"])
def testGetNodeTags(self):
self.rapi.AddResponse("[\"fry\", \"bender\"]")
self.assertEqual(["fry", "bender"], self.client.GetNodeTags("node-k"))
self.assertHandler(rlib2.R_2_nodes_name_tags)
self.assertItems(["node-k"])
def testAddNodeTags(self):
self.rapi.AddResponse("1234")
self.assertEqual(1234,
self.client.AddNodeTags("node-v", ["awesome"], dry_run=True))
self.assertHandler(rlib2.R_2_nodes_name_tags)
self.assertItems(["node-v"])
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testDeleteNodeTags(self):
self.rapi.AddResponse("16861")
self.assertEqual(16861, self.client.DeleteNodeTags("node-w", ["awesome"],
dry_run=True))
self.assertHandler(rlib2.R_2_nodes_name_tags)
self.assertItems(["node-w"])
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testGetGroups(self):
groups = [{"name": "group1",
"uri": "/2/groups/group1",
},
{"name": "group2",
"uri": "/2/groups/group2",
},
]
self.rapi.AddResponse(serializer.DumpJson(groups))
self.assertEqual(["group1", "group2"], self.client.GetGroups())
self.assertHandler(rlib2.R_2_groups)
def testGetGroupsBulk(self):
groups = [{"name": "group1",
"uri": "/2/groups/group1",
"node_cnt": 2,
"node_list": ["gnt1.test",
"gnt2.test",
],
},
{"name": "group2",
"uri": "/2/groups/group2",
"node_cnt": 1,
"node_list": ["gnt3.test",
],
},
]
self.rapi.AddResponse(serializer.DumpJson(groups))
self.assertEqual(groups, self.client.GetGroups(bulk=True))
self.assertHandler(rlib2.R_2_groups)
self.assertBulk()
def testGetGroup(self):
group = {"ctime": None,
"name": "default",
}
self.rapi.AddResponse(serializer.DumpJson(group))
self.assertEqual({"ctime": None, "name": "default"},
self.client.GetGroup("default"))
self.assertHandler(rlib2.R_2_groups_name)
self.assertItems(["default"])
def testCreateGroup(self):
self.rapi.AddResponse("12345")
job_id = self.client.CreateGroup("newgroup", dry_run=True)
self.assertEqual(job_id, 12345)
self.assertHandler(rlib2.R_2_groups)
self.assertDryRun()
def testDeleteGroup(self):
self.rapi.AddResponse("12346")
job_id = self.client.DeleteGroup("newgroup", dry_run=True)
self.assertEqual(job_id, 12346)
self.assertHandler(rlib2.R_2_groups_name)
self.assertDryRun()
def testRenameGroup(self):
self.rapi.AddResponse("12347")
job_id = self.client.RenameGroup("oldname", "newname")
self.assertEqual(job_id, 12347)
self.assertHandler(rlib2.R_2_groups_name_rename)
def testModifyGroup(self):
self.rapi.AddResponse("12348")
job_id = self.client.ModifyGroup("mygroup", alloc_policy="foo")
self.assertEqual(job_id, 12348)
self.assertHandler(rlib2.R_2_groups_name_modify)
def testAssignGroupNodes(self):
self.rapi.AddResponse("12349")
job_id = self.client.AssignGroupNodes("mygroup", ["node1", "node2"],
force=True, dry_run=True)
self.assertEqual(job_id, 12349)
self.assertHandler(rlib2.R_2_groups_name_assign_nodes)
self.assertDryRun()
self.assertUseForce()
def testGetNetworksBulk(self):
networks = [{"name": "network1",
"uri": "/2/networks/network1",
"network": "192.168.0.0/24",
},
{"name": "network2",
"uri": "/2/networks/network2",
"network": "192.168.0.0/24",
},
]
self.rapi.AddResponse(serializer.DumpJson(networks))
self.assertEqual(networks, self.client.GetNetworks(bulk=True))
self.assertHandler(rlib2.R_2_networks)
self.assertBulk()
def testGetNetwork(self):
network = {"ctime": None,
"name": "network1",
}
self.rapi.AddResponse(serializer.DumpJson(network))
self.assertEqual({"ctime": None, "name": "network1"},
self.client.GetNetwork("network1"))
self.assertHandler(rlib2.R_2_networks_name)
self.assertItems(["network1"])
def testCreateNetwork(self):
self.rapi.AddResponse("12345")
job_id = self.client.CreateNetwork("newnetwork", network="192.168.0.0/24",
dry_run=True)
self.assertEqual(job_id, 12345)
self.assertHandler(rlib2.R_2_networks)
self.assertDryRun()
def testModifyNetwork(self):
self.rapi.AddResponse("12346")
job_id = self.client.ModifyNetwork("mynetwork", gateway="192.168.0.10",
dry_run=True)
self.assertEqual(job_id, 12346)
self.assertHandler(rlib2.R_2_networks_name_modify)
def testDeleteNetwork(self):
self.rapi.AddResponse("12347")
job_id = self.client.DeleteNetwork("newnetwork", dry_run=True)
self.assertEqual(job_id, 12347)
self.assertHandler(rlib2.R_2_networks_name)
self.assertDryRun()
def testConnectNetwork(self):
self.rapi.AddResponse("12348")
job_id = self.client.ConnectNetwork("mynetwork", "default",
"bridged", "br0", dry_run=True)
self.assertEqual(job_id, 12348)
self.assertHandler(rlib2.R_2_networks_name_connect)
self.assertDryRun()
def testDisconnectNetwork(self):
self.rapi.AddResponse("12349")
job_id = self.client.DisconnectNetwork("mynetwork", "default", dry_run=True)
self.assertEqual(job_id, 12349)
self.assertHandler(rlib2.R_2_networks_name_disconnect)
self.assertDryRun()
def testGetNetworkTags(self):
self.rapi.AddResponse("[]")
self.assertEqual([], self.client.GetNetworkTags("fooNetwork"))
self.assertHandler(rlib2.R_2_networks_name_tags)
self.assertItems(["fooNetwork"])
def testAddNetworkTags(self):
self.rapi.AddResponse("1234")
self.assertEqual(1234,
self.client.AddNetworkTags("fooNetwork", ["awesome"], dry_run=True))
self.assertHandler(rlib2.R_2_networks_name_tags)
self.assertItems(["fooNetwork"])
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testDeleteNetworkTags(self):
self.rapi.AddResponse("25826")
self.assertEqual(25826, self.client.DeleteNetworkTags("foo", ["awesome"],
dry_run=True))
self.assertHandler(rlib2.R_2_networks_name_tags)
self.assertItems(["foo"])
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testModifyInstance(self):
self.rapi.AddResponse("23681")
job_id = self.client.ModifyInstance("inst7210", os_name="linux")
self.assertEqual(job_id, 23681)
self.assertItems(["inst7210"])
self.assertHandler(rlib2.R_2_instances_name_modify)
self.assertEqual(serializer.LoadJson(self.rapi.GetLastRequestData()),
{ "os_name": "linux", })
def testModifyCluster(self):
for mnh in [None, False, True]:
self.rapi.AddResponse("14470")
self.assertEqual(14470,
self.client.ModifyCluster(maintain_node_health=mnh))
self.assertHandler(rlib2.R_2_cluster_modify)
self.assertItems([])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(len(data), 1)
self.assertEqual(data["maintain_node_health"], mnh)
self.assertEqual(self.rapi.CountPending(), 0)
def testRedistributeConfig(self):
self.rapi.AddResponse("3364")
job_id = self.client.RedistributeConfig()
self.assertEqual(job_id, 3364)
self.assertItems([])
self.assertHandler(rlib2.R_2_redist_config)
def testActivateInstanceDisks(self):
self.rapi.AddResponse("23547")
job_id = self.client.ActivateInstanceDisks("inst28204")
self.assertEqual(job_id, 23547)
self.assertItems(["inst28204"])
self.assertHandler(rlib2.R_2_instances_name_activate_disks)
self.assertFalse(self.rapi.GetLastHandler().queryargs)
def testActivateInstanceDisksIgnoreSize(self):
self.rapi.AddResponse("11044")
job_id = self.client.ActivateInstanceDisks("inst28204", ignore_size=True)
self.assertEqual(job_id, 11044)
self.assertItems(["inst28204"])
self.assertHandler(rlib2.R_2_instances_name_activate_disks)
self.assertQuery("ignore_size", ["1"])
def testDeactivateInstanceDisks(self):
self.rapi.AddResponse("14591")
job_id = self.client.DeactivateInstanceDisks("inst28234")
self.assertEqual(job_id, 14591)
self.assertItems(["inst28234"])
self.assertHandler(rlib2.R_2_instances_name_deactivate_disks)
self.assertFalse(self.rapi.GetLastHandler().queryargs)
def testRecreateInstanceDisks(self):
self.rapi.AddResponse("13553")
job_id = self.client.RecreateInstanceDisks("inst23153")
self.assertEqual(job_id, 13553)
self.assertItems(["inst23153"])
self.assertHandler(rlib2.R_2_instances_name_recreate_disks)
self.assertFalse(self.rapi.GetLastHandler().queryargs)
def testGetInstanceConsole(self):
self.rapi.AddResponse("26876")
job_id = self.client.GetInstanceConsole("inst21491")
self.assertEqual(job_id, 26876)
self.assertItems(["inst21491"])
self.assertHandler(rlib2.R_2_instances_name_console)
self.assertFalse(self.rapi.GetLastHandler().queryargs)
self.assertFalse(self.rapi.GetLastRequestData())
def testGrowInstanceDisk(self):
for idx, wait_for_sync in enumerate([None, False, True]):
amount = 128 + (512 * idx)
self.assertEqual(self.rapi.CountPending(), 0)
self.rapi.AddResponse("30783")
self.assertEqual(30783,
self.client.GrowInstanceDisk("eze8ch", idx, amount,
wait_for_sync=wait_for_sync))
self.assertHandler(rlib2.R_2_instances_name_disk_grow)
self.assertItems(["eze8ch", str(idx)])
data = serializer.LoadJson(self.rapi.GetLastRequestData())
if wait_for_sync is None:
self.assertEqual(len(data), 1)
self.assert_("wait_for_sync" not in data)
else:
self.assertEqual(len(data), 2)
self.assertEqual(data["wait_for_sync"], wait_for_sync)
self.assertEqual(data["amount"], amount)
self.assertEqual(self.rapi.CountPending(), 0)
def testGetGroupTags(self):
self.rapi.AddResponse("[]")
self.assertEqual([], self.client.GetGroupTags("fooGroup"))
self.assertHandler(rlib2.R_2_groups_name_tags)
self.assertItems(["fooGroup"])
def testAddGroupTags(self):
self.rapi.AddResponse("1234")
self.assertEqual(1234,
self.client.AddGroupTags("fooGroup", ["awesome"], dry_run=True))
self.assertHandler(rlib2.R_2_groups_name_tags)
self.assertItems(["fooGroup"])
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testDeleteGroupTags(self):
self.rapi.AddResponse("25826")
self.assertEqual(25826, self.client.DeleteGroupTags("foo", ["awesome"],
dry_run=True))
self.assertHandler(rlib2.R_2_groups_name_tags)
self.assertItems(["foo"])
self.assertDryRun()
self.assertQuery("tag", ["awesome"])
def testQuery(self):
for idx, what in enumerate(constants.QR_VIA_RAPI):
for idx2, qfilter in enumerate([None, ["?", "name"]]):
job_id = 11010 + (idx << 4) + (idx2 << 16)
fields = sorted(query.ALL_FIELDS[what].keys())[:10]
self.rapi.AddResponse(str(job_id))
self.assertEqual(self.client.Query(what, fields, qfilter=qfilter),
job_id)
self.assertItems([what])
self.assertHandler(rlib2.R_2_query)
self.assertFalse(self.rapi.GetLastHandler().queryargs)
data = serializer.LoadJson(self.rapi.GetLastRequestData())
self.assertEqual(data["fields"], fields)
if qfilter is None:
self.assertTrue("qfilter" not in data)
else:
self.assertEqual(data["qfilter"], qfilter)
self.assertEqual(self.rapi.CountPending(), 0)
def testQueryFields(self):
exp_result = objects.QueryFieldsResponse(fields=[
objects.QueryFieldDefinition(name="pnode", title="PNode",
kind=constants.QFT_NUMBER),
objects.QueryFieldDefinition(name="other", title="Other",
kind=constants.QFT_BOOL),
])
for what in constants.QR_VIA_RAPI:
for fields in [None, ["name", "_unknown_"], ["&", "?|"]]:
self.rapi.AddResponse(serializer.DumpJson(exp_result.ToDict()))
result = self.client.QueryFields(what, fields=fields)
self.assertItems([what])
self.assertHandler(rlib2.R_2_query_fields)
self.assertFalse(self.rapi.GetLastRequestData())
queryargs = self.rapi.GetLastHandler().queryargs
if fields is None:
self.assertFalse(queryargs)
else:
self.assertEqual(queryargs, {
"fields": [",".join(fields)],
})
self.assertEqual(objects.QueryFieldsResponse.FromDict(result).ToDict(),
exp_result.ToDict())
self.assertEqual(self.rapi.CountPending(), 0)
def testWaitForJobCompletionNoChange(self):
resp = serializer.DumpJson({
"status": constants.JOB_STATUS_WAITING,
})
for retries in [1, 5, 25]:
for _ in range(retries):
self.rapi.AddResponse(resp)
self.assertFalse(self.client.WaitForJobCompletion(22789, period=None,
retries=retries))
self.assertHandler(rlib2.R_2_jobs_id)
self.assertItems(["22789"])
self.assertEqual(self.rapi.CountPending(), 0)
def testWaitForJobCompletionAlreadyFinished(self):
self.rapi.AddResponse(serializer.DumpJson({
"status": constants.JOB_STATUS_SUCCESS,
}))
self.assertTrue(self.client.WaitForJobCompletion(22793, period=None,
retries=1))
self.assertHandler(rlib2.R_2_jobs_id)
self.assertItems(["22793"])
self.assertEqual(self.rapi.CountPending(), 0)
def testWaitForJobCompletionEmptyResponse(self):
self.rapi.AddResponse("{}")
self.assertFalse(self.client.WaitForJobCompletion(22793, period=None,
retries=10))
self.assertHandler(rlib2.R_2_jobs_id)
self.assertItems(["22793"])
self.assertEqual(self.rapi.CountPending(), 0)
  def testWaitForJobCompletionOutOfRetries(self):
    """WaitForJobCompletion must give up (return False) after "retries"
    polls even though the job is still running.

    """
    for retries in [3, 10, 21]:
      # Queue "retries" RUNNING responses, but only allow retries - 1 polls,
      # so exactly one queued response must remain unconsumed afterwards.
      for _ in range(retries):
        self.rapi.AddResponse(serializer.DumpJson({
          "status": constants.JOB_STATUS_RUNNING,
          }))
      self.assertFalse(self.client.WaitForJobCompletion(30948, period=None,
                                                        retries=retries - 1))
      self.assertHandler(rlib2.R_2_jobs_id)
      self.assertItems(["30948"])
      # One leftover response proves the client stopped polling early
      self.assertEqual(self.rapi.CountPending(), 1)
      self.rapi.ResetResponses()
def testWaitForJobCompletionSuccessAndFailure(self):
for retries in [1, 4, 13]:
for (success, end_status) in [(False, constants.JOB_STATUS_ERROR),
(True, constants.JOB_STATUS_SUCCESS)]:
for _ in range(retries):
self.rapi.AddResponse(serializer.DumpJson({
"status": constants.JOB_STATUS_RUNNING,
}))
self.rapi.AddResponse(serializer.DumpJson({
"status": end_status,
}))
result = self.client.WaitForJobCompletion(3187, period=None,
retries=retries + 1)
self.assertEqual(result, success)
self.assertHandler(rlib2.R_2_jobs_id)
self.assertItems(["3187"])
self.assertEqual(self.rapi.CountPending(), 0)
class RapiTestRunner(unittest.TextTestRunner):
  """Test runner which additionally verifies RAPI resource coverage.

  While the tests run, handlers exercised through the mocked RAPI client are
  collected in the module-global "_used_handlers" set; afterwards this runner
  asserts that every resource in the connector (minus _KNOWN_UNUSED) was hit.

  """
  def run(self, *args):
    global _used_handlers
    # Guard against nested/concurrent runs clobbering the global set
    assert _used_handlers is None

    _used_handlers = set()
    try:
      # Run actual tests
      result = unittest.TextTestRunner.run(self, *args)

      # Any connector resource never touched by the client is a coverage gap
      diff = (set(connector.CONNECTOR.values()) - _used_handlers -
              _KNOWN_UNUSED)
      if diff:
        raise AssertionError("The following RAPI resources were not used by the"
                             " RAPI client: %r" % utils.CommaJoin(diff))
    finally:
      # Reset global variable
      _used_handlers = None

    return result
if __name__ == "__main__":
  # UsesRapiClient supplies the fake transport the RAPI client needs; the
  # custom runner then enforces resource coverage (see RapiTestRunner above).
  client.UsesRapiClient(testutils.GanetiTestProgram)(testRunner=RapiTestRunner)
| badp/ganeti | test/py/ganeti.rapi.client_unittest.py | Python | gpl-2.0 | 59,059 |
# -*- coding: utf-8 -*-
###############################################################################
#
# FinalizeOAuth
# Completes the OAuth process by retrieving a Foursquare access token for a user, after they have visited the authorization URL returned by the InitializeOAuth choreo and clicked "allow."
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class FinalizeOAuth(Choreography):
    """Choreo wrapper for /Library/Foursquare/OAuth/FinalizeOAuth.

    Subclasses Choreography and overrides the factory hooks so the Temboo
    runtime builds the matching input/result/execution objects for this Choreo.
    """

    def __init__(self, temboo_session):
        """
        Create a new instance of the FinalizeOAuth Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(FinalizeOAuth, self).__init__(temboo_session, '/Library/Foursquare/OAuth/FinalizeOAuth')

    # Factory hook: builds the InputSet used to supply this Choreo's inputs.
    def new_input_set(self):
        return FinalizeOAuthInputSet()

    # Factory hook: wraps a raw execution result in a typed ResultSet.
    def _make_result_set(self, result, path):
        return FinalizeOAuthResultSet(result, path)

    # Factory hook: creates the execution handle for an async run.
    def _make_execution(self, session, exec_id, path):
        return FinalizeOAuthChoreographyExecution(session, exec_id, path)
class FinalizeOAuthInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the FinalizeOAuth
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.

    Required inputs: CallbackID, ClientID, ClientSecret.
    AccountName/AppKeyName/AppKeyValue are deprecated and retained only for
    backward compatibility.
    """
    def set_AccountName(self, value):
        """
        Set the value of the AccountName input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
        """
        super(FinalizeOAuthInputSet, self)._set_input('AccountName', value)
    def set_AppKeyName(self, value):
        """
        Set the value of the AppKeyName input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
        """
        super(FinalizeOAuthInputSet, self)._set_input('AppKeyName', value)
    def set_AppKeyValue(self, value):
        """
        Set the value of the AppKeyValue input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
        """
        super(FinalizeOAuthInputSet, self)._set_input('AppKeyValue', value)
    def set_CallbackID(self, value):
        """
        Set the value of the CallbackID input for this Choreo. ((required, string) The callback token returned by the InitializeOAuth Choreo. Used to retrieve the authorization code after the user authorizes.)
        """
        super(FinalizeOAuthInputSet, self)._set_input('CallbackID', value)
    def set_ClientID(self, value):
        """
        Set the value of the ClientID input for this Choreo. ((required, string) The Client ID provided by Foursquare after registering your application.)
        """
        super(FinalizeOAuthInputSet, self)._set_input('ClientID', value)
    def set_ClientSecret(self, value):
        """
        Set the value of the ClientSecret input for this Choreo. ((required, string) The Client Secret provided by Foursquare after registering your application.)
        """
        super(FinalizeOAuthInputSet, self)._set_input('ClientSecret', value)
    def set_Timeout(self, value):
        """
        Set the value of the Timeout input for this Choreo. ((optional, integer) The amount of time (in seconds) to poll your Temboo callback URL to see if your app's user has allowed or denied the request for access. Defaults to 20. Max is 60.)
        """
        super(FinalizeOAuthInputSet, self)._set_input('Timeout', value)
class FinalizeOAuthResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the FinalizeOAuth Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        # NOTE(review): the parameter name "str" shadows the builtin; renaming
        # it would change the keyword interface, so it is left as-is.
        return json.loads(str)

    def get_AccessToken(self):
        """
        Retrieve the value for the "AccessToken" output from this Choreo execution. ((string) The access token for the user that has granted access to your application.)
        """
        # Returns None when the output is absent from the execution result.
        return self._output.get('AccessToken', None)
class FinalizeOAuthChoreographyExecution(ChoreographyExecution):
    """Execution handle for FinalizeOAuth; wraps results in the typed ResultSet."""

    def _make_result_set(self, response, path):
        return FinalizeOAuthResultSet(response, path)
| willprice/arduino-sphere-project | scripts/example_direction_finder/temboo/Library/Foursquare/OAuth/FinalizeOAuth.py | Python | gpl-2.0 | 5,006 |
#
# Copyright (C) 2014 Uninett AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 3 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Forms for PortAdmin"""
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms_foundation.layout import Layout, Row, Column, Submit
class SearchForm(forms.Form):
    """Form for searching for ip-devices and interfaces."""

    query = forms.CharField(
        label='',
        widget=forms.TextInput(
            attrs={'placeholder': 'Search for ip device or interface'}))

    def __init__(self, *args, **kwargs):
        super(SearchForm, self).__init__(*args, **kwargs)
        # Configure crispy-forms rendering: GET submit to the portadmin index.
        helper = FormHelper()
        helper.form_action = 'portadmin-index'
        helper.form_method = 'GET'
        # One row: wide search field next to a narrow submit button.
        search_column = Column('query', css_class='medium-9')
        button_column = Column(
            Submit('submit', 'Search', css_class='postfix'),
            css_class='medium-3')
        helper.layout = Layout(
            Row(search_column, button_column, css_class='collapse'))
        self.helper = helper
| UNINETT/nav | python/nav/web/portadmin/forms.py | Python | gpl-2.0 | 1,575 |
# -*- coding: utf-8 -*-
from django.conf.urls import url
from . import views
# URL routes for the notifications app, one entry per line for readability.
urlpatterns = [
    url(r"^/android/setup$", views.android_setup_view, name="notif_android_setup"),
    url(r"^/chrome/setup$", views.chrome_setup_view, name="notif_chrome_setup"),
    url(r"^/chrome/getdata$", views.chrome_getdata_view, name="notif_chrome_getdata"),
    url(r"^/gcm/post$", views.gcm_post_view, name="notif_gcm_post"),
    url(r"^/gcm/list$", views.gcm_list_view, name="notif_gcm_list"),
]
| jacobajit/ion | intranet/apps/notifications/urls.py | Python | gpl-2.0 | 483 |
# create a Session object by sessionmaker
import os
import sys
import ConfigParser

import sqlalchemy.orm
# get path to taskmanager. it is assumed that this script is in the lib directory of
# the taskmanager package.
tmpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/..' ) )
etcpath = '%s/etc' % tmpath # for configuration files
# library is in the same folder
from hDatabase import Base
class hDBSessionMaker( object ):
    """Builds the SQLAlchemy engine and a thread-local session factory.

    Reads the connection parameters from the [DATABASE] section of a config
    file, connects the engine, ensures all tables declared on hDatabase.Base
    exist, and exposes a scoped session factory as ``self.DBSession``.
    """
    def __init__( self, configFileName=None, createTables=False, echo=False ):
        """Connect to the database described in the configuration file.

        @param configFileName: path to a settings file; defaults to
            etc/serversettings.cfg inside the taskmanager package.
        @param createTables: currently unused -- tables are always created via
            Base.metadata.create_all() below (parameter kept for API
            compatibility; TODO: honor it without breaking existing callers).
        @param echo: forwarded to the engine; True makes SQLAlchemy log all
            emitted SQL statements to stdout.
        """
        if not configFileName:
            # use default config file relative to this module's location
            etcpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/../etc' ) )

            # default config file for database connection
            configFileName = "{etcPath}/serversettings.cfg".format(etcPath=etcpath)

        # read config file; bail out early if it cannot be found
        if os.path.exists( configFileName ):
            config = ConfigParser.ConfigParser()
            config.read( configFileName )
        else:
            sys.stderr.write( "ERROR: Could not find Config file {c}!".format( c=configFileName) )
            sys.exit( -1 )

        databaseDialect = config.get( 'DATABASE', 'database_dialect' )
        databaseHost = config.get( 'DATABASE', 'database_host' )
        databasePort = config.get( 'DATABASE', 'database_port' )
        databaseName = config.get( 'DATABASE', 'database_name' )
        databaseUsername = config.get( 'DATABASE', 'database_username' )
        databasePassword = config.get( 'DATABASE', 'database_password' )

        ## @var engine
        # The engine that is connected to the database.
        self.engine = sqlalchemy.create_engine( "{dialect}://{user}:{password}@{host}:{port}/{name}".format( dialect=databaseDialect,
                                                                                                             user=databaseUsername,
                                                                                                             password=databasePassword,
                                                                                                             host=databaseHost,
                                                                                                             port=databasePort,
                                                                                                             name=databaseName),
                                                pool_size=50,       # number of connections to keep open inside the connection pool
                                                max_overflow=100,   # number of "overflow" connections allowed above pool_size
                                                pool_recycle=3600,  # recycle connections after this many seconds
                                                echo=echo )         # honor the echo argument (was hard-coded to False)

        # Create all tables in the engine. This is equivalent to "Create Table"
        # statements in raw SQL. (Always executed; see createTables note above.)
        Base.metadata.create_all( self.engine )

        ## @var DBSession
        # Thread-local session factory. See
        # http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html#contextual-thread-local-sessions
        SessionFactory = sqlalchemy.orm.sessionmaker( bind = self.engine )
        self.DBSession = sqlalchemy.orm.scoped_session( SessionFactory )
| call-me-jimi/taskmanager | taskmanager/lib/hDBSessionMaker.py | Python | gpl-2.0 | 4,630 |
# -*- coding: utf-8 -*-
""" Validators for wx widgets.
Copyright (c) Karol Będkowski, 2006-2013
This file is part of wxGTD
This is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation, version 2.
"""
__author__ = "Karol Będkowski"
__copyright__ = "Copyright (c) Karol Będkowski, 2006-2013"
__version__ = '2013-04-21'
__all__ = ['ValidatorDv', 'Validator', 'ValidatorDate', 'ValidatorTime',
'ValidatorColorStr']
from .validator import Validator, ValidatorDv, ValidatorDate, ValidatorTime, \
ValidatorColorStr
| KarolBedkowski/wxgtd | wxgtd/wxtools/validators/__init__.py | Python | gpl-2.0 | 638 |
from ._Recognized_objects import *
| grandcat/robotics_g7 | object_recognition/src/object_recognition/msg/__init__.py | Python | gpl-2.0 | 35 |
#!/usr/bin/env python
# File created February 29, 2012
from __future__ import division
__author__ = "William Walters"
__copyright__ = "Copyright 2011, The QIIME Project"
__credits__ = ["William Walters", "Emily TerAvest"]
__license__ = "GPL"
__version__ = "1.8.0-dev"
__maintainer__ = "William Walters"
__email__ = "William.A.Walters@colorado.edu"
from os.path import join, basename
from skbio.parse.sequences import parse_fasta
from skbio.sequence import DNA
from qiime.split_libraries import local_align_primer_seq
from qiime.check_id_map import process_id_map
def get_rev_primer_seqs(mapping_fp):
    """ Parses mapping file to get dictionary of SampleID:Rev primer

    Returns {SampleID: [reverse-complemented reverse primer, ...]}.

    mapping_fp: open mapping file object (callers pass open(fp, "U"))
    """
    hds, mapping_data, run_description, errors, warnings = \
        process_id_map(mapping_fp, has_barcodes=False,
                       disable_primer_check=True)

    if errors:
        for curr_err in errors:
            if curr_err.startswith("Duplicate SampleID"):
                raise ValueError('Errors were found with mapping file, ' +
                                 'please run validate_mapping_file.py to ' +
                                 'identify problems.')

    # create dict of dicts with SampleID:{each header:mapping data}
    # zip pairs each header with the matching column of the row; later rows
    # with a duplicate SampleID overwrite earlier ones.
    id_map = {}
    for curr_data in mapping_data:
        id_map[curr_data[0]] = dict(zip(hds, curr_data))

    reverse_primers = {}
    for curr_id in id_map:
        try:
            # Reverse-complement each comma-separated primer for this sample
            reverse_primers[curr_id] =\
                [str(DNA(curr_rev_primer).rc()) for curr_rev_primer in
                 id_map[curr_id]['ReversePrimer'].split(',')]
        except KeyError:
            raise KeyError("Reverse primer not found in mapping file, " +
                           "please include a 'ReversePrimer' column.")

    # Check for valid reverse primers
    # Will have been detected as warnings from mapping file
    for curr_err in errors:
        if curr_err.startswith("Invalid DNA sequence detected"):
            raise ValueError("Problems found with reverse primers, please " +
                             "check mapping file with validate_mapping_file.py")

    return reverse_primers
def get_output_filepaths(output_dir,
                         fasta_fp):
    """ Returns output fasta filepath and log filepath

    fasta_fp: fasta filepath
    output_dir: output directory
    """
    # Longest extensions first: the old str.replace approach turned
    # "sample.fasta" into "samplesta" because ".fa" matched inside ".fasta",
    # and it also stripped extensions occurring mid-filename. Only strip a
    # trailing extension instead.
    fasta_extensions = ['.fasta', '.fna', '.fa']

    curr_fasta_out = basename(fasta_fp)
    for fasta_extension in fasta_extensions:
        if curr_fasta_out.endswith(fasta_extension):
            curr_fasta_out = curr_fasta_out[:-len(fasta_extension)]
            break

    curr_fasta_out += "_rev_primer_truncated.fna"
    output_fp = join(output_dir, curr_fasta_out)
    log_fp = join(output_dir, "rev_primer_truncation.log")

    return output_fp, log_fp
def truncate_rev_primers(fasta_f,
                         output_fp,
                         reverse_primers,
                         truncate_option='truncate_only',
                         primer_mismatches=2):
    """ Locally aligns reverse primers, trucates or removes seqs

    Returns a dict of counters summarizing what was done.

    fasta_f: open file of fasta file
    output_fp: open filepath to write truncated fasta to
    reverse_primers: dictionary of SampleID:reverse primer sequence
    truncate_option: either truncate_only, truncate_remove
    primer_mismatches: number of allowed primer mismatches
    """
    log_data = {
        'sample_id_not_found': 0,
        'reverse_primer_not_found': 0,
        'total_seqs': 0,
        'seqs_written': 0
    }

    for label, seq in parse_fasta(fasta_f):
        # The SampleID is encoded as the first underscore-separated field
        curr_label = label.split('_')[0]
        log_data['total_seqs'] += 1

        # Check fasta label for valid SampleID, if not found, just write seq
        try:
            curr_rev_primer = reverse_primers[curr_label]
        except KeyError:
            log_data['sample_id_not_found'] += 1
            output_fp.write('>%s\n%s\n' % (label, seq))
            log_data['seqs_written'] += 1
            continue

        # Align every candidate primer; keep the hit with fewest mismatches
        mm_tests = {}
        for rev_primer in curr_rev_primer:
            rev_primer_mm, rev_primer_index =\
                local_align_primer_seq(rev_primer, seq)
            mm_tests[rev_primer_mm] = rev_primer_index

        rev_primer_mm = min(mm_tests.keys())
        rev_primer_index = mm_tests[rev_primer_mm]

        if rev_primer_mm > primer_mismatches:
            # Primer not found: always counted; the sequence is written
            # untruncated unless the caller asked for removal of such reads.
            log_data['reverse_primer_not_found'] += 1
            if truncate_option != "truncate_remove":
                log_data['seqs_written'] += 1
                output_fp.write('>%s\n%s\n' % (label, seq))
        else:
            # Check for zero seq length after truncation, will not write seq
            if rev_primer_index > 0:
                log_data['seqs_written'] += 1
                output_fp.write('>%s\n%s\n' % (label, seq[0:rev_primer_index]))

    return log_data
def write_log_file(log_data,
                   log_f):
    """ Writes log file

    Emits a fixed-order summary of the reverse-primer removal run.

    log_data: dictionary of details about reverse primer removal
    log_f: open filepath to write log details
    """
    lines = [
        "Details for removal of reverse primers\n",
        "Original fasta filepath: %s\n" % log_data['fasta_fp'],
        "Total seqs in fasta: %d\n" % log_data['total_seqs'],
        "Mapping filepath: %s\n" % log_data['mapping_fp'],
        "Truncation option: %s\n" % log_data['truncate_option'],
        "Mismatches allowed: %d\n" % log_data['primer_mismatches'],
        "Total seqs written: %d\n" % log_data['seqs_written'],
        "SampleIDs not found: %d\n" % log_data['sample_id_not_found'],
        "Reverse primers not found: %d\n" % log_data['reverse_primer_not_found'],
    ]
    log_f.write("".join(lines))
def truncate_reverse_primer(fasta_fp,
                            mapping_fp,
                            output_dir=".",
                            truncate_option='truncate_only',
                            primer_mismatches=2):
    """ Main program function for finding, removing reverse primer seqs

    fasta_fp: fasta filepath
    mapping_fp: mapping filepath
    output_dir: output directory
    truncate_option: truncation option, either truncate_only, truncate_remove
    primer_mismatches: Number is mismatches allowed in reverse primer"""

    # Sample -> reverse-complemented reverse primer(s), read from the mapping
    reverse_primers = get_rev_primer_seqs(open(mapping_fp, "U"))

    output_fp, log_fp = get_output_filepaths(output_dir, fasta_fp)

    # NOTE(review): file handles opened here are never explicitly closed;
    # relies on interpreter cleanup. Consider "with" blocks.
    log_data = truncate_rev_primers(open(fasta_fp, "U"),
                                    open(
                                        output_fp, "w"), reverse_primers, truncate_option,
                                    primer_mismatches)

    # Record the run parameters alongside the counters for the log
    log_data['fasta_fp'] = fasta_fp
    log_data['mapping_fp'] = mapping_fp
    log_data['truncate_option'] = truncate_option
    log_data['primer_mismatches'] = primer_mismatches

    write_log_file(log_data, open(log_fp, "w"))
| wasade/qiime | qiime/truncate_reverse_primer.py | Python | gpl-2.0 | 7,049 |
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- http://www.mdanalysis.org
# Copyright (c) 2006-2016 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
from __future__ import absolute_import
from numpy.testing import (
dec,
assert_,
assert_equal,
)
from unittest import skip
import MDAnalysis as mda
from MDAnalysisTests.datafiles import PSF, DCD
from MDAnalysisTests import parser_not_found
class TestSegmentGroup(object):
    # Legacy tests from before 363
    @dec.skipif(parser_not_found('DCD'),
                'DCD parser not available. Are you using python 3?')
    def setUp(self):
        """Set up the standard AdK system in implicit solvent."""
        self.universe = mda.Universe(PSF, DCD)
        # All segments of the test universe, reused by most tests below
        self.g = self.universe.atoms.segments

    def test_newSegmentGroup(self):
        """test that slicing a SegmentGroup returns a new SegmentGroup (Issue 135)"""
        g = self.universe.atoms.segments
        newg = g[:]
        assert_(isinstance(newg, mda.core.groups.SegmentGroup))
        assert_equal(len(newg), len(g))

    def test_n_atoms(self):
        # 3341 atoms in the PSF/DCD AdK test system
        assert_equal(self.g.n_atoms, 3341)

    def test_n_residues(self):
        # 214 residues in the PSF/DCD AdK test system
        assert_equal(self.g.n_residues, 214)

    def test_resids_dim(self):
        # One resid array per segment, matching that segment's residues
        assert_equal(len(self.g.resids), len(self.g))
        for seg, resids in zip(self.g, self.g.resids):
            assert_(len(resids) == len(seg.residues))
            assert_equal(seg.residues.resids, resids)

    def test_resnums_dim(self):
        # One resnum array per segment, matching that segment's residues
        assert_equal(len(self.g.resnums), len(self.g))
        for seg, resnums in zip(self.g, self.g.resnums):
            assert_(len(resnums) == len(seg.residues))
            assert_equal(seg.residues.resnums, resnums)

    def test_segids_dim(self):
        assert_equal(len(self.g.segids), len(self.g))

    def test_set_segids(self):
        # Assigning to .segids must propagate back to the universe
        s = self.universe.select_atoms('all').segments
        s.segids = 'ADK'
        assert_equal(self.universe.segments.segids, ['ADK'],
                     err_msg="failed to set_segid on segments")

    def test_set_segid_updates_self(self):
        # The group used for the assignment must itself reflect the change
        g = self.universe.select_atoms("resid 10:18").segments
        g.segids = 'ADK'
        assert_equal(g.segids, ['ADK'],
                     err_msg="old selection was not changed in place after set_segid")

    def test_atom_order(self):
        # segments.atoms must come back in ascending index order
        assert_equal(self.universe.segments.atoms.indices,
                     sorted(self.universe.segments.atoms.indices))
| kain88-de/mdanalysis | testsuite/MDAnalysisTests/core/test_segmentgroup.py | Python | gpl-2.0 | 3,335 |
import urllib2, urllib, sys, os, re, random, copy
import htmlcleaner
import httplib2
from BeautifulSoup import BeautifulSoup, Tag, NavigableString
import xbmc,xbmcplugin,xbmcgui,xbmcaddon
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
from scrapers import CommonScraper
net = Net()
class VidicsServiceSracper(CommonScraper):
    """Donnie scraper service for vidics.ch.

    Crawls the site's paginated movie/TV listings into the shared database
    (via helpers inherited from CommonScraper) and turns a title's mirror
    page into prioritised hoster stream links.
    NOTE(review): the class name misspells 'Scraper'; kept as-is because
    external callers may reference it by this exact name.
    """
    def __init__(self, settingsid, DB=None, REG=None):
        # Optional shared handles: DB (database wrapper) and REG (settings
        # registry); only bound when supplied by the caller.
        if DB:
            self.DB=DB
        if REG:
            self.REG=REG
        self.addon_id = 'script.module.donnie'
        self.service='vidics'
        self.name = 'vidics.ch'
        self.raiseError = False
        self.referrer = 'http://www.vidics.ch/'
        self.base_url = 'http://www.vidics.ch/'
        self.user_agent = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'
        self.provides = []
        self._streams = []
        self._episodes = []
        # Index characters used for alphabetical browsing.
        self.AZ = ['1', 'A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y', 'Z']
        self.settingsid = settingsid
        self._loadsettings()
    def _getShows(self, silent=False):
        """Crawl every TV-show listing page (newest-first site order) into
        the DB, resuming from rw_update_status if a crawl was interrupted."""
        uri = '/Category-TvShows/Genre-Any/Letter-Any/LatestFirst/1.htm'
        self.log("Getting All shows for %s", self.service)
        pDialog = xbmcgui.DialogProgress()
        if not silent:
            pDialog.create('Downloading TV Shows from ' + self.service)
        pagedata = self.getURL(uri, append_base_url=True)
        if pagedata=='':
            return
        soup = BeautifulSoup(pagedata)
        # The last pagination link's href carries the total page count.
        table = soup.find('table', {'class' : 'pagination'});
        pagelinks = table.findAll('a');
        pages = pagelinks[len(pagelinks)-1]
        pages = re.search('\d+',pages['href']).group(0)
        # Resume support: shift the saved position by however many pages
        # were added to the (newest-first) listing since the saved run.
        row = self.DB.query("SELECT current, full FROM rw_update_status WHERE identifier='tvshows' AND provider=?", [self.service])
        if len(row) > 0:
            offset = int(pages) - int(row[1])
            current = int(row[0]) + offset - 1
        else:
            current = pages
        # Walk from the highest (oldest) page down so resuming is stable.
        for page in reversed(range(1,int(current)+1)):
            percent = int((100 * (int(pages) - page))/int(pages))
            if not self._getShowsByPg(str(page), pages, pDialog, percent, silent):
                break
            if not silent:
                if (pDialog.iscanceled()):
                    print 'Canceled download'
                    return
        if not silent:
            pDialog.close()
        self.update_cache_status("tvshows")
        self.log('Dowload complete!', level=0)
    def _getShowsByPg(self, page, pages, pDialog, percent, silent):
        """Parse one TV listing page and add each show to the DB; persists
        the crawl position so _getShows can resume. Returns True on success."""
        self.log("getting TV Shows by %s", page)
        uri = "/Category-TvShows/Genre-Any/Letter-Any/LatestFirst/%s.htm" % page
        pagedata = self.getURL(uri, append_base_url=True)
        if pagedata=='':
            return
        soup = BeautifulSoup(pagedata)
        shows = soup.findAll('a', {'itemprop' : 'url', 'class': 'blue'})
        for show in shows:
            genres = []
            try:
                name = show.find('span', {'itemprop' : 'name'}).string
                year = show.find('span', {'itemprop' : 'copyrightYear'}).string
                href = show['href']
                name = "%s (%s)" % (name, year)
                if not silent:
                    pDialog.update(percent, self.service + ' page: ' + str(page), name)
                character = self.getInitialChr(name)
                self.addShowToDB(name, href, character, year, genres)
            except Exception, e:
                self.log("********Donnie Error: %s, %s" % (self.service, e))
        # NOTE(review): 'page' arrives as a str (callers pass str(page)), so
        # 'page == 1' never matches an int literal in CPython 2 and the
        # resume marker may never be cleared -- confirm intent.
        if page == 1:
            self.DB.execute("DELETE FROM rw_update_status WHERE provider=? and identifier=?", [self.service, 'tvshows'])
        else:
            self.DB.execute("REPLACE INTO rw_update_status(provider, identifier, current, full) VALUES(?, ?, ?, ?)", [self.service, 'tvshows', page, pages])
        self.DB.commit()
        return True
    # Disabled implementation kept for reference as a bare string literal;
    # it is never executed.
    '''def _getNewEpisodes(self, silent=False):
        self.log("Getting new episodes for %s", self.service)
        episodes = []
        pagedata = self.getURL('latest_episodes.xml', append_base_url=True)
        if pagedata=='':
            return False
        soup = BeautifulSoup(pagedata)
        links = soup.findAll('item')
        for link in links:
            title = re.sub(r' Season: (\d+?), Episode: (\d+?) -', r'\1x\2', link.find('title').string)
            episode = [self.service, title, '']
            episodes.append(episode)
        return episodes'''
    def _getEpisodes(self, showid, show, url, pDialog, percent, silent, createFiles=True):
        """Parse a show page's episode links into the DB as season/episode
        entries. Returns False if the page could not be fetched."""
        self.log("Getting episodes for %s", show)
        pagedata = self.getURL(url, append_base_url=True)
        if pagedata=='':
            return False
        soup = BeautifulSoup(pagedata)
        links = soup.findAll('a', {'class' : 'episode'})
        # p1: greyed-out (unavailable) episodes; p2: season/episode numbers
        # taken from the href; p3: episode title between " - " and "(".
        p1 = re.compile('style="color: gray;"')
        p2 = re.compile('-Season-(.+?)-Episode-(.+?)$')
        p3 = re.compile(' - (.+?) \(')
        for link in links:
            try:
                if not p1.search(str(link)):
                    href = link['href']
                    temp = p2.search(href)
                    season = temp.group(1)
                    episode = temp.group(2).zfill(2)
                    try:
                        name = link.find('span')
                        name = p3.search(name.string).group(1)
                    except:
                        # No parsable title; fall back to a generic label.
                        name = "Episode %s" % episode
                    if not silent:
                        display = "%sx%s %s" % (season, episode, name)
                        pDialog.update(percent, show, display)
                    self.addEpisodeToDB(showid, show, name, season, episode, href, createFiles=createFiles)
            except Exception, e:
                self.log("********Donnie Error: %s, %s" % (self.service, e))
        self.DB.commit()
        return True
    def _getMovies(self, silent=False):
        """Movie counterpart of _getShows: crawl all movie listing pages."""
        uri = '/Category-Movies/Genre-Any/Letter-Any/LatestFirst/1.htm'
        self.log("Getting All movies for %s", self.service)
        pDialog = xbmcgui.DialogProgress()
        if not silent:
            pDialog.create('Downloading Movies from ' + self.service)
        pagedata = self.getURL(uri, append_base_url=True)
        if pagedata=='':
            return
        soup = BeautifulSoup(pagedata)
        # Total page count from the last pagination link, as in _getShows.
        table = soup.find('table', {'class' : 'pagination'});
        pagelinks = table.findAll('a');
        pages = pagelinks[len(pagelinks)-1]
        pages = re.search('\d+',pages['href']).group(0)
        row = self.DB.query("SELECT current, full FROM rw_update_status WHERE identifier='movies' AND provider=?", [self.service])
        if len(row) > 0:
            # Resume: account for pages added since the interrupted crawl.
            offset = int(pages) - int(row[1])
            current = int(row[0]) + offset - 1
        else:
            current = pages
        for page in reversed(range(1,int(current)+1)):
            percent = int((100 * (int(pages) - page))/int(pages))
            if not self._getMoviesByPg(str(page), pages, pDialog, percent, silent):
                break
            if not silent:
                if (pDialog.iscanceled()):
                    print 'Canceled download'
                    return
        if not silent:
            pDialog.close()
        self.update_cache_status("movies")
        self.log('Dowload complete!', level=0)
    def _getMoviesByPg(self, page, pages, pDialog, percent, silent):
        """Parse one movie listing page; persists the resume position."""
        self.log("Getting Movies by %s", page)
        uri = "/Category-Movies/Genre-Any/Letter-Any/LatestFirst/%s.htm" % page
        pagedata = self.getURL(uri, append_base_url=True)
        if pagedata=='':
            return
        soup = BeautifulSoup(pagedata)
        movies = soup.findAll('a', {'itemprop' : 'url', 'class': 'blue'})
        for movie in movies:
            genres = []
            try:
                href = movie['href']
                year = movie.find('span', {'itemprop' : 'copyrightYear'}).string
                name = movie.find('span', {'itemprop' : 'name'}).string
                name = "%s (%s)" % (name, year)
                character = self.getInitialChr(name)
                if not silent:
                    pDialog.update(percent, self.service + ' page: ' + str(page), name)
                self.addMovieToDB(name, href, self.service + '://' + href, character, year, genres)
            except Exception, e:
                self.log("********Donnie Error: %s, %s" % (self.service, e))
        # NOTE(review): same str-vs-int comparison as _getShowsByPg; the
        # 'page == 1' branch may never run -- confirm intent.
        if page == 1:
            self.DB.execute("DELETE FROM rw_update_status WHERE provider=? and identifier=?", [self.service, 'movies'])
        else:
            self.DB.execute("REPLACE INTO rw_update_status(provider, identifier, current, full) VALUES(?, ?, ?, ?)", [self.service, 'movies', page, pages])
        self.DB.commit()
        return True
    def _getStreams(self, episodeid=None, movieid=None):
        """Record (and return, via cache hits) mirror streams for one
        episode or movie. Serves from the mirror cache when enabled;
        otherwise scrapes the title page and stores each recognised
        hoster link with its configured priority."""
        streams = []
        url = self.getServices(episodeid=episodeid, movieid=movieid)
        if not url:
            return streams
        if self.ENABLE_MIRROR_CACHING:
            if url:
                self.log(url)
                cache_url = url
            else:
                return streams
            cached = self.checkStreamCache(cache_url)
            if len(cached) > 0:
                self.log("Loading streams from cache")
                for temp in cached:
                    self.getStreamByPriority(temp[0], temp[1])
                return cached
        self.log("Locating streams for provided by service: %s", self.service)
        pagedata = self.getURL(url, append_base_url=True)
        if pagedata=='':
            return
        soup = BeautifulSoup(pagedata)
        spans = soup.findAll('div', {'class' : 'movie_link'})
        for span in spans:
            print span
            a = span.find('a', { "rel" : 'nofollow' })
            if a:
                # Map the hoster label displayed on the page to its domain.
                host = self.whichHost(str(a.string))
                print host
                #host = host.find('script').string
                raw_url = a['href']
                print raw_url
                if self.checkProviders(host):
                    #streams.append(['Vidics - ' + host, self.service + '://' + raw_url])
                    self.getStreamByPriority('Vidics - ' + host, self.service + '://' + raw_url)
                    if self.ENABLE_MIRROR_CACHING:
                        self.cacheStreamLink(cache_url, 'Vidics - ' + host, self.service + '://' + raw_url)
        self.DB.commit()
        #return streams
    def getStreamByPriority(self, link, stream):
        """Insert one stream row, copying the priority configured for the
        host named after '- ' in *link*."""
        self.log(link)
        host = re.search('- (.+?)$', link).group(1)
        SQL = "INSERT INTO rw_stream_list(stream, url, priority, machineid) " \
                "SELECT ?, ?, priority, ? " \
                "FROM rw_providers " \
                "WHERE mirror=? and provider=?"
        self.DB.execute(SQL, [link, stream, self.REG.getSetting('machine-id'), host, self.service])
    def _getServicePriority(self, link):
        """Look up the configured priority for the host named in *link*."""
        self.log(link)
        host = re.search('- (.+?)$', link).group(1)
        row = self.DB.query("SELECT priority FROM rw_providers WHERE mirror=? and provider=?", [host, self.service])
        return row[0]
    def sortStreams(self, random):
        # Sort stream entries by display label; note the parameter name
        # shadows the stdlib 'random' module within this method.
        streams = sorted(random, key=lambda s: s[0])
        return streams
    def whichHost(self, host):
        """Translate a hoster label as shown on vidics.ch into a domain.
        Truncated labels (the site cuts long names, e.g. 'Nowvideo.c..')
        are listed exactly as displayed. Unknown labels map to 'Unknown'.
        NOTE(review): 'movepod.in' looks like a typo for 'movpod.in' --
        confirm before changing."""
        table = { 'Movpod' : 'movepod.in',
                  'Gorillavid' : 'gorillavid.in',
                  'Daclips' : 'daclips.com',
                  'Videoweed' : 'videoweed.es',
                  'Novamov' : 'novamov.com',
                  'Nowvideo.c..' : 'nowvideo.com',
                  'Moveshare' : 'moveshare.net',
                  'Divxstage' : 'divxstage.eu',
                  'Sharesix' : 'sharesix.com',
                  'Filenuke' : 'filenuke.com',
                  'Ilenuke' : 'filenuke.com',
                  'Uploadc' : 'uploadc.com',
                  'Putlocker' : 'putlocker.com',
                  'Sockshare' : 'sockshare.com',
                  '80upload' : '180upload.com',
                  'Illionuplo..' : 'billionuploads.com',
                  'Ovreel' : 'movreel.com',
                  'Emuploads' : 'lemuploads.com',
                }
        try:
            host_url = table[host]
            return host_url
        except:
            return 'Unknown'
    def _resolveStream(self, stream):
        """Resolve a 'vidics://<path>' token to a playable URL: follow the
        site's redirect (without auto-following) and hand the Location
        header to urlresolver."""
        import urlresolver
        resolved_url = ''
        raw_url = stream.replace(self.service + '://', '')
        link_url = self.base_url + raw_url
        # Fetch without following redirects: the 'location' header holds
        # the actual hoster URL.
        h = httplib2.Http()
        h.follow_redirects = False
        (response, body) = h.request(link_url)
        resolved_url = urlresolver.HostedMediaFile(url=response['location']).resolve()
        #self.logHost(self.service, raw_url)
        return resolved_url
    def _resolveIMDB(self, uri):
        """Scrape the IMDB title id from a vidics page; returns the padded
        id, or False when no IMDB link is present."""
        imdb = ''
        self.log("Resolving IMDB for %s", uri)
        pagedata = self.getURL(uri, append_base_url=True)
        if pagedata=='':
            return
        try:
            imdb = re.search('http://www.imdb.com/title/(.+?)/', pagedata).group(1)
        except:
            return False
        return self.padIMDB(imdb)
| jolid/script.module.donnie | lib/donnie/vidics.py | Python | gpl-2.0 | 10,939 |
from flask import Flask
# Module-level WSGI application object for the quiz package; Flask uses
# __name__ to locate package resources. The trailing 'import views'
# presumably registers the routes at import time (standard Flask
# circular-import idiom) -- confirm that views imports 'app' from here.
app = Flask(__name__)
import views | miqueiaspenha/gerenciadordeprovas | quiz/__init__.py | Python | gpl-2.0 | 60 |
#! /usr/bin/env python
#
# CRC32 WJ103
#
import zlib
def crc32(filename):
    '''Return the CRC-32 checksum of *filename* as a lowercase hex string.

    The file is read in 16 KiB chunks so arbitrarily large files are
    checksummed without loading them fully into memory. Raises IOError
    if the file cannot be opened (same as the original behaviour, since
    open() never returns a falsy file object).
    '''
    # Binary mode is essential: text mode would translate newlines on
    # some platforms and corrupt the checksum of binary files.
    crc = 0
    with open(filename, 'rb') as f:
        while 1:
            buf = f.read(16384)
            if not buf:
                break
            crc = zlib.crc32(buf, crc)
    # Mask to 32 bits: on Python 2, zlib.crc32() can return a negative
    # (signed) value, which '%x' would format with a leading '-'.
    return '%x' % (crc & 0xffffffff)
if __name__ == '__main__':
    import sys
    # CLI mode: print "<crc32> <filename>" for each file named on the
    # command line, mimicking md5sum-style output (Python 2 print).
    for file in sys.argv[1:]:
        print '%s %s' % (crc32(file), file)
# EOB
| walterdejong/synctool | contrib/attic/crc32.py | Python | gpl-2.0 | 463 |
# subrepo.py - sub-repository handling for Mercurial
#
# Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import errno, os, re, shutil, posixpath, sys
import xml.dom.minidom
import stat, subprocess, tarfile
from i18n import _
import config, scmutil, util, node, error, cmdutil, bookmarks, match as matchmod
# Placeholder for the mercurial 'hg' module; filled in lazily by subrepo()
# below to avoid a circular import at module load time.
hg = None
propertycache = util.propertycache
# Substate tuple (source, revision, kind) for an absent subrepository.
nullstate = ('', '', 'empty')
def _expandedabspath(path):
    """Given a path or URL: return real URLs untouched, otherwise expand
    the path (user/env references) and make it absolute."""
    local = util.url(util.urllocalpath(util.expandpath(path)))
    if local.scheme:
        # a genuine URL (has a scheme): hand it back unchanged
        return path
    return util.normpath(os.path.abspath(local.path))
def _getstorehashcachename(remotepath):
    '''get a unique filename for the store hash cache of a remote repository'''
    digest = util.sha1(_expandedabspath(remotepath)).hexdigest()
    # twelve hex digits are plenty to keep cache filenames distinct
    return digest[:12]
def _calcfilehash(filename):
    """Return the SHA-1 hex digest of *filename*'s contents.

    A missing file hashes as the empty string so callers can compare
    digests without checking for existence first.
    """
    data = ''
    if os.path.exists(filename):
        # 'with' guarantees the descriptor is closed even if read()
        # raises (the original leaked the fd on a read error)
        with open(filename, 'rb') as fd:
            data = fd.read()
    return util.sha1(data).hexdigest()
class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""
    def __init__(self, *args, **kw):
        error.Abort.__init__(self, *args, **kw)
        # remember which subrepo failed and the originating exc_info
        for extra in ('subrepo', 'cause'):
            setattr(self, extra, kw.get(extra))
def annotatesubrepoerror(func):
    """Decorator for subrepo methods: rewrap error.Abort as SubrepoAbort.

    The wrapper appends the subrepo path to the error message and raises
    SubrepoAbort so outer frames know the error was already annotated and
    do not annotate it again.
    """
    def decoratedmethod(self, *args, **kargs):
        try:
            res = func(self, *args, **kargs)
        except SubrepoAbort, ex:
            # This exception has already been handled
            raise ex
        except error.Abort, ex:
            subrepo = subrelpath(self)
            errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
                               cause=sys.exc_info())
        return res
    return decoratedmethod
def state(ctx, ui):
    """return a state dict, mapping subrepo paths configured in .hgsub
    to tuple: (source from .hgsub, revision from .hgsubstate, kind
    (key in types dict))
    """
    p = config.config()
    # Read and parse one spec file from the changectx. A file that is
    # listed but whose data() raises ENOENT is warned about and treated
    # as removed; a file absent from the ctx entirely is an error.
    # p.parse() may recurse back into read for included files.
    def read(f, sections=None, remap=None):
        if f in ctx:
            try:
                data = ctx[f].data()
            except IOError, err:
                if err.errno != errno.ENOENT:
                    raise
                # handle missing subrepo spec files as removed
                ui.warn(_("warning: subrepo spec file %s not found\n") % f)
                return
            p.parse(f, data, sections, remap, read)
        else:
            raise util.Abort(_("subrepo spec file %s not found") % f)
    if '.hgsub' in ctx:
        read('.hgsub')
    # user-configured [subpaths] entries override those from the file
    for path, src in ui.configitems('subpaths'):
        p.set('subpaths', path, src, ui.configsource('subpaths', path))
    # rev maps subrepo path -> pinned revision, parsed from .hgsubstate
    # ("<revision> <path>" per line).
    rev = {}
    if '.hgsubstate' in ctx:
        try:
            for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
                l = l.lstrip()
                if not l:
                    continue
                try:
                    revision, path = l.split(" ", 1)
                except ValueError:
                    raise util.Abort(_("invalid subrepository revision "
                                       "specifier in .hgsubstate line %d")
                                     % (i + 1))
                rev[path] = revision
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise
    # Apply each [subpaths] pattern as a one-shot regex rewrite of src.
    def remap(src):
        for pattern, repl in p.items('subpaths'):
            # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
            # does a string decode.
            repl = repl.encode('string-escape')
            # However, we still want to allow back references to go
            # through unharmed, so we turn r'\\1' into r'\1'. Again,
            # extra escapes are needed because re.sub string decodes.
            repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
            try:
                src = re.sub(pattern, repl, src, 1)
            except re.error, e:
                raise util.Abort(_("bad subrepository pattern in %s: %s")
                                 % (p.source('subpaths', pattern), e))
        return src
    # Build the result: split off a "[kind]" prefix, resolve relative
    # sources against the parent repo's own source, and remap.
    state = {}
    for path, src in p[''].items():
        kind = 'hg'
        if src.startswith('['):
            if ']' not in src:
                raise util.Abort(_('missing ] in subrepo source'))
            kind, src = src.split(']', 1)
            kind = kind[1:]
            src = src.lstrip() # strip any extra whitespace after ']'
        if not util.url(src).isabs():
            parent = _abssource(ctx._repo, abort=False)
            if parent:
                parent = util.url(parent)
                parent.path = posixpath.join(parent.path or '', src)
                parent.path = posixpath.normpath(parent.path)
                joined = str(parent)
                # Remap the full joined path and use it if it changes,
                # else remap the original source.
                remapped = remap(joined)
                if remapped == joined:
                    src = remap(src)
                else:
                    src = remapped
        src = remap(src)
        state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
    return state
def writestate(repo, state):
    """rewrite .hgsubstate in (outer) repo with these subrepo states"""
    # one "<revision> <path>" line per subrepo, sorted by path
    out = []
    for path in sorted(state):
        out.append('%s %s\n' % (state[path][1], path))
    repo.wwrite('.hgsubstate', ''.join(out), '')
def submerge(repo, wctx, mctx, actx, overwrite):
    """delegated from merge.applyupdates: merging of .hgsubstate file
    in working context, merging context and ancestor context"""
    if mctx == actx: # backwards?
        actx = wctx.p1()
    s1 = wctx.substate
    s2 = mctx.substate
    sa = actx.substate
    # sm accumulates the merged substate, written back at the end
    sm = {}
    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
    def debug(s, msg, r=""):
        if r:
            r = "%s:%s:%s" % r
        repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
    # Pass 1: subrepos present in the local (working) context. Compare
    # the (possibly dirty-flagged) local state ld against the remote r
    # and ancestor a to decide keep/get/merge/prompt.
    for s, l in sorted(s1.iteritems()):
        a = sa.get(s, nullstate)
        ld = l # local state with possible dirty flag for compares
        if wctx.sub(s).dirty():
            ld = (l[0], l[1] + "+")
        if wctx == actx: # overwrite
            a = ld
        if s in s2:
            r = s2[s]
            if ld == r or r == a: # no change or local is newer
                sm[s] = l
                continue
            elif ld == a: # other side changed
                debug(s, "other changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            elif ld[0] != r[0]: # sources differ
                if repo.ui.promptchoice(
                    _(' subrepository sources for %s differ\n'
                      'use (l)ocal source (%s) or (r)emote source (%s)?'
                      '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
                    debug(s, "prompt changed, get", r)
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
            elif ld[1] == a[1]: # local side is unchanged
                debug(s, "other side changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            else:
                # both revisions changed: let the user pick merge/local/remote
                debug(s, "both sides changed")
                option = repo.ui.promptchoice(
                    _(' subrepository %s diverged (local revision: %s, '
                      'remote revision: %s)\n'
                      '(M)erge, keep (l)ocal or keep (r)emote?'
                      '$$ &Merge $$ &Local $$ &Remote')
                    % (s, l[1][:12], r[1][:12]), 0)
                if option == 0:
                    wctx.sub(s).merge(r)
                    sm[s] = l
                    debug(s, "merge with", r)
                elif option == 1:
                    sm[s] = l
                    debug(s, "keep local subrepo revision", l)
                else:
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
                    debug(s, "get remote subrepo revision", r)
        elif ld == a: # remote removed, local unchanged
            debug(s, "remote removed, remove")
            wctx.sub(s).remove()
        elif a == nullstate: # not present in remote or ancestor
            debug(s, "local added, keep")
            sm[s] = l
            continue
        else:
            # local changes vs. remote removal: ask before discarding
            if repo.ui.promptchoice(
                _(' local changed subrepository %s which remote removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % s, 0):
                debug(s, "prompt remove")
                wctx.sub(s).remove()
    # Pass 2: subrepos only present in the merge target.
    for s, r in sorted(s2.items()):
        if s in s1:
            continue
        elif s not in sa:
            debug(s, "remote added, get", r)
            mctx.sub(s).get(r)
            sm[s] = r
        elif r != sa[s]:
            # remote changed it while local removed it: ask to recreate
            if repo.ui.promptchoice(
                _(' remote changed subrepository %s which local removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % s, 0) == 0:
                debug(s, "prompt recreate", r)
                wctx.sub(s).get(r)
                sm[s] = r
    # record merged .hgsubstate
    writestate(repo, sm)
    return sm
def _updateprompt(ui, sub, dirty, local, remote):
    """Ask whether to keep the local or remote subrepo source; returns the
    promptchoice index (0 == local)."""
    # wording differs depending on whether the subrepo has local changes
    if dirty:
        fmt = _(' subrepository sources for %s differ\n'
                'use (l)ocal source (%s) or (r)emote source (%s)?\n'
                '$$ &Local $$ &Remote')
    else:
        fmt = _(' subrepository sources for %s differ (in checked out '
                'version)\n'
                'use (l)ocal source (%s) or (r)emote source (%s)?\n'
                '$$ &Local $$ &Remote')
    return ui.promptchoice(fmt % (subrelpath(sub), local, remote), 0)
def reporelpath(repo):
    """return path to this (sub)repo as seen from outermost repo"""
    # climb the _subparent chain to reach the outermost repository
    outer = repo
    while util.safehasattr(outer, '_subparent'):
        outer = outer._subparent
    prefix = outer.root.rstrip(os.sep)
    # strip "<outer root><sep>" off the front of this repo's root
    return repo.root[len(prefix) + 1:]
def subrelpath(sub):
    """return path to this subrepo as seen from outermost repo"""
    # precomputed relative path wins; then fall back per subrepo kind
    if util.safehasattr(sub, '_relpath'):
        return sub._relpath
    if util.safehasattr(sub, '_repo'):
        return reporelpath(sub._repo)
    return sub._path
def _abssource(repo, push=False, abort=True):
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found."""
    if util.safehasattr(repo, '_subparent'):
        # this is a subrepo: resolve its source relative to the parent's
        # own source, recursing up until an absolute URL is formed
        source = util.url(repo._subsource)
        if source.isabs():
            return str(source)
        source.path = posixpath.normpath(source.path)
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            parent = util.url(util.pconvert(parent))
            parent.path = posixpath.join(parent.path or '', source.path)
            parent.path = posixpath.normpath(parent.path)
            return str(parent)
    else: # recursion reached top repo
        # precedence: in-flight push/pull target, then configured paths,
        # then the share source (if this repo is a share)
        if util.safehasattr(repo, '_subtoppath'):
            return repo._subtoppath
        if push and repo.ui.config('paths', 'default-push'):
            return repo.ui.config('paths', 'default-push')
        if repo.ui.config('paths', 'default'):
            return repo.ui.config('paths', 'default')
        if repo.sharedpath != repo.path:
            # chop off the .hg component to get the default path form
            return os.path.dirname(repo.sharedpath)
    if abort:
        raise util.Abort(_("default path for subrepository not found"))
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subrepo path to the context that should provide it,
    # preferring ctx1. Paths only in ctx2 matter when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    owner = {}
    for path in ctx2.substate:
        owner[path] = ctx2
    for path in ctx1.substate:
        owner[path] = ctx1
    for path in sorted(owner):
        yield path, owner[path].sub(path)
def subrepo(ctx, path):
    """return instance of the right subrepo class for subrepo in path"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    import hg as h
    hg = h
    # pathauditor validates the subrepo path (presumably rejecting paths
    # that escape the repository root -- confirm against scmutil)
    scmutil.pathauditor(ctx._repo.root)(path)
    # state is (source, revision, kind); 'types' maps kind -> subrepo
    # class (defined elsewhere in this module, not visible here)
    state = ctx.substate[path]
    if state[2] not in types:
        raise util.Abort(_('unknown subrepo type %s') % state[2])
    return types[state[2]](ctx, path, state[:2])
# subrepo classes need to implement the following abstract class:
class abstractsubrepo(object):
    """Abstract base class for subrepo implementations.

    Mutating operations raise NotImplementedError; query-style methods
    default to benign empty results so partial implementations degrade
    gracefully.
    """
    def storeclean(self, path):
        """
        returns true if the repository has not changed since it was last
        cloned from or pushed to a given repository.
        """
        return False
    def dirty(self, ignoreupdate=False):
        """returns true if the dirstate of the subrepo is dirty or does not
        match current stored state. If ignoreupdate is true, only check
        whether the subrepo has uncommitted changes in its dirstate.
        """
        raise NotImplementedError
    def basestate(self):
        """current working directory base state, disregarding .hgsubstate
        state and working directory modifications"""
        raise NotImplementedError
    def checknested(self, path):
        """check if path is a subrepository within this repository"""
        return False
    def commit(self, text, user, date):
        """commit the current changes to the subrepo with the given
        log message. Use given user and date if possible. Return the
        new state of the subrepo.
        """
        raise NotImplementedError
    def remove(self):
        """remove the subrepo
        (should verify the dirstate is not dirty first)
        """
        raise NotImplementedError
    def get(self, state, overwrite=False):
        """run whatever commands are needed to put the subrepo into
        this state
        """
        raise NotImplementedError
    def merge(self, state):
        """merge currently-saved state with the new state."""
        raise NotImplementedError
    def push(self, opts):
        """perform whatever action is analogous to 'hg push'
        This may be a no-op on some systems.
        """
        raise NotImplementedError
    def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
        # nothing added by default
        return []
    def status(self, rev2, **opts):
        # seven empty lists, matching the shape of repo.status() results
        return [], [], [], [], [], [], []
    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        pass
    def outgoing(self, ui, dest, opts):
        return 1
    def incoming(self, ui, source, opts):
        return 1
    def files(self):
        """return filename iterator"""
        raise NotImplementedError
    def filedata(self, name):
        """return file data"""
        raise NotImplementedError
    def fileflags(self, name):
        """return file flags"""
        return ''
    def archive(self, ui, archiver, prefix, match=None):
        """Add this subrepo's files to *archiver*, reporting progress;
        returns the number of files archived."""
        if match is not None:
            files = [f for f in self.files() if match(f)]
        else:
            files = self.files()
        total = len(files)
        relpath = subrelpath(self)
        ui.progress(_('archiving (%s)') % relpath, 0,
                    unit=_('files'), total=total)
        for i, name in enumerate(files):
            flags = self.fileflags(name)
            # Python 2 octal literals: executables 0755, others 0644
            mode = 'x' in flags and 0755 or 0644
            symlink = 'l' in flags
            archiver.addfile(os.path.join(prefix, self._path, name),
                             mode, symlink, self.filedata(name))
            ui.progress(_('archiving (%s)') % relpath, i + 1,
                        unit=_('files'), total=total)
        ui.progress(_('archiving (%s)') % relpath, None)
        return total
    def walk(self, match):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function
        '''
        pass
    def forget(self, ui, match, prefix):
        # (forgotten, failed) -- nothing to forget by default
        return ([], [])
    def revert(self, ui, substate, *pats, **opts):
        ui.warn('%s: reverting %s subrepos is unsupported\n' \
                % (substate[0], substate[2]))
        return []
class hgsubrepo(abstractsubrepo):
def __init__(self, ctx, path, state):
self._path = path
self._state = state
r = ctx._repo
root = r.wjoin(path)
create = False
if not os.path.exists(os.path.join(root, '.hg')):
create = True
util.makedirs(root)
self._repo = hg.repository(r.baseui, root, create=create)
for s, k in [('ui', 'commitsubrepos')]:
v = r.ui.config(s, k)
if v:
self._repo.ui.setconfig(s, k, v)
self._repo.ui.setconfig('ui', '_usedassubrepo', 'True')
self._initrepo(r, state[0], create)
def storeclean(self, path):
clean = True
lock = self._repo.lock()
itercache = self._calcstorehash(path)
try:
for filehash in self._readstorehashcache(path):
if filehash != itercache.next():
clean = False
break
except StopIteration:
# the cached and current pull states have a different size
clean = False
if clean:
try:
itercache.next()
# the cached and current pull states have a different size
clean = False
except StopIteration:
pass
lock.release()
return clean
def _calcstorehash(self, remotepath):
'''calculate a unique "store hash"
This method is used to to detect when there are changes that may
require a push to a given remote path.'''
# sort the files that will be hashed in increasing (likely) file size
filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
yield '# %s\n' % _expandedabspath(remotepath)
for relname in filelist:
absname = os.path.normpath(self._repo.join(relname))
yield '%s = %s\n' % (relname, _calcfilehash(absname))
def _getstorehashcachepath(self, remotepath):
'''get a unique path for the store hash cache'''
return self._repo.join(os.path.join(
'cache', 'storehash', _getstorehashcachename(remotepath)))
def _readstorehashcache(self, remotepath):
'''read the store hash cache for a given remote repository'''
cachefile = self._getstorehashcachepath(remotepath)
if not os.path.exists(cachefile):
return ''
fd = open(cachefile, 'r')
pullstate = fd.readlines()
fd.close()
return pullstate
def _cachestorehash(self, remotepath):
'''cache the current store hash
Each remote repo requires its own store hash cache, because a subrepo
store may be "clean" versus a given remote repo, but not versus another
'''
cachefile = self._getstorehashcachepath(remotepath)
lock = self._repo.lock()
storehash = list(self._calcstorehash(remotepath))
cachedir = os.path.dirname(cachefile)
if not os.path.exists(cachedir):
util.makedirs(cachedir, notindexed=True)
fd = open(cachefile, 'w')
fd.writelines(storehash)
fd.close()
lock.release()
@annotatesubrepoerror
def _initrepo(self, parentrepo, source, create):
self._repo._subparent = parentrepo
self._repo._subsource = source
if create:
fp = self._repo.opener("hgrc", "w", text=True)
fp.write('[paths]\n')
def addpathconfig(key, value):
if value:
fp.write('%s = %s\n' % (key, value))
self._repo.ui.setconfig('paths', key, value)
defpath = _abssource(self._repo, abort=False)
defpushpath = _abssource(self._repo, True, abort=False)
addpathconfig('default', defpath)
if defpath != defpushpath:
addpathconfig('default-push', defpushpath)
fp.close()
@annotatesubrepoerror
def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
os.path.join(prefix, self._path), explicitonly)
@annotatesubrepoerror
def status(self, rev2, **opts):
try:
rev1 = self._state[1]
ctx1 = self._repo[rev1]
ctx2 = self._repo[rev2]
return self._repo.status(ctx1, ctx2, **opts)
except error.RepoLookupError, inst:
self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
% (inst, subrelpath(self)))
return [], [], [], [], [], [], []
@annotatesubrepoerror
def diff(self, ui, diffopts, node2, match, prefix, **opts):
try:
node1 = node.bin(self._state[1])
# We currently expect node2 to come from substate and be
# in hex format
if node2 is not None:
node2 = node.bin(node2)
cmdutil.diffordiffstat(ui, self._repo, diffopts,
node1, node2, match,
prefix=posixpath.join(prefix, self._path),
listsubrepos=True, **opts)
except error.RepoLookupError, inst:
self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
% (inst, subrelpath(self)))
@annotatesubrepoerror
def archive(self, ui, archiver, prefix, match=None):
self._get(self._state + ('hg',))
total = abstractsubrepo.archive(self, ui, archiver, prefix, match)
rev = self._state[1]
ctx = self._repo[rev]
for subpath in ctx.substate:
s = subrepo(ctx, subpath)
submatch = matchmod.narrowmatcher(subpath, match)
total += s.archive(
ui, archiver, os.path.join(prefix, self._path), submatch)
return total
@annotatesubrepoerror
def dirty(self, ignoreupdate=False):
r = self._state[1]
if r == '' and not ignoreupdate: # no state recorded
return True
w = self._repo[None]
if r != w.p1().hex() and not ignoreupdate:
# different version checked out
return True
return w.dirty() # working directory changed
def basestate(self):
return self._repo['.'].hex()
def checknested(self, path):
return self._repo._checknested(self._repo.wjoin(path))
@annotatesubrepoerror
def commit(self, text, user, date):
# don't bother committing in the subrepo if it's only been
# updated
if not self.dirty(True):
return self._repo['.'].hex()
self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
n = self._repo.commit(text, user, date)
if not n:
return self._repo['.'].hex() # different version checked out
return node.hex(n)
@annotatesubrepoerror
def remove(self):
# we can't fully delete the repository as it may contain
# local-only history
self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
hg.clean(self._repo, node.nullid, False)
def _get(self, state):
source, revision, kind = state
if revision not in self._repo:
self._repo._subsource = source
srcurl = _abssource(self._repo)
other = hg.peer(self._repo, {}, srcurl)
if len(self._repo) == 0:
self._repo.ui.status(_('cloning subrepo %s from %s\n')
% (subrelpath(self), srcurl))
parentrepo = self._repo._subparent
shutil.rmtree(self._repo.path)
other, cloned = hg.clone(self._repo._subparent.baseui, {},
other, self._repo.root,
update=False)
self._repo = cloned.local()
self._initrepo(parentrepo, source, create=True)
self._cachestorehash(srcurl)
else:
self._repo.ui.status(_('pulling subrepo %s from %s\n')
% (subrelpath(self), srcurl))
cleansub = self.storeclean(srcurl)
remotebookmarks = other.listkeys('bookmarks')
self._repo.pull(other)
bookmarks.updatefromremote(self._repo.ui, self._repo,
remotebookmarks, srcurl)
if cleansub:
# keep the repo clean after pull
self._cachestorehash(srcurl)
@annotatesubrepoerror
def get(self, state, overwrite=False):
self._get(state)
source, revision, kind = state
self._repo.ui.debug("getting subrepo %s\n" % self._path)
hg.updaterepo(self._repo, revision, overwrite)
@annotatesubrepoerror
def merge(self, state):
self._get(state)
cur = self._repo['.']
dst = self._repo[state[1]]
anc = dst.ancestor(cur)
def mergefunc():
if anc == cur and dst.branch() == cur.branch():
self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
hg.update(self._repo, state[1])
elif anc == dst:
self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
else:
self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
hg.merge(self._repo, state[1], remind=False)
wctx = self._repo[None]
if self.dirty():
if anc != dst:
if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
mergefunc()
else:
mergefunc()
else:
mergefunc()
    @annotatesubrepoerror
    def push(self, opts):
        """Push the subrepo (and its own subrepos, depth-first).

        Returns False if a nested push failed, None when skipped because
        the store is unchanged since the last push, otherwise the result
        of localrepo.push().
        """
        force = opts.get('force')
        newbranch = opts.get('new_branch')
        ssh = opts.get('ssh')
        # push subrepos depth-first for coherent ordering
        c = self._repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            if c.sub(s).push(opts) == 0:
                return False
        dsturl = _abssource(self._repo, True)
        if not force:
            if self.storeclean(dsturl):
                self._repo.ui.status(
                    _('no changes made to subrepo %s since last push to %s\n')
                    % (subrelpath(self), dsturl))
                return None
        self._repo.ui.status(_('pushing subrepo %s to %s\n') %
                             (subrelpath(self), dsturl))
        other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
        res = self._repo.push(other, force, newbranch=newbranch)
        # the repo is now clean
        self._cachestorehash(dsturl)
        return res
@annotatesubrepoerror
def outgoing(self, ui, dest, opts):
return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
@annotatesubrepoerror
def incoming(self, ui, source, opts):
return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
@annotatesubrepoerror
def files(self):
rev = self._state[1]
ctx = self._repo[rev]
return ctx.manifest()
def filedata(self, name):
rev = self._state[1]
return self._repo[rev][name].data()
def fileflags(self, name):
rev = self._state[1]
ctx = self._repo[rev]
return ctx.flags(name)
def walk(self, match):
ctx = self._repo[None]
return ctx.walk(match)
@annotatesubrepoerror
def forget(self, ui, match, prefix):
return cmdutil.forget(ui, self._repo, match,
os.path.join(prefix, self._path), True)
    @annotatesubrepoerror
    def revert(self, ui, substate, *pats, **opts):
        """Revert the subrepo to the revision recorded in *substate*."""
        # reverting a subrepo is a 2 step process:
        # 1. if the no_backup is not set, revert all modified
        # files inside the subrepo
        # 2. update the subrepo to the revision specified in
        # the corresponding substate dictionary
        ui.status(_('reverting subrepo %s\n') % substate[0])
        if not opts.get('no_backup'):
            # Revert all files on the subrepo, creating backups
            # Note that this will not recursively revert subrepos
            # We could do it if there was a set:subrepos() predicate
            opts = opts.copy()
            opts['date'] = None
            opts['rev'] = substate[1]
            pats = []
            if not opts.get('all'):
                pats = ['set:modified()']
            self.filerevert(ui, *pats, **opts)
        # Update the repo to the revision specified in the given substate
        self.get(substate, overwrite=True)
    def filerevert(self, ui, *pats, **opts):
        """Revert files in the subrepo to opts['rev'], creating backups."""
        ctx = self._repo[opts['rev']]
        parents = self._repo.dirstate.parents()
        # NOTE(review): the incoming *pats is unconditionally replaced below;
        # callers (revert() above) pass the same computed patterns, but the
        # inversion relative to revert() ('all' -> set:modified here) looks
        # deliberate — confirm before changing.
        if opts.get('all'):
            pats = ['set:modified()']
        else:
            pats = []
        cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
class svnsubrepo(abstractsubrepo):
    """Subrepository backed by a Subversion working copy.

    All operations shell out to the 'svn' command-line client.
    """
    def __init__(self, ctx, path, state):
        self._path = path
        self._state = state
        self._ctx = ctx
        self._ui = ctx._repo.ui
        # fail early if no svn client is available
        self._exe = util.findexe('svn')
        if not self._exe:
            raise util.Abort(_("'svn' executable not found for subrepo '%s'")
                             % self._path)
    def _svncommand(self, commands, filename='', failok=False):
        """Run svn with *commands* and return (stdout, stderr).

        *filename* is appended as a path inside the subrepo; pass None to
        omit it.  When *failok* is False, a nonzero exit or any stderr
        output raises/warns respectively.
        """
        cmd = [self._exe]
        extrakw = {}
        if not self._ui.interactive():
            # Making stdin be a pipe should prevent svn from behaving
            # interactively even if we can't pass --non-interactive.
            extrakw['stdin'] = subprocess.PIPE
            # Starting in svn 1.5 --non-interactive is a global flag
            # instead of being per-command, but we need to support 1.4 so
            # we have to be intelligent about what commands take
            # --non-interactive.
            if commands[0] in ('update', 'checkout', 'commit'):
                cmd.append('--non-interactive')
        cmd.extend(commands)
        if filename is not None:
            path = os.path.join(self._ctx._repo.origroot, self._path, filename)
            cmd.append(path)
        env = dict(os.environ)
        # Avoid localized output, preserve current locale for everything else.
        lc_all = env.get('LC_ALL')
        if lc_all:
            env['LANG'] = lc_all
            del env['LC_ALL']
        env['LC_MESSAGES'] = 'C'
        p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             universal_newlines=True, env=env, **extrakw)
        stdout, stderr = p.communicate()
        stderr = stderr.strip()
        if not failok:
            if p.returncode:
                raise util.Abort(stderr or 'exited with code %d' % p.returncode)
            if stderr:
                self._ui.warn(stderr + '\n')
        return stdout, stderr
@propertycache
def _svnversion(self):
output, err = self._svncommand(['--version', '--quiet'], filename=None)
m = re.search(r'^(\d+)\.(\d+)', output)
if not m:
raise util.Abort(_('cannot retrieve svn tool version'))
return (int(m.group(1)), int(m.group(2)))
    def _wcrevs(self):
        """Return (last committed revision, working directory revision)."""
        # Get the working directory revision as well as the last
        # commit revision so we can compare the subrepo state with
        # both. We used to store the working directory one.
        output, err = self._svncommand(['info', '--xml'])
        doc = xml.dom.minidom.parseString(output)
        entries = doc.getElementsByTagName('entry')
        lastrev, rev = '0', '0'
        if entries:
            rev = str(entries[0].getAttribute('revision')) or '0'
            commits = entries[0].getElementsByTagName('commit')
            if commits:
                lastrev = str(commits[0].getAttribute('revision')) or '0'
        return (lastrev, rev)
def _wcrev(self):
return self._wcrevs()[0]
def _wcchanged(self):
"""Return (changes, extchanges, missing) where changes is True
if the working directory was changed, extchanges is
True if any of these changes concern an external entry and missing
is True if any change is a missing entry.
"""
output, err = self._svncommand(['status', '--xml'])
externals, changes, missing = [], [], []
doc = xml.dom.minidom.parseString(output)
for e in doc.getElementsByTagName('entry'):
s = e.getElementsByTagName('wc-status')
if not s:
continue
item = s[0].getAttribute('item')
props = s[0].getAttribute('props')
path = e.getAttribute('path')
if item == 'external':
externals.append(path)
elif item == 'missing':
missing.append(path)
if (item not in ('', 'normal', 'unversioned', 'external')
or props not in ('', 'none', 'normal')):
changes.append(path)
for path in changes:
for ext in externals:
if path == ext or path.startswith(ext + os.sep):
return True, True, bool(missing)
return bool(changes), False, bool(missing)
def dirty(self, ignoreupdate=False):
if not self._wcchanged()[0]:
if self._state[1] in self._wcrevs() or ignoreupdate:
return False
return True
    def basestate(self):
        """Return the revision to record in .hgsubstate for this subrepo."""
        lastrev, rev = self._wcrevs()
        if lastrev != rev:
            # Last committed rev is not the same than rev. We would
            # like to take lastrev but we do not know if the subrepo
            # URL exists at lastrev. Test it and fall back to rev if it
            # is not there.
            try:
                self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
                return lastrev
            except error.Abort:
                pass
        return rev
    @annotatesubrepoerror
    def commit(self, text, user, date):
        """Commit working-copy changes to svn and return the new revision."""
        # user and date are out of our hands since svn is centralized
        changed, extchanged, missing = self._wcchanged()
        if not changed:
            return self.basestate()
        if extchanged:
            # Do not try to commit externals
            raise util.Abort(_('cannot commit svn externals'))
        if missing:
            # svn can commit with missing entries but aborting like hg
            # seems a better approach.
            raise util.Abort(_('cannot commit missing svn entries'))
        commitinfo, err = self._svncommand(['commit', '-m', text])
        self._ui.status(commitinfo)
        newrev = re.search('Committed revision ([0-9]+).', commitinfo)
        if not newrev:
            if not commitinfo.strip():
                # Sometimes, our definition of "changed" differs from
                # svn one. For instance, svn ignores missing files
                # when committing. If there are only missing files, no
                # commit is made, no output and no error code.
                raise util.Abort(_('failed to commit svn changes'))
            raise util.Abort(commitinfo.splitlines()[-1])
        newrev = newrev.groups()[0]
        # sync the working copy metadata to the committed revision
        self._ui.status(self._svncommand(['update', '-r', newrev])[0])
        return newrev
@annotatesubrepoerror
def remove(self):
if self.dirty():
self._ui.warn(_('not removing repo %s because '
'it has changes.\n' % self._path))
return
self._ui.note(_('removing subrepo %s\n') % self._path)
def onerror(function, path, excinfo):
if function is not os.remove:
raise
# read-only files cannot be unlinked under Windows
s = os.stat(path)
if (s.st_mode & stat.S_IWRITE) != 0:
raise
os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
os.remove(path)
path = self._ctx._repo.wjoin(self._path)
shutil.rmtree(path, onerror=onerror)
try:
os.removedirs(os.path.dirname(path))
except OSError:
pass
    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        """Check out state's URL@revision; with overwrite, revert first."""
        if overwrite:
            self._svncommand(['revert', '--recursive'])
        args = ['checkout']
        if self._svnversion >= (1, 5):
            args.append('--force')
        # The revision must be specified at the end of the URL to properly
        # update to a directory which has since been deleted and recreated.
        args.append('%s@%s' % (state[0], state[1]))
        status, err = self._svncommand(args, failok=True)
        if not re.search('Checked out revision [0-9]+.', status):
            if ('is already a working copy for a different URL' in err
                and (self._wcchanged()[:2] == (False, False))):
                # obstructed but clean working copy, so just blow it away.
                self.remove()
                self.get(state, overwrite=False)
                return
            raise util.Abort((status or err).splitlines()[-1])
        self._ui.status(status)
    @annotatesubrepoerror
    def merge(self, state):
        """'Merge' by updating to state's revision (svn cannot truly merge)."""
        old = self._state[1]
        new = state[1]
        wcrev = self._wcrev()
        if new != wcrev:
            # prompt when the working copy has moved or has local changes
            dirty = old == wcrev or self._wcchanged()[0]
            if _updateprompt(self._ui, self, dirty, wcrev, new):
                self.get(state, False)
    def push(self, opts):
        """Report success without doing anything."""
        # push is a no-op for SVN: commits go straight to the central server
        return True
    @annotatesubrepoerror
    def files(self):
        """Return the list of versioned files (UTF-8 bytes) in the checkout."""
        output = self._svncommand(['list', '--recursive', '--xml'])[0]
        doc = xml.dom.minidom.parseString(output)
        paths = []
        for e in doc.getElementsByTagName('entry'):
            kind = str(e.getAttribute('kind'))
            if kind != 'file':
                continue
            # the entry name may span several XML text nodes
            name = ''.join(c.data for c
                           in e.getElementsByTagName('name')[0].childNodes
                           if c.nodeType == c.TEXT_NODE)
            paths.append(name.encode('utf-8'))
        return paths
def filedata(self, name):
return self._svncommand(['cat'], name)[0]
class gitsubrepo(abstractsubrepo):
    """Subrepository backed by a git checkout.

    All operations shell out to the 'git' command-line client.
    """
    def __init__(self, ctx, path, state):
        self._state = state
        self._ctx = ctx
        self._path = path
        # path relative to the outermost repo, for user-facing messages
        self._relpath = os.path.join(reporelpath(ctx._repo), path)
        self._abspath = ctx._repo.wjoin(path)
        self._subparent = ctx._repo
        self._ui = ctx._repo.ui
        self._ensuregit()
def _ensuregit(self):
try:
self._gitexecutable = 'git'
out, err = self._gitnodir(['--version'])
except OSError, e:
if e.errno != 2 or os.name != 'nt':
raise
self._gitexecutable = 'git.cmd'
out, err = self._gitnodir(['--version'])
m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
if not m:
self._ui.warn(_('cannot retrieve git version'))
return
version = (int(m.group(1)), m.group(2), m.group(3))
# git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
# despite the docstring comment. For now, error on 1.4.0, warn on
# 1.5.0 but attempt to continue.
if version < (1, 5, 0):
raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
elif version < (1, 6, 0):
self._ui.warn(_('git subrepo requires at least 1.6.0 or later'))
def _gitcommand(self, commands, env=None, stream=False):
return self._gitdir(commands, env=env, stream=stream)[0]
    def _gitdir(self, commands, env=None, stream=False):
        """Run git with the cwd set to the subrepo checkout."""
        return self._gitnodir(commands, env=env, stream=stream,
                              cwd=self._abspath)
    def _gitnodir(self, commands, env=None, stream=False, cwd=None):
        """Run the git command *commands* and return (stdout, returncode).

        With stream=True, return (stdout pipe, None) without waiting for
        the process.  git versions prior to 1.6.0 are not supported and
        very probably fail.
        """
        self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
        # unless ui.quiet is set, print git's stderr,
        # which is mostly progress and useful info
        errpipe = None
        if self._ui.quiet:
            errpipe = open(os.devnull, 'w')
        p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
                             cwd=cwd, env=env, close_fds=util.closefds,
                             stdout=subprocess.PIPE, stderr=errpipe)
        if stream:
            return p.stdout, None
        retdata = p.stdout.read().strip()
        # wait for the child to exit to avoid race condition.
        p.wait()
        if p.returncode != 0 and p.returncode != 1:
            # there are certain error codes that are ok
            command = commands[0]
            if command in ('cat-file', 'symbolic-ref'):
                return retdata, p.returncode
            # for all others, abort
            raise util.Abort('git %s error %d in %s' %
                             (command, p.returncode, self._relpath))
        return retdata, p.returncode
def _gitmissing(self):
return not os.path.exists(os.path.join(self._abspath, '.git'))
    def _gitstate(self):
        """Return the full hash of the currently checked-out commit."""
        return self._gitcommand(['rev-parse', 'HEAD'])
    def _gitcurrentbranch(self):
        """Return the symbolic ref HEAD points to, or None when detached."""
        current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
        if err:
            current = None
        return current
    def _gitremote(self, remote):
        """Return the URL configured for *remote*.

        NOTE(review): parses the human-readable output of 'git remote show';
        assumes the URL is on the second line — verify against target git
        versions before relying on it.
        """
        out = self._gitcommand(['remote', 'show', '-n', remote])
        line = out.split('\n')[1]
        i = line.index('URL: ') + len('URL: ')
        return line[i:]
    def _githavelocally(self, revision):
        """True if *revision* already exists in the local clone."""
        out, code = self._gitdir(['cat-file', '-e', revision])
        return code == 0
def _gitisancestor(self, r1, r2):
base = self._gitcommand(['merge-base', r1, r2])
return base == r1
    def _gitisbare(self):
        """True if core.bare is set on the subrepo."""
        return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
    def _gitupdatestat(self):
        """This must be run before git diff-index.
        diff-index only looks at changes to file stat;
        this command looks at file contents and updates the stat."""
        self._gitcommand(['update-index', '-q', '--refresh'])
        # no output needed; the side effect on the index is the point
    def _gitbranchmap(self):
        '''returns 2 things:
        a map from git branch to revision
        a map from revision to branches'''
        branch2rev = {}
        rev2branch = {}
        out = self._gitcommand(['for-each-ref', '--format',
                                '%(objectname) %(refname)'])
        for line in out.split('\n'):
            revision, ref = line.split(' ')
            # only local heads and remote-tracking branches are of interest
            if (not ref.startswith('refs/heads/') and
                not ref.startswith('refs/remotes/')):
                continue
            if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
                continue # ignore remote/HEAD redirects
            branch2rev[ref] = revision
            rev2branch.setdefault(revision, []).append(ref)
        return branch2rev, rev2branch
    def _gittracking(self, branches):
        'return map of remote branch to local tracking branch'
        # assumes no more than one local tracking branch for each remote
        tracking = {}
        for b in branches:
            if b.startswith('refs/remotes/'):
                continue
            # strip 'refs/heads/' to get the short branch name
            bname = b.split('/', 2)[2]
            remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
            if remote:
                ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
                tracking['refs/remotes/%s/%s' %
                         (remote, ref.split('/', 2)[2])] = b
        return tracking
    def _abssource(self, source):
        """Resolve *source* to an absolute URL relative to the parent repo."""
        if '://' not in source:
            # recognize the scp syntax as an absolute source
            colon = source.find(':')
            if colon != -1 and '/' not in source[:colon]:
                return source
        self._subsource = source
        return _abssource(self)
    def _fetch(self, source, revision):
        """Make sure *revision* exists locally, cloning/fetching as needed."""
        if self._gitmissing():
            source = self._abssource(source)
            self._ui.status(_('cloning subrepo %s from %s\n') %
                            (self._relpath, source))
            self._gitnodir(['clone', source, self._abspath])
        if self._githavelocally(revision):
            return
        self._ui.status(_('pulling subrepo %s from %s\n') %
                        (self._relpath, self._gitremote('origin')))
        # try only origin: the originally cloned repo
        self._gitcommand(['fetch'])
        if not self._githavelocally(revision):
            raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
                               (revision, self._relpath))
    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False):
        """True if the checkout differs from the recorded state.

        With ignoreupdate, a clean checkout of a different revision does
        not count as dirty.
        """
        if self._gitmissing():
            return self._state[1] != ''
        if self._gitisbare():
            return True
        if not ignoreupdate and self._state[1] != self._gitstate():
            # different version checked out
            return True
        # check for staged changes or modified files; ignore untracked files
        self._gitupdatestat()
        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
        return code == 1
    def basestate(self):
        """Return the revision to record in .hgsubstate for this subrepo."""
        return self._gitstate()
    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        """Fetch and check out state's revision, picking a suitable branch.

        Preference order: refs/heads/master, any local branch at the
        revision, a tracked remote branch (fast-forwarding the tracking
        branch when possible), otherwise a detached-HEAD checkout.
        """
        source, revision, kind = state
        if not revision:
            self.remove()
            return
        self._fetch(source, revision)
        # if the repo was set to be bare, unbare it
        if self._gitisbare():
            self._gitcommand(['config', 'core.bare', 'false'])
            if self._gitstate() == revision:
                self._gitcommand(['reset', '--hard', 'HEAD'])
                return
        elif self._gitstate() == revision:
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # reset --hard will otherwise throw away files added for commit,
                # not just unmark them.
                self._gitcommand(['reset', 'HEAD'])
            self._gitcommand(['reset', '--hard', 'HEAD'])
            return
        branch2rev, rev2branch = self._gitbranchmap()
        def checkout(args):
            cmd = ['checkout']
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # the -f option will otherwise throw away files added for
                # commit, not just unmark them.
                self._gitcommand(['reset', 'HEAD'])
                cmd.append('-f')
            self._gitcommand(cmd + args)
        def rawcheckout():
            # no branch to checkout, check it out with no branch
            self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
                          self._relpath)
            self._ui.warn(_('check out a git branch if you intend '
                            'to make changes\n'))
            checkout(['-q', revision])
        if revision not in rev2branch:
            rawcheckout()
            return
        branches = rev2branch[revision]
        firstlocalbranch = None
        for b in branches:
            if b == 'refs/heads/master':
                # master trumps all other branches
                checkout(['refs/heads/master'])
                return
            if not firstlocalbranch and not b.startswith('refs/remotes/'):
                firstlocalbranch = b
        if firstlocalbranch:
            checkout([firstlocalbranch])
            return
        tracking = self._gittracking(branch2rev.keys())
        # choose a remote branch already tracked if possible
        remote = branches[0]
        if remote not in tracking:
            for b in branches:
                if b in tracking:
                    remote = b
                    break
        if remote not in tracking:
            # create a new local tracking branch
            local = remote.split('/', 3)[3]
            checkout(['-b', local, remote])
        elif self._gitisancestor(branch2rev[tracking[remote]], remote):
            # When updating to a tracked remote branch,
            # if the local tracking branch is downstream of it,
            # a normal `git pull` would have performed a "fast-forward merge"
            # which is equivalent to updating the local branch to the remote.
            # Since we are only looking at branching at update, we need to
            # detect this situation and perform this action lazily.
            if tracking[remote] != self._gitcurrentbranch():
                checkout([tracking[remote]])
            self._gitcommand(['merge', '--ff', remote])
        else:
            # a real merge would be required, just checkout the revision
            rawcheckout()
    @annotatesubrepoerror
    def commit(self, text, user, date):
        """Commit all changes in the checkout; return the new HEAD hash."""
        if self._gitmissing():
            raise util.Abort(_("subrepo %s is missing") % self._relpath)
        cmd = ['commit', '-a', '-m', text]
        env = os.environ.copy()
        if user:
            cmd += ['--author', user]
        if date:
            # git's date parser silently ignores when seconds < 1e9
            # convert to ISO8601
            env['GIT_AUTHOR_DATE'] = util.datestr(date,
                                                  '%Y-%m-%dT%H:%M:%S %1%2')
        self._gitcommand(cmd, env=env)
        # make sure commit works otherwise HEAD might not exist under certain
        # circumstances
        return self._gitstate()
    @annotatesubrepoerror
    def merge(self, state):
        """Merge state's revision into the checkout (fast-forward if able).

        Prompts first when the checkout is dirty relative to the target.
        """
        source, revision, kind = state
        self._fetch(source, revision)
        base = self._gitcommand(['merge-base', revision, self._state[1]])
        self._gitupdatestat()
        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
        def mergefunc():
            if base == revision:
                self.get(state) # fast forward merge
            elif base != self._state[1]:
                self._gitcommand(['merge', '--no-commit', revision])
        if self.dirty():
            if self._gitstate() != revision:
                # code != 0 means uncommitted local changes exist
                dirty = self._gitstate() == self._state[1] or code != 0
                if _updateprompt(self._ui, self, dirty,
                                 self._state[1][:7], revision[:7]):
                    mergefunc()
        else:
            mergefunc()
    @annotatesubrepoerror
    def push(self, opts):
        """Push the recorded revision to origin if it is not already there.

        Returns True on success or nothing-to-do, False on failure.
        """
        force = opts.get('force')
        if not self._state[1]:
            return True
        if self._gitmissing():
            raise util.Abort(_("subrepo %s is missing") % self._relpath)
        # if a branch in origin contains the revision, nothing to do
        branch2rev, rev2branch = self._gitbranchmap()
        if self._state[1] in rev2branch:
            for b in rev2branch[self._state[1]]:
                if b.startswith('refs/remotes/origin/'):
                    return True
        for b, revision in branch2rev.iteritems():
            if b.startswith('refs/remotes/origin/'):
                if self._gitisancestor(self._state[1], revision):
                    return True
        # otherwise, try to push the currently checked out branch
        cmd = ['push']
        if force:
            cmd.append('--force')
        current = self._gitcurrentbranch()
        if current:
            # determine if the current branch is even useful
            if not self._gitisancestor(self._state[1], current):
                self._ui.warn(_('unrelated git branch checked out '
                                'in subrepo %s\n') % self._relpath)
                return False
            self._ui.status(_('pushing branch %s of subrepo %s\n') %
                            (current.split('/', 2)[2], self._relpath))
            self._gitcommand(cmd + ['origin', current])
            return True
        else:
            self._ui.warn(_('no branch checked out in subrepo %s\n'
                            'cannot push revision %s\n') %
                          (self._relpath, self._state[1]))
            return False
    @annotatesubrepoerror
    def remove(self):
        """Remove the checkout's working files, keeping local history.

        Marks the repo bare and deletes everything but .git; refuses to
        remove a dirty checkout.
        """
        if self._gitmissing():
            return
        if self.dirty():
            self._ui.warn(_('not removing repo %s because '
                            'it has changes.\n') % self._relpath)
            return
        # we can't fully delete the repository as it may contain
        # local-only history
        self._ui.note(_('removing subrepo %s\n') % self._relpath)
        self._gitcommand(['config', 'core.bare', 'true'])
        for f in os.listdir(self._abspath):
            if f == '.git':
                continue
            path = os.path.join(self._abspath, f)
            if os.path.isdir(path) and not os.path.islink(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
    def archive(self, ui, archiver, prefix, match=None):
        """Add the subrepo's files at the recorded revision to *archiver*.

        Returns the number of files archived.
        """
        total = 0
        source, revision = self._state
        if not revision:
            return total
        self._fetch(source, revision)
        # Parse git's native archive command.
        # This should be much faster than manually traversing the trees
        # and objects with many subprocess calls.
        tarstream = self._gitcommand(['archive', revision], stream=True)
        tar = tarfile.open(fileobj=tarstream, mode='r|')
        relpath = subrelpath(self)
        ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
        for i, info in enumerate(tar):
            if info.isdir():
                continue
            if match and not match(info.name):
                continue
            if info.issym():
                data = info.linkname
            else:
                data = tar.extractfile(info).read()
            archiver.addfile(os.path.join(prefix, self._path, info.name),
                             info.mode, info.issym(), data)
            total += 1
            ui.progress(_('archiving (%s)') % relpath, i + 1,
                        unit=_('files'))
        ui.progress(_('archiving (%s)') % relpath, None)
        return total
    @annotatesubrepoerror
    def status(self, rev2, **opts):
        """Return hg-style status lists (modified, added, removed, ...).

        deleted/unknown/ignored/clean are always empty: git diff-index
        and diff-tree do not report them.
        """
        rev1 = self._state[1]
        if self._gitmissing() or not rev1:
            # if the repo is missing, return no results
            return [], [], [], [], [], [], []
        modified, added, removed = [], [], []
        self._gitupdatestat()
        if rev2:
            command = ['diff-tree', rev1, rev2]
        else:
            command = ['diff-index', rev1]
        out = self._gitcommand(command)
        for line in out.split('\n'):
            tab = line.find('\t')
            if tab == -1:
                continue
            # the status letter immediately precedes the tab
            status, f = line[tab - 1], line[tab + 1:]
            if status == 'M':
                modified.append(f)
            elif status == 'A':
                added.append(f)
            elif status == 'D':
                removed.append(f)
        deleted = unknown = ignored = clean = []
        return modified, added, removed, deleted, unknown, ignored, clean
# registry mapping the kind field of .hgsub entries to implementations
types = {
    'hg': hgsubrepo,
    'svn': svnsubrepo,
    'git': gitsubrepo,
    }
| jordigh/mercurial-crew | mercurial/subrepo.py | Python | gpl-2.0 | 56,360 |
from aces.materials.POSCAR import structure as Material
class structure(Material):
    """MoN2 (alpha, rectangular cell) material definition."""
    def getPOSCAR(self):
        return self.getMinimized()
    def csetup(self):
        """Build a circular band path of 8 k-points around Gamma.

        NOTE(review): assumes self.bandpoints is initialized by the parent
        Material class — confirm, since the local init is commented out.
        """
        from ase.dft.kpoints import ibz_points
        #self.bandpoints=ibz_points['hexagonal']
        import numpy as np
        x=0.5*np.cos(np.arange(8)/8.0*2.0*np.pi)
        y=0.5*np.sin(np.arange(8)/8.0*2.0*np.pi)
        self.bandpath=['Gamma']
        for i in range(8):
            # clamp components to +/-0.5 of the zone boundary
            if(np.abs(x[i])>0.2):x[i]/=np.abs(x[i])*2.0
            if(np.abs(y[i])>0.2):y[i]/=np.abs(y[i])*2.0
            self.bandpoints['X'+str(i)]=[x[i],y[i],0.0]
            self.bandpath.append('X'+str(i))
        self.bandpath.append('Gamma')
        #self.bandpath=['Gamma',"X2"]
    def getMinimized(self):
        # relaxed POSCAR: 2 Mo + 4 N in a 2.99 x 5.18 A cell with vacuum
        return """Mo N
   1.0000000000000000
     2.9916000366000000    0.0000000000000000    0.0000000000000000
     0.0000000000000000    5.1814560994168932    0.0000000000000000
     0.0000000000000000    0.0000000000000000   25.0000000000000000
   Mo   N
     2     4
Direct
  0.5000000000000000  0.5000000000000000  0.5000000000000000
  0.0000000000000000  0.0000000000000000  0.5000000000000000
  0.5000000000000000  0.8333333333333335  0.4555099610000000
  0.5000000000000000  0.8333333333333335  0.5444900390000000
  0.0000000000000000  0.3333333333333333  0.4555099610000000
  0.0000000000000000  0.3333333333333333  0.5444900390000000
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import tcms.core.models.base
from django.conf import settings
import tcms.core.models.fields
class Migration(migrations.Migration):
    """Initial schema for the testcases app (auto-generated by Django).

    Creates the test-case tables and their relations to management,
    testplans and the user model.  Applied migrations should not be
    hand-edited.
    """
    dependencies = [
        ('management', '0001_initial'),
        ('testplans', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('object_pk', models.PositiveIntegerField(null=True, verbose_name=b'object ID', blank=True)),
                ('name', models.CharField(max_length=50)),
                ('email', models.EmailField(max_length=254, db_index=True)),
                ('date_joined', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'db_table': 'tcms_contacts',
            },
        ),
        migrations.CreateModel(
            name='TestCase',
            fields=[
                ('case_id', models.AutoField(max_length=10, serialize=False, primary_key=True)),
                ('create_date', models.DateTimeField(auto_now_add=True, db_column=b'creation_date')),
                ('is_automated', models.IntegerField(default=0, db_column=b'isautomated')),
                ('is_automated_proposed', models.BooleanField(default=False)),
                ('script', models.TextField(blank=True)),
                ('arguments', models.TextField(blank=True)),
                ('extra_link', models.CharField(default=None, max_length=1024, null=True, blank=True)),
                ('summary', models.CharField(max_length=255, blank=True)),
                ('requirement', models.CharField(max_length=255, blank=True)),
                ('alias', models.CharField(max_length=255, blank=True)),
                ('estimated_time', tcms.core.models.fields.DurationField(default=0, db_column=b'estimated_time')),
                ('notes', models.TextField(blank=True)),
            ],
            options={
                'db_table': 'test_cases',
            },
            bases=(models.Model, tcms.core.models.base.UrlMixin),
        ),
        migrations.CreateModel(
            name='TestCaseAttachment',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
            ],
            options={
                'db_table': 'test_case_attachments',
            },
        ),
        migrations.CreateModel(
            name='TestCaseBug',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('bug_id', models.CharField(max_length=25)),
                ('summary', models.CharField(max_length=255, null=True, blank=True)),
                ('description', models.TextField(null=True, blank=True)),
            ],
            options={
                'db_table': 'test_case_bugs',
            },
            bases=(models.Model, tcms.core.models.base.UrlMixin),
        ),
        migrations.CreateModel(
            name='TestCaseBugSystem',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField(blank=True)),
                ('url_reg_exp', models.CharField(max_length=8192)),
                ('validate_reg_exp', models.CharField(max_length=128)),
            ],
            options={
                'db_table': 'test_case_bug_systems',
            },
            bases=(models.Model, tcms.core.models.base.UrlMixin),
        ),
        migrations.CreateModel(
            name='TestCaseCategory',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, db_column=b'category_id')),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField(blank=True)),
                ('product', models.ForeignKey(related_name='category', to='management.Product')),
            ],
            options={
                'db_table': 'test_case_categories',
                'verbose_name_plural': 'test case categories',
            },
            bases=(models.Model, tcms.core.models.base.UrlMixin),
        ),
        migrations.CreateModel(
            name='TestCaseComponent',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('case', models.ForeignKey(to='testcases.TestCase')),
                ('component', models.ForeignKey(to='management.Component')),
            ],
            options={
                'db_table': 'test_case_components',
            },
        ),
        migrations.CreateModel(
            name='TestCaseEmailSettings',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('notify_on_case_update', models.BooleanField(default=False)),
                ('notify_on_case_delete', models.BooleanField(default=False)),
                ('auto_to_case_author', models.BooleanField(default=False)),
                ('auto_to_case_tester', models.BooleanField(default=False)),
                ('auto_to_run_manager', models.BooleanField(default=False)),
                ('auto_to_run_tester', models.BooleanField(default=False)),
                ('auto_to_case_run_assignee', models.BooleanField(default=False)),
                ('case', models.OneToOneField(related_name='email_settings', to='testcases.TestCase')),
            ],
        ),
        migrations.CreateModel(
            name='TestCasePlan',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('sortkey', models.IntegerField(null=True, blank=True)),
                ('case', models.ForeignKey(to='testcases.TestCase')),
                ('plan', models.ForeignKey(to='testplans.TestPlan')),
            ],
            options={
                'db_table': 'test_case_plans',
            },
        ),
        migrations.CreateModel(
            name='TestCaseStatus',
            fields=[
                ('id', models.AutoField(max_length=6, serialize=False, primary_key=True, db_column=b'case_status_id')),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField(null=True, blank=True)),
            ],
            options={
                'db_table': 'test_case_status',
                'verbose_name': 'Test case status',
                'verbose_name_plural': 'Test case status',
            },
            bases=(models.Model, tcms.core.models.base.UrlMixin),
        ),
        migrations.CreateModel(
            name='TestCaseTag',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('user', models.IntegerField(default=b'0', db_column=b'userid')),
                ('case', models.ForeignKey(to='testcases.TestCase')),
                ('tag', models.ForeignKey(to='management.TestTag')),
            ],
            options={
                'db_table': 'test_case_tags',
            },
        ),
        migrations.CreateModel(
            name='TestCaseText',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('case_text_version', models.IntegerField()),
                ('create_date', models.DateTimeField(auto_now_add=True, db_column=b'creation_ts')),
                ('action', models.TextField(blank=True)),
                ('effect', models.TextField(blank=True)),
                ('setup', models.TextField(blank=True)),
                ('breakdown', models.TextField(blank=True)),
                ('action_checksum', models.CharField(max_length=32)),
                ('effect_checksum', models.CharField(max_length=32)),
                ('setup_checksum', models.CharField(max_length=32)),
                ('breakdown_checksum', models.CharField(max_length=32)),
                ('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, db_column=b'who')),
                ('case', models.ForeignKey(related_name='text', to='testcases.TestCase')),
            ],
            options={
                'ordering': ['case', '-case_text_version'],
                'db_table': 'test_case_texts',
            },
            bases=(models.Model, tcms.core.models.base.UrlMixin),
        ),
        migrations.AddField(
            model_name='testcasebug',
            name='bug_system',
            field=models.ForeignKey(default=1, to='testcases.TestCaseBugSystem'),
        ),
        migrations.AddField(
            model_name='testcasebug',
            name='case',
            field=models.ForeignKey(related_name='case_bug', to='testcases.TestCase'),
        ),
    ]
| MrSenko/Nitrate | tcms/testcases/migrations/0001_initial.py | Python | gpl-2.0 | 9,258 |
#!/usr/bin/python
## func command line interface & client lib
##
## Copyright 2007,2008 Red Hat, Inc
## +AUTHORS
##
## This software may be freely redistributed under the terms of the GNU
## general public license.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import sys
import command
import func.module_loader as module_loader
from func.overlord import client,base_command
class FuncCommandLine(command.Command):
    """Top-level 'func' command; subcommands are loaded dynamically."""
    name = "func"
    usage = "func [--options] \"hostname glob\" module method [arg1] [arg2] ... "
    # NOTE(review): class-level list, shared by all instances; __init__
    # appends to it, so instantiating twice duplicates entries — confirm
    # that only one instance is ever created.
    subCommandClasses = []
    def __init__(self):
        modules = module_loader.load_modules('func/overlord/cmd_modules/', base_command.BaseCommand)
        for x in modules.keys():
            # presumably module_loader returns instances; we register
            # their classes as subcommands — verify against module_loader
            self.subCommandClasses.append(modules[x].__class__)
        command.Command.__init__(self)
    def do(self, args):
        """No-op: all real work is delegated to subcommands."""
        pass
    def addOptions(self):
        """Register top-level command-line options."""
        self.parser.add_option('', '--version', action="store_true",
                               help="show version information")
# just some ugly goo to try to guess if arg[1] is hostnamegoo or
# a command name
def _isGlob(self, str):
if str.find("*") or str.find("?") or str.find("[") or str.find("]"):
return True
return False
def handleArguments(self, args):
if len(args) < 2:
sys.stderr.write("see the func manpage for usage\n")
sys.exit(411)
minion_string = args[0]
# try to be clever about this for now
if client.is_minion(minion_string) or self._isGlob(minion_string):
self.server_spec = minion_string
args.pop(0)
# if it doesn't look like server, assume it
# is a sub command? that seems wrong, what about
# typo's and such? How to catch that? -akl
# maybe a class variable self.data on Command?
def handleOptions(self, options):
if options.version:
#FIXME
sys.stderr.write("version is NOT IMPLEMENTED YET\n")
| kadamski/func | func/overlord/func_command.py | Python | gpl-2.0 | 2,133 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2010 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"""
Rule that checks for an object with a particular tag.
"""
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ...ggettext import gettext as _
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from . import Rule
#-------------------------------------------------------------------------
#
# HasTag
#
#-------------------------------------------------------------------------
class HasTagBase(Rule):
    """
    Rule that checks for an object with a particular tag.
    """
    labels      = [ 'Tag:' ]
    name        = 'Objects with the <tag>'
    description = "Matches objects with the given tag"
    category    = _('General filters')

    def prepare(self, db):
        """
        Resolve the tag name into a tag handle once, before filtering.
        """
        tag = db.get_tag_from_name(self.list[0])
        self.tag_handle = tag.get_handle() if tag is not None else None

    def apply(self, db, obj):
        """
        Return True when the object carries the prepared tag.
        """
        return (self.tag_handle is not None
                and self.tag_handle in obj.get_tag_list())
| arunkgupta/gramps | gramps/gen/filters/rules/_hastagbase.py | Python | gpl-2.0 | 2,181 |
#!/usr/bin/python3.4
#############################################################################
#
# Dictionnary DB managing script. Add/Del/Search definitions
# Copyright (C) 2014 bertrand
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#############################################################################
###############
### Imports ###
import sys
import psycopg2 as PSQL
import textwrap as txtwrp
#####################
### Configuration ###
# Script-wide configuration: tool version, plus the PostgreSQL connection
# parameters (database name and role) used by every sub-command.
config = {
    'VERSION_MAJOR' : '0',
    'VERSION_MINOR' : '1',
    'dbname' : 'bertrand',
    'user' : 'bertrand'
}
#############
### USAGE ###
def usage():
    """Print the tool's general usage/help message to stdout."""
    # Typo fix: "dicotionnnary" -> "dictionnary" (the spelling used
    # consistently everywhere else in this script).
    print("Tool to insert/remove entries in the dictionnary.")
    print("Version: " + config['VERSION_MAJOR'] + "." + config['VERSION_MINOR'])
    print("Usage: " + sys.argv[0] + " <command> <options>")
    print("")
    print("Commands:")
    print("  add       Add definition to dictionnary.")
    print("  del       Remove definition from dictionnary.")
    print("  help      Print general help or command specific help.")
    print("  search    Search definition in dictionnary.")
    print("")
###########
### ADD ###
def add():
    """Parse 'add' options from sys.argv and insert one definition row.

    Recognized flags: -f (fields, required), -n (name, required),
    -d (definition, required), -u (url, optional).
    """
    argc = len(sys.argv)
    if argc < 3:
        __help_cmd(sys.argv[1])
        return
    req = {
        'fields' : '',
        'name' : '',
        'def' : '',
        'url' : ''
    }
    # Flag -> request-dict key mapping; every flag takes one value.
    opt_keys = {'-d': 'def', '-f': 'fields', '-n': 'name', '-u': 'url'}
    i = 2
    while i < argc:
        opt = sys.argv[i]
        if opt not in opt_keys:
            print("Unknown option '" + opt + "'")
            __help_cmd(sys.argv[1])
            return
        i += 1
        # BUG FIX: a flag given as the last argument used to crash with an
        # uncaught IndexError.
        if i >= argc:
            print("Missing argument.")
            __help_cmd(sys.argv[1])
            return
        req[opt_keys[opt]] = sys.argv[i]
        i += 1
    if req['fields'] == '':
        print("Please specify fields with option '-f'.")
        __help_cmd(sys.argv[1])
        return
    elif req['name'] == '':
        # BUG FIX: this branch used to repeat the '-f' message.
        print("Please specify name with option '-n'.")
        __help_cmd(sys.argv[1])
        return
    elif req['def'] == '':
        print("Please specify definition with option '-d'.")
        __help_cmd(sys.argv[1])
        return
    conn = PSQL.connect("dbname=" + config['dbname'] + " user=" + config['user'])
    cur = conn.cursor()
    try:
        # mogrify() does the parameter quoting; '{...}' builds the
        # PostgreSQL array literal for the fields column.
        sql = cur.mogrify("INSERT INTO dico (fields,name,def,url) VALUES (%s, %s, %s, %s)",
                          ("{" + req['fields'] + "}", req['name'], req['def'], req['url']))
        print(sql)
        cur.execute(sql)
        conn.commit()
    finally:
        # Always release the cursor/connection, even if execute() fails.
        cur.close()
        conn.close()
###########
### DEL ###
def delete():
    """Delete the definition whose numeric id is given on the command line."""
    try:
        defid = sys.argv[2]
    except IndexError:
        print("Missing argument.")
        __help_cmd(sys.argv[1])
        return
    connection = PSQL.connect("dbname=" + config['dbname'] + " user=" + config['user'])
    cursor = connection.cursor()
    # mogrify() handles the parameter quoting for the id value.
    query = cursor.mogrify("DELETE FROM dico WHERE id=%s", (defid,))
    print(query)
    cursor.execute(query)
    connection.commit()
    cursor.close()
    connection.close()
#####################
### HELP COMMANDS ###
def help_cmd():
    """Entry point for the 'help' command.

    Shows command-specific help when a command name follows 'help',
    otherwise the general usage message.
    """
    try:
        cmd = sys.argv[2]
    # BUG FIX: was a bare ``except:``, which also swallowed e.g.
    # KeyboardInterrupt; only a missing argv[2] is expected here.
    except IndexError:
        cmd = ''
    __help_cmd(cmd)
def __help_cmd(cmd):
    """Print detailed help for one command; fall back to the general
    usage message when *cmd* is empty or unknown."""
    if cmd == '' :
        usage()
    elif cmd == "add" :
        print("Command '" + cmd + "': Add definition to dictionnary.")
        print("Usage: " + sys.argv[0] + " " + cmd + " <options>")
        print("")
        print("Options:")
        print("    -d <str>            Definition.")
        print("    -f <str,str,..>     List of fields.")
        print("    -n <str>            Name of the entry")
        print("    -u <url>            One url to a more complete definition.")
        print("")
    elif cmd == "del" :
        print("Command '" + cmd + "': Delete definition from dictionnary.")
        print("Usage: " + sys.argv[0] + " " + cmd + " <id>")
        print("")
        print("Param:")
        print("    id      ID of the definition to delete.")
        print("")
    elif cmd == "help" :
        print("Command '" + cmd + "': Print help.")
        print("Usage: " + sys.argv[0] + " " + cmd + " [command]")
        print("")
        print("Giving NO 'command' this will print the general help.")
        print("Giving 'command' this will print the command specific help. ")
        print("")
    elif cmd == "search" :
        print("Command '" + cmd + "': Search definition in dictionnary.")
        print("Usage: " + sys.argv[0] + " " + cmd + " <options>")
        print("")
        print("Options:")
        print("    -a                  Print all definitions in the table.")
        print("    -f <str,str,...>    Print definitions matching the set of given fields.")
        print("    -i <id>             Print definition matching the given ID.")
        # Typo fix in user-facing text: "mathing" -> "matching".
        print("    -n <str>            Print definition matching the given entry name.")
        print("")
    else:
        print("Unknown command: '" + cmd + "'")
        usage()
##############
### SEARCH ###
def search():
    """Entry point for the 'search' command: build one SELECT from the
    chosen option and pretty-print the matching rows."""
    # Guard clauses replace the original try/except-else around argv[2].
    if len(sys.argv) < 3:
        __help_cmd(sys.argv[1])
        return
    opt = sys.argv[2]
    if opt not in ('-a', '-f', '-i', '-n'):
        print("Unknown option '" + sys.argv[2] + "'")
        __help_cmd(sys.argv[1])
        return
    conn = PSQL.connect("dbname=" + config['dbname'] + " user=" + config['user'])
    cur = conn.cursor()
    try:
        # Any option except -a needs an extra argument; indexing argv[3]
        # raises IndexError when it is missing.
        if opt == "-a":
            req = cur.mogrify("SELECT id,fields,name,def,url FROM dico")
        elif opt == "-f":
            req = __search_build_req_fields(sys.argv[3].split(','))
        elif opt == '-i':
            req = cur.mogrify("SELECT id,fields,name,def,url FROM dico WHERE id=%s", (sys.argv[3],))
        elif opt == "-n":
            req = cur.mogrify("SELECT id,fields,name,def,url FROM dico WHERE name=%s", (sys.argv[3],))
    except IndexError:
        print("Missing argument.")
        __help_cmd(sys.argv[1])
    else:
        print(req)
        cur.execute(req)
        print_rows(cur.fetchall())
        conn.commit()
    finally:
        cur.close()
        conn.close()
def __search_build_req_fields(fields):
    """Build the SELECT matching any of *fields* (caller always passes at
    least one field, since it comes from str.split(','))."""
    # How do you like your SQL injection?
    # I like mine crispy and with a python '+' ;)
    # http://initd.org/psycopg/docs/usage.html
    # http://xkcd.com/327/
    # That will do for now ...
    clauses = " OR ".join(["'" + f + "'=ANY(fields)" for f in fields])
    return "SELECT id,fields,name,def,url FROM dico WHERE " + clauses
###################################
### PRINT PSQL REQUESTS RESULTS ###
def print_rows(rows):
    """Pretty-print the rows of a dico SELECT, one record per block."""
    wrapped_columns = (("FIELDS : ", 1), ("NAME : ", 2),
                       ("DEF : ", 3), ("URL : ", 4))
    for record in rows:
        print("---------------------")
        print("ID : ", record[0])
        for label, index in wrapped_columns:
            __print_row_wrapped(label, record[index])
        print("")
def __print_row_wrapped(label, value):
    """Print ``label value`` with *value* word-wrapped; continuation lines
    are indented to align under the first line of the value."""
    labellen = len(label)
    # BUG FIX: textwrap.wrap('') returns [], which made wrapped[0] raise
    # IndexError for empty values (e.g. an entry added without a url).
    wrapped = txtwrp.wrap(value) or ['']
    print(label, wrapped[0])
    for i in range(1, len(wrapped)):
        print(' ' * labellen, wrapped[i])
############
### MAIN ###
# Dispatch table: command name -> handler function.
commands = {
    'add' : add,
    'del' : delete,
    'help' : help_cmd,
    'search' : search
}

# BUG FIX: indexing sys.argv raises IndexError, never KeyError, so the
# original "Unknown command" branch was unreachable and an unknown command
# crashed with an uncaught KeyError at dispatch time.
try:
    cmd = sys.argv[1]
except IndexError:
    # No command given at all.
    usage()
    sys.exit()

handler = commands.get(cmd)
if handler is None:
    print("Unknown command: " + cmd)
    usage()
    sys.exit()
handler()
| bertrandF/DictionaryDB | db.py | Python | gpl-2.0 | 8,296 |
#
# Copyright (c) 2008--2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import PAM
import sys
from spacewalk.common.rhnLog import log_error
from spacewalk.common.rhnException import rhnException
# Credential stash for the PAM conversation callback: the callback receives
# no user-data argument here, so check_password() passes the credentials
# through these module-level globals (and clears them in its finally block).
__username = None
__password = None
def __pam_conv(auth, query_list):
    """PAM conversation callback.

    Answers each echo-on prompt with the stashed username and each
    echo-off prompt with the stashed password; returns None (aborting
    the conversation) on any other prompt type.
    """
    global __username, __password
    # Build a list of responses to be passed back to PAM
    responses = []
    for query, msg_type in query_list:
        if msg_type == PAM.PAM_PROMPT_ECHO_ON:
            # Prompt for a username
            responses.append((__username, 0))
        elif msg_type == PAM.PAM_PROMPT_ECHO_OFF:
            # Prompt for a password
            responses.append((__password, 0))
        else:
            # Unknown PAM type
            log_error("Got unknown PAM type %s (query=%s)" % (msg_type, query))
            return None
    return responses
def check_password(username, password, service):
    """Authenticate *username*/*password* against the PAM *service*.

    Returns 1 on success, 0 when PAM rejects the credentials; any other
    failure is wrapped in rhnException with the original traceback.
    (Python 2 syntax: except-comma and three-argument raise.)
    """
    global __username, __password
    auth = PAM.pam()
    auth.start(service, username, __pam_conv)
    # Save the username and passwords in the globals, the conversation
    # function needs access to them
    __username = username
    __password = password
    try:
        try:
            auth.authenticate()
            auth.acct_mgmt()
        finally:
            # Something to be always executed - cleanup
            # (never leave credentials lying around in module globals)
            __username = __password = None
    except PAM.error, e:
        # PAM rejected the credentials: log and report failure.
        resp, code = e.args[:2]
        log_error("Password check failed (%s): %s" % (code, resp))
        return 0
    except:
        # Deliberate catch-all: anything unexpected is re-raised as an
        # rhnException, preserving the original traceback (py2 3-arg raise).
        raise rhnException('Internal PAM error'), None, sys.exc_info()[2]
    else:
        # Good password
        return 1
| moio/spacewalk | backend/server/rhnAuthPAM.py | Python | gpl-2.0 | 2,144 |