| code (stringlengths 2–1.05M) | repo_name (stringlengths 5–104) | path (stringlengths 4–251) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int32, 2–1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/env python
# plot results from ynthetictest.py
# by Dan Stowell, spring 2013
import os.path
import csv
from math import log, exp, pi, sqrt, ceil, floor
from numpy import mean, std, shape
import numpy as np
import random
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from mpl_toolkits.mplot3d import Axes3D
import itertools
#annotdir = os.path.expanduser("~/svn/stored_docs/python/markovrenewal/output")
annotdir = "output"
plotfontsize = "large" #"xx-small"
namelookup = {
'fsn':'Fsn', 'ftrans':'Ftrans', 'fsigtrans':'Fsigtrans', 'msecs':'Run time (msecs)', \
'birthdens_mism':'Error in assumed birth density (ratio)',
'deathprob_mism':'Error in assumed death probability (ratio)',
'snr_mism':'Error in assumed SNR (dB)',
'gen_mism':'Proportion of errors in transition probabilities',
'misseddetectionprob':'Missed detection probability',
'noisecorr':'Amount of signal correlation imposed on noise',
'snr':'SNR (dB)',
'birthdens':'birth intensity',
#'':'',
}
def readable_name(name):
return namelookup.get(name, name)
def fmt_chooser(currentcombi, groupcols, groupingvals):
fmt = 'k'
if groupcols[0]=='mmrpmode' and currentcombi[0]=='greedy':
if (len(groupcols)>1) and groupingvals[groupcols[1]].index(currentcombi[1])>0:
fmt += ':'
else:
fmt += '-.'
else:
if (len(groupcols)>1) and groupingvals[groupcols[1]].index(currentcombi[1])>0:
fmt += '--'
else:
fmt += '-'
return fmt
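# Added note (illustration, not part of the original): fmt_chooser returns a
# black matplotlib format string per line group. With 'mmrpmode' as the first
# grouping column passed in (as in the __main__ calls below):
#   ('greedy', <first value of the next grouper>) -> 'k-.' (dash-dot)
#   ('greedy', <any later value>)                 -> 'k:'  (dotted)
#   (<other mode>, <first value>)                 -> 'k-'  (solid)
#   (<other mode>, <any later value>)             -> 'k--' (dashed)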
def ynth_csv_to_ciplot(csvpath, outpath, groupcols, summarycols, filtercols=None, xjitter=0.):
"""
groupcols: used for discrete grouping of data, with the first one becoming the x-axis in a plot, remaining ones as multiple lines;
    summarycols: the name(s) of the columns to be made into y-values. One separate plot will be made for each.
    filtercols: {key->listofallowed...} select rows only where particular STRING values are found; otherwise, summaries are pooled over all values.
"""
data = ynth_csv_loaddata(csvpath, groupcols, summarycols, filtercols)
# data is {'groupingvals':{ col: list }, 'summarydata':{ tupleofgroupvals: { summarycol:{'mean': _, 'stderr': _} } } }
csvname = os.path.splitext(os.path.basename(csvpath))[0]
if isinstance(summarycols, basestring): summarycols = [summarycols]
if isinstance(groupcols, basestring): groupcols = [groupcols]
# one plot for each summarycol
for summarycol in summarycols:
fig = plt.figure()
# Now, we're going to use the first grouper as the x-axis.
# This means we want to iterate over all combinations of the other groupers, drawing a line each time.
for linegroupcombi in itertools.product(*[data['groupingvals'][col] for col in groupcols[1:]]):
linedata = []
for xval in data['groupingvals'][groupcols[0]]:
fullgroupcombi = (xval,) + tuple(linegroupcombi)
ourdata = data['summarydata'][fullgroupcombi][summarycol]
if xjitter != 0:
xval += random.gauss(0,xjitter)
linedata.append({'xval':xval, 'mean': ourdata['mean'], 'stderr_up': ourdata['stderr'], 'stderr_dn': ourdata['stderr']})
# draw a line
linelabel = ', '.join([linegroupcombi[0]] + ["%s %s" % (readable_name(groupcols[lgi+2]), lg) for lgi, lg in enumerate(linegroupcombi[1:])])
plt.errorbar([x['xval'] for x in linedata], \
[x['mean'] for x in linedata], \
([x['stderr_dn'] for x in linedata], [x['stderr_up'] for x in linedata]), \
label=linelabel, fmt=fmt_chooser(linegroupcombi, groupcols[1:], data['groupingvals']))
#plt.title("%s_%s" % (whichstat, runtype), fontsize=plotfontsize)
plt.xlabel(readable_name(groupcols[0]), fontsize=plotfontsize)
plt.ylabel(readable_name(summarycol), fontsize=plotfontsize)
plt.xticks(data['groupingvals'][groupcols[0]], fontsize=plotfontsize)
xdatamax = max(data['groupingvals'][groupcols[0]])
xdatamin = min(data['groupingvals'][groupcols[0]])
plt.xlim(xmin=xdatamin-(xdatamax-xdatamin)*0.05, xmax=xdatamax+(xdatamax-xdatamin)*0.05)
#yuck if groupcols[0] in ['deathprob_mism', 'birthdens_mism']:
#yuck plt.xscale('log')
if summarycol in ['msecs']:
plt.yscale('log')
else:
plt.ylim(ymin=0.2, ymax=1) #rescale(0.3), ymax=rescale(1.001))
#plt.yticks(map(rescale, yticks), yticks, fontsize=plotfontsize)
plt.yticks(fontsize=plotfontsize)
plt.legend(loc=(0.02, 0.05), prop={'size':'medium'})
outfilepath = "%s/%s_%s.pdf" % (outpath, csvname, summarycol)
plt.savefig(outfilepath, papertype='A4', format='pdf')
print("Written file %s" % outfilepath)
# LATER: consider how to avoid filename collisions - just allow user to specify a lbl?
def ynth_csv_to_surfaceplot(csvpath, outpath, groupcols, summarycols, filtercols=None):
"""
groupcols: used for discrete grouping of data, with the first one becoming the x-axis in a plot, second as y-axis;
    summarycols: the name(s) of the columns to be made into y-values. One separate plot will be made for each.
    filtercols: {key->listofallowed...} select rows only where particular STRING values are found; otherwise, summaries are pooled over all values.
"""
data = ynth_csv_loaddata(csvpath, groupcols, summarycols, filtercols)
# data is {'groupingvals':{ col: list }, 'summarydata':{ tupleofgroupvals: { summarycol:{'mean': _, 'stderr': _} } } }
csvname = os.path.splitext(os.path.basename(csvpath))[0]
if isinstance(summarycols, basestring): summarycols = [summarycols]
if isinstance(groupcols, basestring): groupcols = [groupcols]
if len(groupcols) != 2: raise ValueError("for surface plot, exactly 2 groupcols must be specified (used as X and Y).")
# one plot for each summarycol
for summarycol in summarycols:
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d') # 3D here
# NOW DO A SURFACE PLOT
data['groupingvals'][groupcols[0]].sort()
ydata = map(float, data['groupingvals'][groupcols[1]])
ydata.sort()
data['groupingvals'][groupcols[1]].sort(cmp=lambda a,b: cmp(float(a), float(b)))
z = [[data['summarydata'][(x,y)][summarycol]['mean'] for x in data['groupingvals'][groupcols[0]]] for y in data['groupingvals'][groupcols[1]]]
ymesh = np.array([data['groupingvals'][groupcols[0]] for _ in range(len(data['groupingvals'][groupcols[1]]))])
xmesh = np.array([ydata for _ in range(len(data['groupingvals'][groupcols[0]]))]).T
z = np.array(z)
ax.plot_surface(xmesh, ymesh, z, rstride=1, cstride=1)
"""
plt.imshow(z, interpolation='nearest', cmap=cm.binary)
"""
"""
# Now, we're going to use the first grouper as the x-axis.
# This means we want to iterate over all combinations of the other groupers, drawing a line each time.
for linegroupcombi in itertools.product(*[data['groupingvals'][col] for col in groupcols[1:]]):
linedata = []
for xval in data['groupingvals'][groupcols[0]]:
fullgroupcombi = (xval,) + tuple(linegroupcombi)
ourdata = data['summarydata'][fullgroupcombi][summarycol]
if xjitter != 0:
xval += random.gauss(0,xjitter)
linedata.append({'xval':xval, 'mean': ourdata['mean'], 'stderr_up': ourdata['stderr'], 'stderr_dn': ourdata['stderr']})
# draw a line
linelabel = ', '.join([linegroupcombi[0]] + ["%s %s" % (readable_name(groupcols[lgi+2]), lg) for lgi, lg in enumerate(linegroupcombi[1:])])
plt.errorbar([x['xval'] for x in linedata], \
[x['mean'] for x in linedata], \
([x['stderr_dn'] for x in linedata], [x['stderr_up'] for x in linedata]), \
label=linelabel, fmt=fmt_chooser(linegroupcombi, groupcols[1:], data['groupingvals']))
"""
#plt.title("%s_%s" % (whichstat, runtype), fontsize=plotfontsize)
"""
plt.xlabel(readable_name(groupcols[0]), fontsize=plotfontsize)
plt.ylabel(readable_name(groupcols[1]), fontsize=plotfontsize)
plt.title(readable_name(summarycol), fontsize=plotfontsize)
plt.xticks(range(len(data['groupingvals'][groupcols[0]])), data['groupingvals'][groupcols[0]], fontsize=plotfontsize)
plt.yticks(range(len(data['groupingvals'][groupcols[1]])), data['groupingvals'][groupcols[1]], fontsize=plotfontsize)
"""
"""
xdatamax = max(data['groupingvals'][groupcols[0]])
xdatamin = min(data['groupingvals'][groupcols[0]])
plt.xlim(xmin=xdatamin-(xdatamax-xdatamin)*0.05, xmax=xdatamax+(xdatamax-xdatamin)*0.05)
ydatamax = max(data['groupingvals'][groupcols[0]])
ydatamin = min(data['groupingvals'][groupcols[0]])
plt.ylim(ymin=ydatamin-(ydatamax-ydatamin)*0.05, ymax=ydatamax+(ydatamax-ydatamin)*0.05)
if summarycol in ['msecs']:
plt.zscale('log')
else:
plt.zlim(ymin=0.2, ymax=1) #rescale(0.3), ymax=rescale(1.001))
plt.zticks(fontsize=plotfontsize)
#plt.legend(loc=(0.02, 0.05), prop={'size':'medium'})
"""
#can't for 3d: plt.colorbar()
outfilepath = "%s/%s_%s_surf.pdf" % (outpath, csvname, summarycol)
plt.savefig(outfilepath, papertype='A4', format='pdf')
print("Written file %s" % outfilepath)
def ynth_csv_loaddata(csvpath, groupcols, summarycols, filtercols=None):
# load the csv data, applying filtering as we load, and floatifying the summarycols and groupcols
# also build up some lists of the values found in the groupcols
if isinstance(groupcols, basestring):
groupcols = [groupcols]
if isinstance(summarycols, basestring):
summarycols = [summarycols]
rdr = csv.DictReader(open(csvpath, 'rb'))
groupingvals = {col:set() for col in groupcols}
rawgroupeddata = {} # a dict where a TUPLE of groupedvals maps to a dict containing mean and ci
for row in rdr:
# filtering
skiprow = False
if filtercols:
for (filtercol, allowedvals) in filtercols.items():
if row[filtercol] not in allowedvals:
skiprow = True
break
if skiprow: continue
# floatify
# CANNOT (eg for mmrpmode): for col in groupcols: row[col] = float(row[col])
row[groupcols[0]] = float(row[groupcols[0]])
for col in summarycols: row[col] = float(row[col])
# record the grouping values
for col in groupcols: groupingvals[col].add(row[col])
# and of course store the datum
groupindex = tuple(row[col] for col in groupcols)
if groupindex not in rawgroupeddata:
rawgroupeddata[groupindex] = []
rawgroupeddata[groupindex].append(row)
# then construct the summary results: a dict where a TUPLE of groupedvals maps to a dict containing mean and ci
summarydata = {}
for groupindex, datalist in rawgroupeddata.items():
ourstats = {}
for whichsummarycol in summarycols:
numlist = [datum[whichsummarycol] for datum in datalist]
themean = mean(numlist)
stderr = std(numlist) / sqrt(len(numlist))
ourstats[whichsummarycol] = {'mean':themean, 'stderr':stderr}
summarydata[groupindex] = ourstats
# return the groupcol listing and the big dict of summary data
for col in groupcols:
groupingvals[col] = list(groupingvals[col])
groupingvals[col].sort()
return {'groupingvals':groupingvals, 'summarydata':summarydata}
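# Hedged illustration (added, values invented): for groupcols=['snr', 'mmrpmode']
# and summarycols=['fsn'], the returned structure looks like
#   {'groupingvals': {'snr': [-10.0, 0.0, 10.0],
#                     'mmrpmode': ['full', 'greedy']},
#    'summarydata': {(-10.0, 'full'): {'fsn': {'mean': 0.62, 'stderr': 0.03}},
#                    ...}}
# Note that only the FIRST groupcol is floatified above, so the remaining
# group values stay as strings.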
################################################################################################################
if __name__ == '__main__':
# NOTE: filtercols must list string values not floats
ynth_csv_to_ciplot("%s/ynth_varying1.csv" % annotdir, "%s/pdf" % annotdir, \
groupcols=['snr', 'mmrpmode', 'birthdens'], summarycols=['fsn', 'fsigtrans', 'msecs'], filtercols=None, xjitter=0.1)
#ynth_csv_to_ciplot("%s/ynth_varying100.csv" % annotdir, "%s/pdf" % annotdir, \
# groupcols=['snr', 'mmrpmode', 'birthdens'], summarycols=['fsn', 'fsigtrans', 'msecs'], filtercols=None, xjitter=1.1)
ynth_csv_to_ciplot("%s/ynth_sens_snr.csv" % annotdir, "%s/pdf" % annotdir, \
groupcols=['snr_mism', 'mmrpmode' #, 'snr'
], summarycols=['fsn', 'fsigtrans'], filtercols=None)
ynth_csv_to_ciplot("%s/ynth_sens_birth.csv" % annotdir, "%s/pdf" % annotdir, \
groupcols=['birthdens_mism', 'mmrpmode'], summarycols=['fsn', 'fsigtrans'], filtercols=None)
ynth_csv_to_ciplot("%s/ynth_sens_death.csv" % annotdir, "%s/pdf" % annotdir, \
groupcols=['deathprob_mism', 'mmrpmode'], summarycols=['fsn', 'fsigtrans'], filtercols=None)
ynth_csv_to_ciplot("%s/ynth_sens_noisecorr.csv" % annotdir, "%s/pdf" % annotdir, \
groupcols=['noisecorr', 'mmrpmode'], summarycols=['fsn', 'fsigtrans', 'msecs'], filtercols=None) # added msecs to noisecorr since long
ynth_csv_to_ciplot("%s/ynth_sens_missed.csv" % annotdir, "%s/pdf" % annotdir, \
groupcols=['misseddetectionprob', 'mmrpmode'], summarycols=['fsn', 'fsigtrans'], filtercols=None)
ynth_csv_to_ciplot("%s/ynth_sens_tt.csv" % annotdir, "%s/pdf" % annotdir, \
groupcols=['gen_mism', 'mmrpmode'], summarycols=['fsn', 'fsigtrans'], filtercols=None)
# ynth_csv_to_surfaceplot("%s/ynth_sens_snr.csv" % annotdir, "%s/pdf" % annotdir, \
# groupcols=['snr_mism', 'snr'], summarycols=['fsn', 'fsigtrans'], filtercols={'mmrpmode':['full']}) # full inference only
| danstowell/markovrenewal | experiments/plotynth.py | Python | gpl-2.0 | 12,698 |
#
# Copyright (C) 2013 Sean Poyser
#
#
# This code is a derivative of the YouTube plugin for XBMC
# released under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3
# Copyright (C) 2010-2012 Tobias Ussing And Henrik Mosgaard Jensen
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with XBMC; see the file COPYING. If not, write to
# the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# http://www.gnu.org/copyleft/gpl.html
#
# 5: "240p h263 flv container",
# 18: "360p h264 mp4 container | 270 for rtmpe?",
# 22: "720p h264 mp4 container",
# 26: "???",
# 33: "???",
# 34: "360p h264 flv container",
# 35: "480p h264 flv container",
# 37: "1080p h264 mp4 container",
# 38: "720p vp8 webm container",
# 43: "360p h264 flv container",
# 44: "480p vp8 webm container",
# 45: "720p vp8 webm container",
# 46: "520p vp8 webm stereo",
# 59: "480 for rtmpe",
# 78: "seems to be around 400 for rtmpe",
# 82: "360p h264 stereo",
# 83: "240p h264 stereo",
# 84: "720p h264 stereo",
# 85: "520p h264 stereo",
# 100: "360p vp8 webm stereo",
# 101: "480p vp8 webm stereo",
# 102: "720p vp8 webm stereo",
# 120: "hd720",
# 121: "hd1080"
import re
import urllib2
import urllib
import cgi
import HTMLParser
try: import simplejson as json
except ImportError: import json
MAX_REC_DEPTH = 5
def Clean(text):
    text = text.replace('&#8211;', '-')
    text = text.replace('&#8217;', '\'')
    text = text.replace('&#8220;', '"')
    text = text.replace('&#8221;', '"')
    text = text.replace('&#39;', '\'')
    text = text.replace('<b>', '')
    text = text.replace('</b>', '')
    text = text.replace('&amp;', '&')
    text = text.replace('\ufeff', '')
return text
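# Example (added for illustration):
#   Clean('Tom &amp; Jerry &#8211; <b>Trailer</b>')  ->  'Tom & Jerry - Trailer'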
def PlayVideo(id, forcePlayer=False):
import xbmcgui
import sys
import utils
busy = utils.showBusy()
video, links = GetVideoInformation(id)
if busy:
busy.close()
if 'best' not in video:
return False
url = video['best']
title = video['title']
image = video['thumbnail']
liz = xbmcgui.ListItem(title, iconImage=image, thumbnailImage=image)
liz.setInfo( type="Video", infoLabels={ "Title": title} )
if forcePlayer or len(sys.argv) < 2 or int(sys.argv[1]) == -1:
import xbmc
pl = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
pl.clear()
pl.add(url, liz)
xbmc.Player().play(pl)
else:
import xbmcplugin
liz.setPath(url)
xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, liz)
return True
def GetVideoInformation(id):
#id = 'H7iQ4sAf0OE' #test for HLSVP
#id = 'ofHlUJuw8Ak' #test for stereo
#id = 'ifZkeuSrNRc' #account closed
#id = 'M7FIvfx5J10'
#id = 'n-D1EB74Ckg' #vevo
#id = 'lVMWEheQ2hU' #vevo
video = {}
links = []
try: video, links = GetVideoInfo(id)
except : pass
return video, links
def GetVideoInfo(id):
url = 'http://www.youtube.com/watch?v=%s&safeSearch=none' % id
html = FetchPage(url)
video, links = Scrape(html)
video['videoid'] = id
video['thumbnail'] = "http://i.ytimg.com/vi/%s/0.jpg" % video['videoid']
video['title'] = GetVideoTitle(html)
if len(links) == 0:
if 'hlsvp' in video:
video['best'] = video['hlsvp']
else:
video['best'] = links[0][1]
return video, links
def GetVideoTitle(html):
try: return Clean(re.compile('<meta name="title" content="(.+?)">').search(html).groups(1)[0])
except: pass
return 'YouTube Video'
def Scrape(html):
stereo = [82, 83, 84, 85, 100, 101, 102]
video = {}
links = []
flashvars = ExtractFlashVars(html)
if not flashvars.has_key(u"url_encoded_fmt_stream_map"):
return video, links
if flashvars.has_key(u"ttsurl"):
video[u"ttsurl"] = flashvars[u"ttsurl"]
if flashvars.has_key(u"hlsvp"):
video[u"hlsvp"] = flashvars[u"hlsvp"]
for url_desc in flashvars[u"url_encoded_fmt_stream_map"].split(u","):
url_desc_map = cgi.parse_qs(url_desc)
if not (url_desc_map.has_key(u"url") or url_desc_map.has_key(u"stream")):
continue
key = int(url_desc_map[u"itag"][0])
url = u""
if url_desc_map.has_key(u"url"):
url = urllib.unquote(url_desc_map[u"url"][0])
elif url_desc_map.has_key(u"conn") and url_desc_map.has_key(u"stream"):
url = urllib.unquote(url_desc_map[u"conn"][0])
if url.rfind("/") < len(url) -1:
url = url + "/"
url = url + urllib.unquote(url_desc_map[u"stream"][0])
elif url_desc_map.has_key(u"stream") and not url_desc_map.has_key(u"conn"):
url = urllib.unquote(url_desc_map[u"stream"][0])
if url_desc_map.has_key(u"sig"):
url = url + u"&signature=" + url_desc_map[u"sig"][0]
elif url_desc_map.has_key(u"s"):
sig = url_desc_map[u"s"][0]
#url = url + u"&signature=" + DecryptSignature(sig)
flashvars = ExtractFlashVars(html, assets=True)
js = flashvars[u"js"]
url += u"&signature=" + DecryptSignatureNew(sig, js)
if key not in stereo:
links.append([key, url])
#links.sort(reverse=True)
return video, links
def DecryptSignature(s):
''' use decryption solution by Youtube-DL project '''
if len(s) == 88:
return s[48] + s[81:67:-1] + s[82] + s[66:62:-1] + s[85] + s[61:48:-1] + s[67] + s[47:12:-1] + s[3] + s[11:3:-1] + s[2] + s[12]
elif len(s) == 87:
return s[62] + s[82:62:-1] + s[83] + s[61:52:-1] + s[0] + s[51:2:-1]
elif len(s) == 86:
return s[2:63] + s[82] + s[64:82] + s[63]
elif len(s) == 85:
return s[76] + s[82:76:-1] + s[83] + s[75:60:-1] + s[0] + s[59:50:-1] + s[1] + s[49:2:-1]
elif len(s) == 84:
return s[83:36:-1] + s[2] + s[35:26:-1] + s[3] + s[25:3:-1] + s[26]
elif len(s) == 83:
return s[6] + s[3:6] + s[33] + s[7:24] + s[0] + s[25:33] + s[53] + s[34:53] + s[24] + s[54:]
elif len(s) == 82:
return s[36] + s[79:67:-1] + s[81] + s[66:40:-1] + s[33] + s[39:36:-1] + s[40] + s[35] + s[0] + s[67] + s[32:0:-1] + s[34]
elif len(s) == 81:
return s[6] + s[3:6] + s[33] + s[7:24] + s[0] + s[25:33] + s[2] + s[34:53] + s[24] + s[54:81]
elif len(s) == 92:
        return s[25] + s[3:25] + s[0] + s[26:42] + s[79] + s[43:79] + s[91] + s[80:83]
#else:
# print ('Unable to decrypt signature, key length %d not supported; retrying might work' % (len(s)))
def ExtractFlashVars(data, assets=False):
flashvars = {}
found = False
for line in data.split("\n"):
if line.strip().find(";ytplayer.config = ") > 0:
found = True
p1 = line.find(";ytplayer.config = ") + len(";ytplayer.config = ") - 1
p2 = line.rfind(";")
if p1 <= 0 or p2 <= 0:
continue
data = line[p1 + 1:p2]
break
data = RemoveAdditionalEndingDelimiter(data)
if found:
data = json.loads(data)
if assets:
flashvars = data['assets']
else:
flashvars = data['args']
return flashvars
def FetchPage(url):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
req.add_header('Referer', 'http://www.youtube.com/')
return urllib2.urlopen(req).read().decode("utf-8")
def replaceHTMLCodes(txt):
# Fix missing ; in &#<number>;
txt = re.sub("(&#[0-9]+)([^;^0-9]+)", "\\1;\\2", txt)
txt = HTMLParser.HTMLParser().unescape(txt)
txt = txt.replace("&", "&")
return txt
def RemoveAdditionalEndingDelimiter(data):
pos = data.find("};")
if pos != -1:
data = data[:pos + 1]
return data
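# Example (added for illustration): trims trailing script text after the
# first object-closing "};", e.g.
#   '{"args": {"foo": 1}};ytplayer.load();'  ->  '{"args": {"foo": 1}}'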
####################################################
global playerData
global allLocalFunNamesTab
global allLocalVarNamesTab
def _extractVarLocalFuns(match):
varName, objBody = match.groups()
output = ''
for func in objBody.split( '},' ):
output += re.sub(
r'^([^:]+):function\(([^)]*)\)',
r'function %s__\1(\2,*args)' % varName,
func
) + '\n'
return output
def _jsToPy(jsFunBody):
pythonFunBody = re.sub(r'var ([^=]+)={(.*?)}};', _extractVarLocalFuns, jsFunBody)
pythonFunBody = re.sub(r'function (\w*)\$(\w*)', r'function \1_S_\2', pythonFunBody)
pythonFunBody = pythonFunBody.replace('function', 'def').replace('{', ':\n\t').replace('}', '').replace(';', '\n\t').replace('var ', '')
pythonFunBody = pythonFunBody.replace('.reverse()', '[::-1]')
lines = pythonFunBody.split('\n')
for i in range(len(lines)):
# a.split("") -> list(a)
match = re.search('(\w+?)\.split\(""\)', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), 'list(' + match.group(1) + ')')
# a.length -> len(a)
match = re.search('(\w+?)\.length', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), 'len(' + match.group(1) + ')')
# a.slice(3) -> a[3:]
match = re.search('(\w+?)\.slice\((\w+?)\)', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), match.group(1) + ('[%s:]' % match.group(2)) )
# a.join("") -> "".join(a)
match = re.search('(\w+?)\.join\(("[^"]*?")\)', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), match.group(2) + '.join(' + match.group(1) + ')' )
# a.splice(b,c) -> del a[b:c]
match = re.search('(\w+?)\.splice\(([^,]+),([^)]+)\)', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), 'del ' + match.group(1) + '[' + match.group(2) + ':' + match.group(3) + ']' )
pythonFunBody = "\n".join(lines)
pythonFunBody = re.sub(r'(\w+)\.(\w+)\(', r'\1__\2(', pythonFunBody)
pythonFunBody = re.sub(r'([^=])(\w+)\[::-1\]', r'\1\2.reverse()', pythonFunBody)
return pythonFunBody
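# Hedged illustration (added, not part of the original): a decipher helper like
#   function xy(a){a=a.split("");a.reverse();a=a.slice(2);return a.join("")}
# comes out of _jsToPy roughly as
#   def xy(a):
#       a=list(a)
#       a.reverse()
#       a=a[2:]
#       return "".join(a)
# ('{' and ';' become newlines, and a bare x[::-1] line is re-emitted as
# x.reverse() by the final regex fixup).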
def _jsToPy1(jsFunBody):
pythonFunBody = jsFunBody.replace('function', 'def').replace('{', ':\n\t').replace('}', '').replace(';', '\n\t').replace('var ', '')
pythonFunBody = pythonFunBody.replace('.reverse()', '[::-1]')
lines = pythonFunBody.split('\n')
for i in range(len(lines)):
# a.split("") -> list(a)
match = re.search('(\w+?)\.split\(""\)', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), 'list(' + match.group(1) + ')')
# a.length -> len(a)
match = re.search('(\w+?)\.length', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), 'len(' + match.group(1) + ')')
# a.slice(3) -> a[3:]
match = re.search('(\w+?)\.slice\(([0-9]+?)\)', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), match.group(1) + ('[%s:]' % match.group(2)) )
# a.join("") -> "".join(a)
match = re.search('(\w+?)\.join\(("[^"]*?")\)', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), match.group(2) + '.join(' + match.group(1) + ')' )
return "\n".join(lines)
def _getLocalFunBody(funName):
# get function body
funName = funName.replace('$', '\\$')
match = re.search('(function %s\([^)]+?\){[^}]+?})' % funName, playerData)
if match:
return match.group(1)
return ''
def _getAllLocalSubFunNames(mainFunBody):
match = re.compile('[ =(,](\w+?)\([^)]*?\)').findall( mainFunBody )
if len(match):
# first item is name of main function, so omit it
funNameTab = set( match[1:] )
return funNameTab
return set()
def _extractLocalVarNames(mainFunBody):
valid_funcs = ( 'reverse', 'split', 'splice', 'slice', 'join' )
match = re.compile( r'[; =(,](\w+)\.(\w+)\(' ).findall( mainFunBody )
local_vars = []
for name in match:
if name[1] not in valid_funcs:
local_vars.append( name[0] )
return set(local_vars)
def _getLocalVarObjBody(varName):
match = re.search( r'var %s={.*?}};' % varName, playerData )
if match:
return match.group(0)
return ''
def DecryptSignatureNew(s, playerUrl):
if not playerUrl.startswith('http:'):
playerUrl = 'http:' + playerUrl
#print "Decrypt_signature sign_len[%d] playerUrl[%s]" % (len(s), playerUrl)
global allLocalFunNamesTab
global allLocalVarNamesTab
global playerData
allLocalFunNamesTab = []
allLocalVarNamesTab = []
playerData = ''
request = urllib2.Request(playerUrl)
#res = core._fetchPage({u"link": playerUrl})
#playerData = res["content"]
try:
playerData = urllib2.urlopen(request).read()
playerData = playerData.decode('utf-8', 'ignore')
except Exception, e:
#print str(e)
print 'Failed to decode playerData'
return ''
# get main function name
match = re.search("signature=([$a-zA-Z]+)\([^)]\)", playerData)
if match:
mainFunName = match.group(1)
else:
print('Failed to get main signature function name')
return ''
_mainFunName = mainFunName.replace('$','_S_')
fullAlgoCode = _getfullAlgoCode(mainFunName)
# wrap all local algo function into one function extractedSignatureAlgo()
algoLines = fullAlgoCode.split('\n')
for i in range(len(algoLines)):
algoLines[i] = '\t' + algoLines[i]
fullAlgoCode = 'def extractedSignatureAlgo(param):'
fullAlgoCode += '\n'.join(algoLines)
fullAlgoCode += '\n\treturn %s(param)' % _mainFunName
fullAlgoCode += '\noutSignature = extractedSignatureAlgo( inSignature )\n'
# after this function we should have all needed code in fullAlgoCode
#print '---------------------------------------'
#print '| ALGO FOR SIGNATURE DECRYPTION |'
#print '---------------------------------------'
#print fullAlgoCode
#print '---------------------------------------'
try:
algoCodeObj = compile(fullAlgoCode, '', 'exec')
except:
print 'Failed to obtain decryptSignature code'
return ''
    # for security, allow only a few Python global functions in the algo code
vGlobals = {"__builtins__": None, 'len': len, 'list': list}
# local variable to pass encrypted sign and get decrypted sign
vLocals = { 'inSignature': s, 'outSignature': '' }
# execute prepared code
try:
exec(algoCodeObj, vGlobals, vLocals)
except:
        print 'decryptSignature code failed to execute correctly'
return ''
#print 'Decrypted signature = [%s]' % vLocals['outSignature']
return vLocals['outSignature']
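# Hedged sketch (added, not part of the original): the restricted-exec
# pattern used above, in miniature. Exposing only len/list keeps the
# generated code from importing modules or touching the filesystem.
#   code = compile("outSignature = ''.join(list(inSignature))[::-1]", '', 'exec')
#   g = {'__builtins__': None, 'len': len, 'list': list}
#   l = {'inSignature': 'abc', 'outSignature': ''}
#   exec(code, g, l)   # l['outSignature'] == 'cba'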
# Note, this method is using a recursion
def _getfullAlgoCode(mainFunName, recDepth=0):
global playerData
global allLocalFunNamesTab
global allLocalVarNamesTab
if MAX_REC_DEPTH <= recDepth:
print '_getfullAlgoCode: Maximum recursion depth exceeded'
return
funBody = _getLocalFunBody(mainFunName)
if funBody != '':
funNames = _getAllLocalSubFunNames(funBody)
if len(funNames):
for funName in funNames:
funName_ = funName.replace('$','_S_')
if funName not in allLocalFunNamesTab:
funBody=funBody.replace(funName,funName_)
allLocalFunNamesTab.append(funName)
#print 'Add local function %s to known functions' % mainFunName
                    funBody = _getfullAlgoCode(funName, recDepth+1) + "\n" + funBody
varNames = _extractLocalVarNames(funBody)
if len(varNames):
for varName in varNames:
if varName not in allLocalVarNamesTab:
allLocalVarNamesTab.append(varName)
funBody = _getLocalVarObjBody(varName) + "\n" + funBody
# convert code from javascript to python
funBody = _jsToPy(funBody)
return '\n' + funBody + '\n'
return funBody
| quequino/Revolution | plugin.program.vpnicity/yt.py | Python | gpl-2.0 | 16,924 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
import calcoo
import calcoohija
import csv
if __name__ == "__main__":
calc = calcoohija.CalculadoraHija()
with open(sys.argv[1]) as fichero:
reader = csv.reader(fichero)
for operandos in reader:
operacion = operandos[0]
if operacion == "suma":
resultado = calc.suma(int(operandos[1]), int(operandos[2]))
for numero in operandos[3:]:
resultado = calc.suma(int(resultado), int(numero))
print(resultado)
elif operacion == "resta":
resultado = calc.resta(int(operandos[1]), int(operandos[2]))
for numero in operandos[3:]:
resultado = calc.resta(int(resultado), int(numero))
print(resultado)
elif operacion == "multiplica":
resultado = calc.producto(int(operandos[1]), int(operandos[2]))
for numero in operandos[3:]:
resultado = calc.producto(int(resultado), int(numero))
print (resultado)
elif operacion == "divide":
resultado = calc.division(int(operandos[1]), int(operandos[2]))
for numero in operandos[3:]:
resultado = calc.division(int(resultado), int(numero))
print (resultado)
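# Hedged example input (added; results assume calcoohija implements plain
# integer arithmetic). Each CSV row is an operation plus integer operands;
# the first two operands are combined, then the rest are folded in:
#   suma,1,2,3        -> 6
#   resta,10,2,3      -> 5
#   multiplica,2,3,4  -> 24
#   divide,100,5,2    -> 10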
| cescudero/ptavi-p2 | calcplusplus.py | Python | gpl-2.0 | 1,398 |
#
# Copyright (c) 2004 Conectiva, Inc.
#
# Written by Gustavo Niemeyer <niemeyer@conectiva.com>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from smart.const import BLOCKSIZE
from smart import *
class Uncompressor(object):
_handlers = []
def addHandler(self, handler):
self._handlers.append(handler())
addHandler = classmethod(addHandler)
def getHandler(self, localpath):
for handler in self._handlers:
if handler.query(localpath):
return handler
getHandler = classmethod(getHandler)
def uncompress(self, localpath):
for handler in self._handlers:
if handler.query(localpath):
return handler.uncompress(localpath)
else:
raise Error, _("Unknown compressed file: %s") % localpath
class UncompressorHandler(object):
def query(self, localpath):
return None
def getTargetPath(self, localpath):
return None
def uncompress(self, localpath):
raise Error, _("Unsupported file type")
class BZ2Handler(UncompressorHandler):
def query(self, localpath):
if localpath.endswith(".bz2"):
return True
def getTargetPath(self, localpath):
return localpath[:-4]
def uncompress(self, localpath):
import bz2
try:
input = bz2.BZ2File(localpath)
output = open(self.getTargetPath(localpath), "w")
data = input.read(BLOCKSIZE)
while data:
output.write(data)
data = input.read(BLOCKSIZE)
except (IOError, OSError), e:
raise Error, "%s: %s" % (localpath, e)
Uncompressor.addHandler(BZ2Handler)
class GZipHandler(UncompressorHandler):
def query(self, localpath):
if localpath.endswith(".gz"):
return True
def getTargetPath(self, localpath):
return localpath[:-3]
def uncompress(self, localpath):
import gzip
try:
input = gzip.GzipFile(localpath)
output = open(self.getTargetPath(localpath), "w")
data = input.read(BLOCKSIZE)
while data:
output.write(data)
data = input.read(BLOCKSIZE)
except (IOError, OSError), e:
raise Error, "%s: %s" % (localpath, e)
Uncompressor.addHandler(GZipHandler)
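# Hedged usage sketch (added, not part of the original module):
#   uncomp = Uncompressor()
#   handler = uncomp.getHandler("repodata.xml.gz")     # -> a GZipHandler
#   if handler:
#       print handler.getTargetPath("repodata.xml.gz") # repodata.xml
#       uncomp.uncompress("repodata.xml.gz")           # writes repodata.xml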
| colloquium/spacewalk | client/solaris/smartpm/smart/uncompress.py | Python | gpl-2.0 | 3,089 |
class PGeoException(Exception):
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self, message)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
rv['status_code'] = self.status_code
return rv
def get_message(self):
return self.message
def get_status_code(self):
return self.status_code
errors = {
510: 'Error fetching available data providers.',
511: 'Data provider is not currently supported.',
512: 'Source type is not currently supported.',
513: 'Error while parsing the payload of the request.',
# geoserver
520: "There is already a store named",
521: "No coverage store named",
522: "Layer file doesn't exists",
523: "Error creating workspace",
# Data processing
550: "Error processing data",
}
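# Hedged usage sketch (added, not part of the original module):
#   try:
#       raise PGeoException(errors[511], status_code=511)
#   except PGeoException, e:
#       print e.to_dict()
#       # -> {'message': 'Data provider is not currently supported.',
#       #     'status_code': 511}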
| geobricks/pgeo | pgeo/error/custom_exceptions.py | Python | gpl-2.0 | 1,045 |
"""text_file
provides the TextFile class, which gives an interface to text files
that (optionally) takes care of stripping comments, ignoring blank
lines, and joining lines with backslashes."""
__revision__ = "$Id$"
from types import *
import sys, os, string
class TextFile:
"""Provides a file-like object that takes care of all the things you
commonly want to do when processing a text file that has some
line-by-line syntax: strip comments (as long as "#" is your
comment character), skip blank lines, join adjacent lines by
escaping the newline (ie. backslash at end of line), strip
leading and/or trailing whitespace. All of these are optional
and independently controllable.
Provides a 'warn()' method so you can generate warning messages that
report physical line number, even if the logical line in question
spans multiple physical lines. Also provides 'unreadline()' for
implementing line-at-a-time lookahead.
Constructor is called as:
TextFile (filename=None, file=None, **options)
It bombs (RuntimeError) if both 'filename' and 'file' are None;
'filename' should be a string, and 'file' a file object (or
something that provides 'readline()' and 'close()' methods). It is
recommended that you supply at least 'filename', so that TextFile
can include it in warning messages. If 'file' is not supplied,
TextFile creates its own using the 'open()' builtin.
The options are all boolean, and affect the value returned by
'readline()':
strip_comments [default: true]
strip from "#" to end-of-line, as well as any whitespace
leading up to the "#" -- unless it is escaped by a backslash
lstrip_ws [default: false]
strip leading whitespace from each line before returning it
rstrip_ws [default: true]
strip trailing whitespace (including line terminator!) from
each line before returning it
    skip_blanks [default: true]
skip lines that are empty *after* stripping comments and
whitespace. (If both lstrip_ws and rstrip_ws are false,
then some lines may consist of solely whitespace: these will
*not* be skipped, even if 'skip_blanks' is true.)
join_lines [default: false]
if a backslash is the last non-newline character on a line
after stripping comments and whitespace, join the following line
to it to form one "logical line"; if N consecutive lines end
with a backslash, then N+1 physical lines will be joined to
form one logical line.
collapse_join [default: false]
strip leading whitespace from lines that are joined to their
predecessor; only matters if (join_lines and not lstrip_ws)
Note that since 'rstrip_ws' can strip the trailing newline, the
semantics of 'readline()' must differ from those of the builtin file
object's 'readline()' method! In particular, 'readline()' returns
None for end-of-file: an empty string might just be a blank line (or
an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is
not."""
default_options = { 'strip_comments': 1,
'skip_blanks': 1,
'lstrip_ws': 0,
'rstrip_ws': 1,
'join_lines': 0,
'collapse_join': 0,
}
def __init__ (self, filename=None, file=None, **options):
"""Construct a new TextFile object. At least one of 'filename'
(a string) and 'file' (a file-like object) must be supplied.
        The keyword argument options are described above and affect
the values returned by 'readline()'."""
if filename is None and file is None:
raise RuntimeError, \
"you must supply either or both of 'filename' and 'file'"
# set values for all options -- either from client option hash
# or fallback to default_options
for opt in self.default_options.keys():
if opt in options:
setattr (self, opt, options[opt])
else:
setattr (self, opt, self.default_options[opt])
# sanity check client option hash
for opt in options.keys():
if opt not in self.default_options:
raise KeyError, "invalid TextFile option '%s'" % opt
if file is None:
self.open (filename)
else:
self.filename = filename
self.file = file
self.current_line = 0 # assuming that file is at BOF!
# 'linebuf' is a stack of lines that will be emptied before we
# actually read from the file; it's only populated by an
# 'unreadline()' operation
self.linebuf = []
def open (self, filename):
"""Open a new file named 'filename'. This overrides both the
'filename' and 'file' arguments to the constructor."""
self.filename = filename
self.file = open (self.filename, 'r')
self.current_line = 0
def close (self):
"""Close the current file and forget everything we know about it
(filename, current line number)."""
self.file.close ()
self.file = None
self.filename = None
self.current_line = None
def gen_error (self, msg, line=None):
outmsg = []
if line is None:
line = self.current_line
outmsg.append(self.filename + ", ")
if type (line) in (ListType, TupleType):
outmsg.append("lines %d-%d: " % tuple (line))
else:
outmsg.append("line %d: " % line)
outmsg.append(str(msg))
return string.join(outmsg, "")
def error (self, msg, line=None):
raise ValueError, "error: " + self.gen_error(msg, line)
def warn (self, msg, line=None):
"""Print (to stderr) a warning message tied to the current logical
line in the current file. If the current logical line in the
file spans multiple physical lines, the warning refers to the
whole range, eg. "lines 3-5". If 'line' supplied, it overrides
the current line number; it may be a list or tuple to indicate a
range of physical lines, or an integer for a single physical
line."""
sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n")
def readline (self):
"""Read and return a single logical line from the current file (or
from an internal buffer if lines have previously been "unread"
with 'unreadline()'). If the 'join_lines' option is true, this
may involve reading multiple physical lines concatenated into a
single string. Updates the current line number, so calling
'warn()' after 'readline()' emits a warning about the physical
line(s) just read. Returns None on end-of-file, since the empty
        string can occur if 'rstrip_ws' is true but 'skip_blanks' is
not."""
# If any "unread" lines waiting in 'linebuf', return the top
# one. (We don't actually buffer read-ahead data -- lines only
# get put in 'linebuf' if the client explicitly does an
        # 'unreadline()'.)
if self.linebuf:
line = self.linebuf[-1]
del self.linebuf[-1]
return line
buildup_line = ''
while 1:
# read the line, make it None if EOF
line = self.file.readline()
if line == '': line = None
if self.strip_comments and line:
# Look for the first "#" in the line. If none, never
# mind. If we find one and it's the first character, or
# is not preceded by "\", then it starts a comment --
# strip the comment, strip whitespace before it, and
# carry on. Otherwise, it's just an escaped "#", so
# unescape it (and any other escaped "#"'s that might be
# lurking in there) and otherwise leave the line alone.
pos = string.find (line, "#")
if pos == -1: # no "#" -- no comments
pass
# It's definitely a comment -- either "#" is the first
# character, or it's elsewhere and unescaped.
elif pos == 0 or line[pos-1] != "\\":
# Have to preserve the trailing newline, because it's
# the job of a later step (rstrip_ws) to remove it --
# and if rstrip_ws is false, we'd better preserve it!
# (NB. this means that if the final line is all comment
# and has no trailing newline, we will think that it's
# EOF; I think that's OK.)
eol = (line[-1] == '\n') and '\n' or ''
line = line[0:pos] + eol
# If all that's left is whitespace, then skip line
# *now*, before we try to join it to 'buildup_line' --
# that way constructs like
# hello \\
# # comment that should be ignored
# there
# result in "hello there".
if string.strip(line) == "":
continue
else: # it's an escaped "#"
line = string.replace (line, "\\#", "#")
# did previous line end with a backslash? then accumulate
if self.join_lines and buildup_line:
# oops: end of file
if line is None:
self.warn ("continuation line immediately precedes "
"end-of-file")
return buildup_line
if self.collapse_join:
line = string.lstrip (line)
line = buildup_line + line
# careful: pay attention to line number when incrementing it
if type (self.current_line) is ListType:
self.current_line[1] = self.current_line[1] + 1
else:
self.current_line = [self.current_line,
self.current_line+1]
# just an ordinary line, read it as usual
else:
if line is None: # eof
return None
# still have to be careful about incrementing the line number!
if type (self.current_line) is ListType:
self.current_line = self.current_line[1] + 1
else:
self.current_line = self.current_line + 1
# strip whitespace however the client wants (leading and
# trailing, or one or the other, or neither)
if self.lstrip_ws and self.rstrip_ws:
line = string.strip (line)
elif self.lstrip_ws:
line = string.lstrip (line)
elif self.rstrip_ws:
line = string.rstrip (line)
# blank line (whether we rstrip'ed or not)? skip to next line
# if appropriate
if (line == '' or line == '\n') and self.skip_blanks:
continue
if self.join_lines:
if line[-1] == '\\':
buildup_line = line[:-1]
continue
if line[-2:] == '\\\n':
buildup_line = line[0:-2] + '\n'
continue
# well, I guess there's some actual content there: return it
return line
# readline ()
def readlines (self):
"""Read and return the list of all logical lines remaining in the
current file."""
lines = []
while 1:
line = self.readline()
if line is None:
return lines
lines.append (line)
def unreadline (self, line):
"""Push 'line' (a string) onto an internal buffer that will be
checked by future 'readline()' calls. Handy for implementing
a parser with line-at-a-time lookahead."""
self.linebuf.append (line)
if __name__ == "__main__":
test_data = """# test file
line 3 \\
# intervening comment
continues on next line
"""
# result 1: no fancy options
result1 = map (lambda x: x + "\n", string.split (test_data, "\n")[0:-1])
# result 2: just strip comments
result2 = ["\n",
"line 3 \\\n",
" continues on next line\n"]
# result 3: just strip blank lines
result3 = ["# test file\n",
"line 3 \\\n",
"# intervening comment\n",
" continues on next line\n"]
# result 4: default, strip comments, blank lines, and trailing whitespace
result4 = ["line 3 \\",
" continues on next line"]
    # result 5: strip comments and blanks, plus join lines (but don't
    # "collapse" joined lines)
result5 = ["line 3 continues on next line"]
    # result 6: strip comments and blanks, plus join lines (and
    # "collapse" joined lines)
result6 = ["line 3 continues on next line"]
def test_input (count, description, file, expected_result):
result = file.readlines ()
# result = string.join (result, '')
if result == expected_result:
print "ok %d (%s)" % (count, description)
else:
print "not ok %d (%s):" % (count, description)
print "** expected:"
print expected_result
print "** received:"
print result
filename = "test.txt"
out_file = open (filename, "w")
out_file.write (test_data)
out_file.close ()
in_file = TextFile (filename, strip_comments=0, skip_blanks=0,
lstrip_ws=0, rstrip_ws=0)
test_input (1, "no processing", in_file, result1)
in_file = TextFile (filename, strip_comments=1, skip_blanks=0,
lstrip_ws=0, rstrip_ws=0)
test_input (2, "strip comments", in_file, result2)
in_file = TextFile (filename, strip_comments=0, skip_blanks=1,
lstrip_ws=0, rstrip_ws=0)
test_input (3, "strip blanks", in_file, result3)
in_file = TextFile (filename)
test_input (4, "default processing", in_file, result4)
in_file = TextFile (filename, strip_comments=1, skip_blanks=1,
join_lines=1, rstrip_ws=1)
test_input (5, "join lines without collapsing", in_file, result5)
in_file = TextFile (filename, strip_comments=1, skip_blanks=1,
join_lines=1, rstrip_ws=1, collapse_join=1)
test_input (6, "join lines with collapsing", in_file, result6)
os.remove (filename)
| 2ndy/RaspIM | usr/lib/python2.6/distutils/text_file.py | Python | gpl-2.0 | 15,086 |
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 21 04:00:41 2016
@author: irnakat
"""
# test IOfile
import IOfile
from TFCalculator import TFCalculator as TFC
import TFDisplayTools
# validity test for SH PSV case using S wave as an input
# filename
fname = 'sampleinput_linear_elastic_1layer_halfspace.dat'
fname2 = 'sampleinput_psv_s_linear_elastic_1layer_halfspace.dat'
# input file reading
datash = IOfile.parsing_input_file(fname)
datapsvs = IOfile.parsing_input_file(fname2)
# kramer
print 'TF calculation using kramer approach'
theclass1 = TFC(datash)
theclass1.tf_kramer286_sh() # check/verify kramer calculation
print 'calculation has been finished!'
# knopoff sh complete
print 'TF calculation using complete knopoff sh approach'
theclass3 = TFC(datash)
theclass3.tf_knopoff_sh_adv()
print 'calculation has been finished!'
# knopoff psv-s
print 'TF calculation using complete knopoff psv-s approach'
theclass4 = TFC(datapsvs)
theclass4.tf_knopoff_psv_adv()
print theclass4.tf[1][19]
print 'calculation has been finished!'
# kennet sh
print 'TF calculation using kennet sh method'
theclass5 = TFC(datash)
theclass5.tf_kennet_sh()
print 'calculation has been finished!'
TFDisplayTools.TFPlot(theclass1,theclass3,theclass5,theclass4, \
label=['Kramer SH','Knopoff SH','Kennet SH','Knopoff PSV'])
TFDisplayTools.PhasePlot(theclass1,theclass3,theclass5,theclass4, \
label=['Kramer SH','Knopoff SH','Kennet SH','Knopoff PSV'])
| blueray45/GSRT | test01_validity_test_SH-PSV_input_S.py | Python | gpl-2.0 | 1,502 |
#!/usr/bin/env python
"""
plot magnetic lattice
"""
import matplotlib.pylab as plt
import numpy as np
f12 = 'AWDall.lat'
data12 = np.loadtxt(f12)
plt.plot(data12[:,0], data12[:,1], 'r-',
data12[:,0], data12[:,2], 'b-',
linewidth=2)
plt.xlim([110,240])
plt.ylim([1.5,1.53])
plt.legend([r'$a_u$',r'$a_d$'],1)
plt.xlabel(r'$z\,\mathrm{[m]}$',fontsize=18)
plt.ylabel(r'undulator parameter',fontsize=18)
plt.show()
| Archman/pandora | python/scripts/plotaw.py | Python | gpl-2.0 | 420 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import with_statement
from collections import defaultdict
from copy import deepcopy
import axiom_rules
import fact_groups
import instantiate
import pddl
import sas_tasks
import simplify
import timers
# TODO: The translator may generate trivial derived variables which are always true,
# for example if there is a derived predicate in the input that only depends on
# (non-derived) variables which are detected as always true.
# Such a situation was encountered in the PSR-STRIPS-DerivedPredicates domain.
# Such "always-true" variables should best be compiled away, but it is
# not clear what the best place to do this should be. Similar
# simplifications might be possible elsewhere, for example if a
# derived variable is synonymous with another variable (derived or
# non-derived).
ALLOW_CONFLICTING_EFFECTS = True
USE_PARTIAL_ENCODING = True
DETECT_UNREACHABLE = True
## Setting the following variable to True can cause a severe
## performance penalty due to weaker relevance analysis (see issue7).
ADD_IMPLIED_PRECONDITIONS = False
removed_implied_effect_counter = 0
simplified_effect_condition_counter = 0
added_implied_precondition_counter = 0
def strips_to_sas_dictionary(groups, assert_partial):
dictionary = {}
for var_no, group in enumerate(groups):
for val_no, atom in enumerate(group):
dictionary.setdefault(atom, []).append((var_no, val_no))
if assert_partial:
assert all(len(sas_pairs) == 1
for sas_pairs in dictionary.itervalues())
return [len(group) + 1 for group in groups], dictionary
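# Worked example (added for illustration): with two invariant groups
#   groups = [[Atom at-A, Atom at-B], [Atom handempty]]
# the function returns ranges [3, 2] (one extra "none of those" value per
# group) and a dictionary mapping each atom to its (variable, value) pairs:
#   {at-A: [(0, 0)], at-B: [(0, 1)], handempty: [(1, 0)]}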
def translate_strips_conditions_aux(conditions, dictionary, ranges):
condition = {}
for fact in conditions:
if fact.negated:
# we handle negative conditions later, because then we
# can recognize when the negative condition is already
# ensured by a positive condition
continue
for var, val in dictionary.get(fact, ()):
# The default () here is a bit of a hack. For goals (but
# only for goals!), we can get static facts here. They
# cannot be statically false (that would have been
# detected earlier), and hence they are statically true
# and don't need to be translated.
# TODO: This would not be necessary if we dealt with goals
# in the same way we deal with operator preconditions etc.,
# where static facts disappear during grounding. So change
# this when the goal code is refactored (also below). (**)
if (condition.get(var) is not None and
val not in condition.get(var)):
# Conflicting conditions on this variable: Operator invalid.
return None
condition[var] = set([val])
for fact in conditions:
if fact.negated:
## Note Here we use a different solution than in Sec. 10.6.4
## of the thesis. Compare the last sentences of the third
## paragraph of the section.
## We could do what is written there. As a test case,
## consider Airport ADL tasks with only one airport, where
## (occupied ?x) variables are encoded in a single variable,
## and conditions like (not (occupied ?x)) do occur in
## preconditions.
## However, here we avoid introducing new derived predicates
            ## by treating the negative precondition as a disjunctive precondition
## and expanding it by "multiplying out" the possibilities.
## This can lead to an exponential blow-up so it would be nice
## to choose the behaviour as an option.
done = False
new_condition = {}
atom = pddl.Atom(fact.predicate, fact.args) # force positive
for var, val in dictionary.get(atom, ()):
# see comment (**) above
poss_vals = set(range(ranges[var]))
poss_vals.remove(val)
if condition.get(var) is None:
assert new_condition.get(var) is None
new_condition[var] = poss_vals
else:
# constrain existing condition on var
prev_possible_vals = condition.get(var)
done = True
prev_possible_vals.intersection_update(poss_vals)
if len(prev_possible_vals) == 0:
# Conflicting conditions on this variable:
# Operator invalid.
return None
if not done and len(new_condition) != 0:
# we did not enforce the negative condition by constraining
# an existing condition on one of the variables representing
# this atom. So we need to introduce a new condition:
# We can select any from new_condition and currently prefer the
            # smallest one.
candidates = sorted(new_condition.items(),
lambda x,y: cmp(len(x[1]),len(y[1])))
var, vals = candidates[0]
condition[var] = vals
def multiply_out(condition): # destroys the input
sorted_conds = sorted(condition.items(),
lambda x,y: cmp(len(x[1]),len(y[1])))
flat_conds = [{}]
for var, vals in sorted_conds:
if len(vals) == 1:
for cond in flat_conds:
cond[var] = vals.pop() # destroys the input here
else:
new_conds = []
for cond in flat_conds:
for val in vals:
new_cond = deepcopy(cond)
new_cond[var] = val
new_conds.append(new_cond)
flat_conds = new_conds
return flat_conds
return multiply_out(condition)
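# Worked example (added for illustration): multiply_out flattens the
# var -> possible-values map into DNF, one dict per alternative, e.g.
#   {0: set([1]), 1: set([0, 2])} -> [{0: 1, 1: 0}, {0: 1, 1: 2}]
# so each negative condition expands into one operator copy per alternative.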
def translate_strips_conditions(conditions, dictionary, ranges,
mutex_dict, mutex_ranges):
if not conditions:
return [{}] # Quick exit for common case.
# Check if the condition violates any mutexes.
if translate_strips_conditions_aux(
conditions, mutex_dict, mutex_ranges) is None:
return None
return translate_strips_conditions_aux(conditions, dictionary, ranges)
# 17/1/2012 -- Author- David Pattison -- This method is a copy of the normal translate_strips_conditions_aux
# but has had the check for conflicting conditions (in this case,
# goal conditions) removed.
def translate_strips_conditions_goal_aux(conditions, dictionary, ranges):
condition = {}
for fact in conditions:
if fact.negated:
# we handle negative conditions later, because then we
# can recognize when the negative condition is already
# ensured by a positive condition
continue
for var, val in dictionary.get(fact, ()):
# The default () here is a bit of a hack. For goals (but
# only for goals!), we can get static facts here. They
# cannot be statically false (that would have been
# detected earlier), and hence they are statically true
# and don't need to be translated.
# TODO: This would not be necessary if we dealt with goals
# in the same way we deal with operator preconditions etc.,
# where static facts disappear during grounding. So change
# this when the goal code is refactored (also below). (**)
# if (condition.get(var) is not None and
# val not in condition.get(var)):
# # Conflicting conditions on this variable: Operator invalid.
# return None
condition[var] = set([val])
for fact in conditions:
if fact.negated:
## Note Here we use a different solution than in Sec. 10.6.4
## of the thesis. Compare the last sentences of the third
## paragraph of the section.
## We could do what is written there. As a test case,
## consider Airport ADL tasks with only one airport, where
## (occupied ?x) variables are encoded in a single variable,
## and conditions like (not (occupied ?x)) do occur in
## preconditions.
## However, here we avoid introducing new derived predicates
            ## by treating the negative precondition as a disjunctive precondition
## and expanding it by "multiplying out" the possibilities.
## This can lead to an exponential blow-up so it would be nice
## to choose the behaviour as an option.
done = False
new_condition = {}
atom = pddl.Atom(fact.predicate, fact.args) # force positive
for var, val in dictionary.get(atom, ()):
# see comment (**) above
poss_vals = set(range(ranges[var]))
poss_vals.remove(val)
if condition.get(var) is None:
assert new_condition.get(var) is None
new_condition[var] = poss_vals
else:
# constrain existing condition on var
prev_possible_vals = condition.get(var)
done = True
prev_possible_vals.intersection_update(poss_vals)
if len(prev_possible_vals) == 0:
# Conflicting conditions on this variable:
# Operator invalid.
return None
if not done and len(new_condition) != 0:
# we did not enforce the negative condition by constraining
# an existing condition on one of the variables representing
# this atom. So we need to introduce a new condition:
# We can select any from new_condition and currently prefer the
            # smallest one.
candidates = sorted(new_condition.items(),
lambda x,y: cmp(len(x[1]),len(y[1])))
var, vals = candidates[0]
condition[var] = vals
def multiply_out(condition): # destroys the input
sorted_conds = sorted(condition.items(),
lambda x,y: cmp(len(x[1]),len(y[1])))
flat_conds = [{}]
for var, vals in sorted_conds:
if len(vals) == 1:
for cond in flat_conds:
cond[var] = vals.pop() # destroys the input here
else:
new_conds = []
for cond in flat_conds:
for val in vals:
new_cond = deepcopy(cond)
new_cond[var] = val
new_conds.append(new_cond)
flat_conds = new_conds
return flat_conds
return multiply_out(condition)
# 17/1/2012 -- Author- David Pattison -- added this method which ignores mutexes in the goal because
# the translator usually rejects these, but AUTOGRAPH and IGRAPH
# need to generate a proposition for every object in the problem
# in order to get their associated DTGs and CG,
# which will probably mean mutex facts being in the goal!
def translate_strips_goal(conditions, dictionary, ranges,
mutex_dict, mutex_ranges):
if not conditions:
return [{}] # Quick exit for common case.
# Check if the condition violates any mutexes.
# if translate_strips_conditions_aux(
# conditions, mutex_dict, mutex_ranges) is None:
# return None
return translate_strips_conditions_goal_aux(conditions, dictionary, ranges)
def translate_strips_operator(operator, dictionary, ranges, mutex_dict, mutex_ranges, implied_facts):
conditions = translate_strips_conditions(operator.precondition, dictionary, ranges, mutex_dict, mutex_ranges)
if conditions is None:
return []
sas_operators = []
for condition in conditions:
op = translate_strips_operator_aux(operator, dictionary, ranges,
mutex_dict, mutex_ranges,
implied_facts, condition)
sas_operators.append(op)
return sas_operators
def translate_strips_operator_aux(operator, dictionary, ranges, mutex_dict,
mutex_ranges, implied_facts, condition):
# NOTE: This function does not really deal with the intricacies of properly
# encoding delete effects for grouped propositions in the presence of
# conditional effects. It should work ok but will bail out in more
# complicated cases even though a conflict does not necessarily exist.
possible_add_conflict = False
effect = {}
for conditions, fact in operator.add_effects:
eff_condition_list = translate_strips_conditions(conditions, dictionary,
ranges, mutex_dict,
mutex_ranges)
if eff_condition_list is None: # Impossible condition for this effect.
continue
eff_condition = [eff_cond.items()
for eff_cond in eff_condition_list]
for var, val in dictionary[fact]:
if condition.get(var) == val:
# Effect implied by precondition.
global removed_implied_effect_counter
removed_implied_effect_counter += 1
# print "Skipping effect of %s..." % operator.name
continue
effect_pair = effect.get(var)
if not effect_pair:
effect[var] = (val, eff_condition)
else:
other_val, eff_conditions = effect_pair
# Don't flag conflict just yet... the operator might be invalid
# because of conflicting add/delete effects (see pipesworld).
if other_val != val:
possible_add_conflict = True
eff_conditions.extend(eff_condition)
for conditions, fact in operator.del_effects:
eff_condition_list = translate_strips_conditions(conditions, dictionary, ranges, mutex_dict, mutex_ranges)
if eff_condition_list is None:
continue
eff_condition = [eff_cond.items()
for eff_cond in eff_condition_list]
for var, val in dictionary[fact]:
none_of_those = ranges[var] - 1
other_val, eff_conditions = effect.setdefault(var, (none_of_those, []))
if other_val != none_of_those:
# Look for matching add effect; ignore this del effect if found.
for cond in eff_condition:
assert cond in eff_conditions or [] in eff_conditions, \
"Add effect with uncertain del effect partner?"
if other_val == val:
if ALLOW_CONFLICTING_EFFECTS:
# Conflicting ADD and DEL effect. This is *only* allowed if
# this is also a precondition, in which case there is *no*
# effect (the ADD takes precedence). We delete the add effect here.
if condition.get(var) != val:
# HACK HACK HACK!
# There used to be an assertion here that actually
# forbid this, but this was wrong in Pipesworld-notankage
# (e.g. task 01). The thing is, it *is* possible for
# an operator with proven (with the given invariants)
# inconsistent preconditions to actually end up here if
# the inconsistency of the preconditions is not obvious at
# the SAS+ encoding level.
#
# Example: Pipes-Notank-01, operator
# (pop-unitarypipe s12 b4 a1 a2 b4 lco lco).
# This has precondition last(b4, s12) and on(b4, a2) which
# is inconsistent because b4 can only be in one place.
# However, the chosen encoding encodes *what is last in s12*
# separately, and so the precondition translates to
# "last(s12) = b4 and on(b4) = a2", which does not look
# inconsistent at first glance.
#
# Something reasonable to do here would be to make a
# decent check that the precondition is indeed inconsistent
# (using *all* mutexes), but that seems tough with this
# convoluted code, so we just warn and reject the operator.
print "Warning: %s rejected. Cross your fingers." % (
operator.name)
return None
assert False
assert eff_conditions == [[]]
del effect[var]
else:
assert not eff_condition[0] and not eff_conditions[0], "Uncertain conflict"
return None # Definite conflict otherwise.
else: # no add effect on this variable
if condition.get(var) != val:
if var in condition:
## HACK HACK HACK! There is a precondition on the variable for
## this delete effect on another value, so there is no need to
## represent the delete effect. Right? Right???
del effect[var]
continue
for index, cond in enumerate(eff_condition_list):
if cond.get(var) != val:
# Need a guard for this delete effect.
assert (var not in condition and
var not in eff_condition[index]), "Oops?"
eff_condition[index].append((var, val))
eff_conditions.extend(eff_condition)
if possible_add_conflict:
operator.dump()
assert not possible_add_conflict, "Conflicting add effects?"
# assert eff_condition != other_condition, "Duplicate effect"
# assert eff_condition and other_condition, "Dominated conditional effect"
if ADD_IMPLIED_PRECONDITIONS:
implied_precondition = set()
for fact in condition.iteritems():
implied_precondition.update(implied_facts[fact])
pre_post = []
for var, (post, eff_condition_lists) in effect.iteritems():
pre = condition.pop(var, -1)
if ranges[var] == 2:
# Apply simplifications for binary variables.
if prune_stupid_effect_conditions(var, post, eff_condition_lists):
global simplified_effect_condition_counter
simplified_effect_condition_counter += 1
if (ADD_IMPLIED_PRECONDITIONS and
pre == -1 and (var, 1 - post) in implied_precondition):
global added_implied_precondition_counter
added_implied_precondition_counter += 1
pre = 1 - post
# print "Added precondition (%d = %d) to %s" % (
# var, pre, operator.name)
for eff_condition in eff_condition_lists:
pre_post.append((var, pre, post, eff_condition))
prevail = condition.items()
return sas_tasks.SASOperator(operator.name, prevail, pre_post, operator.cost)
def prune_stupid_effect_conditions(var, val, conditions):
## (IF <conditions> THEN <var> := <val>) is a conditional effect.
## <var> is guaranteed to be a binary variable.
## <conditions> is in DNF representation (list of lists).
##
## We simplify <conditions> by applying two rules:
## 1. Conditions of the form "var = dualval" where var is the
## effect variable and dualval != val can be omitted.
## (If var != dualval, then var == val because it is binary,
    ##    which means that in such situations the effect is a no-op.)
## 2. If conditions contains any empty list, it is equivalent
## to True and we can remove all other disjuncts.
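    ##
    ## Example (with val == 1, so the dual fact is (var, 0)):
    ##   [[(var, 0)], [(x, 2)]]  --rule 1-->  [[], [(x, 2)]]  --rule 2-->  [[]]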
##
## returns True when anything was changed
if conditions == [[]]:
return False ## Quick exit for common case.
assert val in [0, 1]
dual_fact = (var, 1 - val)
simplified = False
for condition in conditions:
# Apply rule 1.
while dual_fact in condition:
# print "*** Removing dual condition"
simplified = True
condition.remove(dual_fact)
# Apply rule 2.
if not condition:
conditions[:] = [[]]
simplified = True
break
return simplified
def translate_strips_axiom(axiom, dictionary, ranges, mutex_dict, mutex_ranges):
conditions = translate_strips_conditions(axiom.condition, dictionary, ranges, mutex_dict, mutex_ranges)
if conditions is None:
return []
if axiom.effect.negated:
[(var, _)] = dictionary[axiom.effect.positive()]
effect = (var, ranges[var] - 1)
else:
[effect] = dictionary[axiom.effect]
axioms = []
for condition in conditions:
axioms.append(sas_tasks.SASAxiom(condition.items(), effect))
return axioms
def translate_strips_operators(actions, strips_to_sas, ranges, mutex_dict, mutex_ranges, implied_facts):
result = []
for action in actions:
sas_ops = translate_strips_operator(action, strips_to_sas, ranges, mutex_dict, mutex_ranges, implied_facts)
result.extend(sas_ops)
return result
def translate_strips_axioms(axioms, strips_to_sas, ranges, mutex_dict, mutex_ranges):
result = []
for axiom in axioms:
sas_axioms = translate_strips_axiom(axiom, strips_to_sas, ranges, mutex_dict, mutex_ranges)
result.extend(sas_axioms)
return result
def translate_task(strips_to_sas, ranges, mutex_dict, mutex_ranges, init, goals,
actions, axioms, metric, implied_facts):
with timers.timing("Processing axioms", block=True):
axioms, axiom_init, axiom_layer_dict = axiom_rules.handle_axioms(
actions, axioms, goals)
init = init + axiom_init
#axioms.sort(key=lambda axiom: axiom.name)
#for axiom in axioms:
# axiom.dump()
init_values = [rang - 1 for rang in ranges]
# Closed World Assumption: Initialize to "range - 1" == Nothing.
for fact in init:
        pairs = strips_to_sas.get(fact, [])  # empty for static init facts
for var, val in pairs:
assert init_values[var] == ranges[var] - 1, "Inconsistent init facts!"
init_values[var] = val
init = sas_tasks.SASInit(init_values)
goal_dict_list = translate_strips_goal(goals, strips_to_sas, ranges, mutex_dict, mutex_ranges)
assert len(goal_dict_list) == 1, "Negative goal not supported"
## we could substitute the negative goal literal in
## normalize.substitute_complicated_goal, using an axiom. We currently
## don't do this, because we don't run into this assertion, if the
## negative goal is part of finite domain variable with only two
## values, which is most of the time the case, and hence refrain from
## introducing axioms (that are not supported by all heuristics)
goal_pairs = goal_dict_list[0].items()
goal = sas_tasks.SASGoal(goal_pairs)
operators = translate_strips_operators(actions, strips_to_sas, ranges, mutex_dict, mutex_ranges, implied_facts)
axioms = translate_strips_axioms(axioms, strips_to_sas, ranges, mutex_dict, mutex_ranges)
axiom_layers = [-1] * len(ranges)
for atom, layer in axiom_layer_dict.iteritems():
assert layer >= 0
[(var, val)] = strips_to_sas[atom]
axiom_layers[var] = layer
variables = sas_tasks.SASVariables(ranges, axiom_layers)
return sas_tasks.SASTask(variables, init, goal, operators, axioms, metric)
def unsolvable_sas_task(msg):
print "%s! Generating unsolvable task..." % msg
write_translation_key([])
write_mutex_key([])
variables = sas_tasks.SASVariables([2], [-1])
init = sas_tasks.SASInit([0])
goal = sas_tasks.SASGoal([(0, 1)])
operators = []
axioms = []
metric = True
return sas_tasks.SASTask(variables, init, goal, operators, axioms, metric)
def pddl_to_sas(task):
with timers.timing("Instantiating", block=True):
relaxed_reachable, atoms, actions, axioms = instantiate.explore(task)
if not relaxed_reachable:
return unsolvable_sas_task("No relaxed solution")
# HACK! Goals should be treated differently.
if isinstance(task.goal, pddl.Conjunction):
goal_list = task.goal.parts
else:
goal_list = [task.goal]
for item in goal_list:
assert isinstance(item, pddl.Literal)
with timers.timing("Computing fact groups", block=True):
groups, mutex_groups, translation_key = fact_groups.compute_groups(
task, atoms, partial_encoding=USE_PARTIAL_ENCODING)
with timers.timing("Building STRIPS to SAS dictionary"):
ranges, strips_to_sas = strips_to_sas_dictionary(
groups, assert_partial=USE_PARTIAL_ENCODING)
with timers.timing("Building dictionary for full mutex groups"):
mutex_ranges, mutex_dict = strips_to_sas_dictionary(
mutex_groups, assert_partial=False)
if ADD_IMPLIED_PRECONDITIONS:
with timers.timing("Building implied facts dictionary..."):
implied_facts = build_implied_facts(strips_to_sas, groups, mutex_groups)
else:
implied_facts = {}
with timers.timing("Translating task", block=True):
sas_task = translate_task(
strips_to_sas, ranges, mutex_dict, mutex_ranges,
task.init, goal_list, actions, axioms, task.use_min_cost_metric,
implied_facts)
print "%d implied effects removed" % removed_implied_effect_counter
print "%d effect conditions simplified" % simplified_effect_condition_counter
print "%d implied preconditions added" % added_implied_precondition_counter
with timers.timing("Building mutex information"):
mutex_key = build_mutex_key(strips_to_sas, mutex_groups)
if DETECT_UNREACHABLE:
with timers.timing("Detecting unreachable propositions", block=True):
try:
simplify.filter_unreachable_propositions(
sas_task, mutex_key, translation_key)
except simplify.Impossible:
return unsolvable_sas_task("Simplified to trivially false goal")
with timers.timing("Writing translation key"):
write_translation_key(translation_key)
with timers.timing("Writing mutex key"):
write_mutex_key(mutex_key)
return sas_task
def build_mutex_key(strips_to_sas, groups):
group_keys = []
for group in groups:
group_key = []
for fact in group:
if strips_to_sas.get(fact):
for var, val in strips_to_sas[fact]:
group_key.append((var, val, str(fact)))
else:
print "not in strips_to_sas, left out:", fact
group_keys.append(group_key)
return group_keys
def build_implied_facts(strips_to_sas, groups, mutex_groups):
## Compute a dictionary mapping facts (FDR pairs) to lists of FDR
## pairs implied by that fact. In other words, in all states
## containing p, all pairs in implied_facts[p] must also be true.
##
## There are two simple cases where a pair p implies a pair q != p
## in our FDR encodings:
## 1. p and q encode the same fact
## 2. p encodes a STRIPS proposition X, q encodes a STRIPS literal
## "not Y", and X and Y are mutex.
##
## The first case cannot arise when we use partial encodings, and
## when we use full encodings, I don't think it would give us any
## additional information to exploit in the operator translation,
## so we only use the second case.
##
## Note that for a pair q to encode a fact "not Y", Y must form a
## fact group of size 1. We call such propositions Y "lonely".
## In the first step, we compute a dictionary mapping each lonely
## proposition to its variable number.
lonely_propositions = {}
for var_no, group in enumerate(groups):
if len(group) == 1:
lonely_prop = group[0]
assert strips_to_sas[lonely_prop] == [(var_no, 0)]
lonely_propositions[lonely_prop] = var_no
## Then we compute implied facts as follows: for each mutex group,
## check if prop is lonely (then and only then "not prop" has a
## representation as an FDR pair). In that case, all other facts
## in this mutex group imply "not prop".
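    ## Example: if Y forms the singleton group of variable v (so "not Y"
    ## is encoded as the pair (v, 1)) and {X, Y} is a mutex group, then
    ## every FDR pair encoding X gets (v, 1) appended to implied_facts.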
implied_facts = defaultdict(list)
for mutex_group in mutex_groups:
for prop in mutex_group:
prop_var = lonely_propositions.get(prop)
if prop_var is not None:
prop_is_false = (prop_var, 1)
for other_prop in mutex_group:
if other_prop is not prop:
for other_fact in strips_to_sas[other_prop]:
implied_facts[other_fact].append(prop_is_false)
return implied_facts
def write_translation_key(translation_key):
groups_file = file("test.groups", "w")
for var_no, var_key in enumerate(translation_key):
print >> groups_file, "var%d:" % var_no
for value, value_name in enumerate(var_key):
print >> groups_file, " %d: %s" % (value, value_name)
groups_file.close()
def write_mutex_key(mutex_key):
invariants_file = file("all.groups", "w")
print >> invariants_file, "begin_groups"
print >> invariants_file, len(mutex_key)
for group in mutex_key:
#print map(str, group)
no_facts = len(group)
print >> invariants_file, "group"
print >> invariants_file, no_facts
for var, val, fact in group:
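            # Facts stringify as e.g. "Atom at(obj1, loc2)"; strip the "Atom "
            # prefix, then split off the predicate name and the argument list.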
#print fact
assert str(fact).startswith("Atom ")
predicate = str(fact)[5:].split("(")[0]
#print predicate
rest = str(fact).split("(")[1]
rest = rest.strip(")").strip()
if not rest == "":
#print "there are args" , rest
args = rest.split(",")
else:
args = []
print_line = "%d %d %s %d " % (var, val, predicate, len(args))
for arg in args:
print_line += str(arg).strip() + " "
#print fact
#print print_line
print >> invariants_file, print_line
print >> invariants_file, "end_groups"
invariants_file.close()
if __name__ == "__main__":
import pddl
timer = timers.Timer()
with timers.timing("Parsing"):
task = pddl.open()
# EXPERIMENTAL!
# import psyco
# psyco.full()
sas_task = pddl_to_sas(task)
with timers.timing("Writing output"):
sas_task.output(file("output.sas", "w"))
print "Done! %s" % timer
|
dpattiso/igraph
|
lama/translate/translate_old.py
|
Python
|
gpl-2.0
| 31,909
|
# This script is an example of how you can run blender from the command line (in background mode with no interface)
# to automate tasks; in this example it creates a text object, camera and light, then renders and/or saves it.
# This example also shows how you can parse command line options to python scripts.
#
# Example usage for this test.
# blender -b -P $HOME/background_job.py -- --text="Hello World" --render="/tmp/hello" --save="/tmp/hello.blend"
# [Ivana:] note that /tmp can be replaced by the full path to PWD - ./ does not work
#
# Notice all python args are after the '--' argument.
import Blender
import bpy
def example_function(body_text, save_path, render_path):
sce= bpy.data.scenes.active
txt_data= bpy.data.curves.new('MyText', 'Text3d')
# Text Object
txt_ob = sce.objects.new(txt_data) # add the data to the scene as an object
txt_data.setText(body_text) # set the body text to the command line arg given
txt_data.setAlignment(Blender.Text3d.MIDDLE)# center text
# Camera
cam_data= bpy.data.cameras.new('MyCam') # create new camera data
cam_ob= sce.objects.new(cam_data) # add the camera data to the scene (creating a new object)
sce.objects.camera= cam_ob # set the active camera
cam_ob.loc= 0,0,10
# Lamp
lamp_data= bpy.data.lamps.new('MyLamp')
lamp_ob= sce.objects.new(lamp_data)
lamp_ob.loc= 2,2,5
if save_path:
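		# Probe that the save path is writable before asking Blender to save.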
try:
f= open(save_path, 'w')
f.close()
ok= True
except:
print 'Cannot save to path "%s"' % save_path
ok= False
if ok:
Blender.Save(save_path, 1)
if render_path:
render= sce.render
render.extensions= True
render.renderPath = render_path
#[Ivana:] don't know how to change the format
#render.setImageType(PNG)
render.sFrame= 1
render.eFrame= 1
render.renderAnim()
import sys # to get command line args
import optparse # to parse options for us and print a nice help message
script_name= 'background_job.py'
def main():
# get the args passed to blender after "--", all of which are ignored by blender specifically
# so python may receive its own arguments
argv= sys.argv
if '--' not in argv:
argv = [] # as if no args are passed
else:
argv = argv[argv.index('--')+1: ] # get all args after "--"
# When --help or no args are given, print this help
usage_text = 'Run blender in background mode with this script:'
usage_text += ' blender -b -P ' + script_name + ' -- [options]'
parser = optparse.OptionParser(usage = usage_text)
# Example background utility, add some text and renders or saves it (with options)
# Possible types are: string, int, long, choice, float and complex.
parser.add_option('-t', '--text', dest='body_text', help='This text will be used to render an image', type='string')
parser.add_option('-s', '--save', dest='save_path', help='Save the generated file to the specified path', metavar='FILE')
parser.add_option('-r', '--render', dest='render_path', help='Render an image to the specified path', metavar='FILE')
    options, args = parser.parse_args(argv)  # In this example we won't use the args
if not argv:
parser.print_help()
return
if not options.body_text:
print 'Error: --text="some string" argument not given, aborting.'
parser.print_help()
return
# Run the example function
example_function(options.body_text, options.save_path, options.render_path)
print 'batch job finished, exiting'
if __name__ == '__main__':
main()
|
ivanamihalek/blender
|
old/bgjob.py
|
Python
|
gpl-2.0
| 3,413
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2/3/2015
@author: Antonio Hermosilla Rodrigo.
@contact: anherro285@gmail.com
@organization: Antonio Hermosilla Rodrigo.
@copyright: (C) 2015 by Antonio Hermosilla Rodrigo
@version: 1.0.0
'''
def UTC2GPS(fecha):
    '''
    @brief: Convert a datetime object to GPS time (seconds elapsed since the start of the GPS week).
    @param fecha datetime: datetime object with the date to transform into GPS time.
    '''
    # GPS weeks start on Sunday. isoweekday() returns Mon=1 .. Sun=7,
    # so isoweekday() % 7 maps Sunday -> 0, Monday -> 1, ..., Saturday -> 6.
    days = fecha.isoweekday() % 7
    return days*86400 + fecha.hour*3600 + fecha.minute*60 + fecha.second
def main():
from datetime import datetime
print(UTC2GPS(datetime(2014,10,28,17,0,0)))
if __name__=="__main__":
main()
|
tonihr/pyGeo
|
Tiempo/UTC2GPS.py
|
Python
|
gpl-2.0
| 1,716
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'User.is_admin'
db.add_column(u'accounts_user', 'is_admin',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
# Deleting field 'User.is_admin'
db.delete_column(u'accounts_user', 'is_admin')
models = {
u'accounts.group': {
'Meta': {'object_name': 'Group'},
'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modification_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'perspectives': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['perspectives.Perspective']", 'symmetrical': 'False'}),
'role': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['accounts.User']", 'symmetrical': 'False'})
},
u'accounts.user': {
'Meta': {'object_name': 'User'},
'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'modification_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'team': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'})
},
u'companies.company': {
'Meta': {'object_name': 'Company'},
'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modification_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
u'dashboards.dashboard': {
'Meta': {'unique_together': "(('name', 'company'),)", 'object_name': 'Dashboard'},
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['companies.Company']"}),
'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modification_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'source': ('django.db.models.fields.TextField', [], {})
},
u'perspectives.perspective': {
'Meta': {'unique_together': "(('name', 'company'),)", 'object_name': 'Perspective'},
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['companies.Company']"}),
'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'dashboards': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['dashboards.Dashboard']", 'symmetrical': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modification_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
}
}
complete_apps = ['accounts']
|
Rondineli/django-sso
|
django_sso/accounts/migrations/0003_auto__add_field_user_is_admin.py
|
Python
|
gpl-2.0
| 5,064
|
# Install.py -- File system installation commands
# Copyright (C) 2007-2013 CEA
#
# This file is part of shine
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
from __future__ import print_function
import sys
from Shine.Configuration.Globals import Globals
from Shine.FSUtils import create_lustrefs
from Shine.Lustre.FileSystem import FSRemoteError
from Shine.Commands.Base.Command import Command, CommandHelpException
from Shine.Commands.Base.CommandRCDefs import RC_OK, RC_FAILURE
# Lustre events
from Shine.Commands.Base.FSEventHandler import FSGlobalEventHandler
class Install(Command):
"""
shine install -m /path/to/model.lmf
"""
NAME = "install"
DESCRIPTION = "Install a new file system."
def execute(self):
# Option sanity check
self.forbidden(self.options.fsnames, "-f, see -m")
self.forbidden(self.options.labels, "-l")
self.forbidden(self.options.indexes, "-i")
self.forbidden(self.options.failover, "-F")
rc = RC_OK
if not self.options.model:
raise CommandHelpException("Lustre model file path"
"(-m <model_file>) argument required.", self)
eh = FSGlobalEventHandler(self)
# Use this Shine.FSUtils convenience function.
lmf = self.get_lmf_path()
if lmf:
print("Using Lustre model file %s" % lmf)
else:
raise CommandHelpException("Lustre model file for ``%s'' not found:"
" please use filename or full LMF path.\n"
"Your default model files directory (lmf_dir) is: %s" %
(self.options.model, Globals().get_lmf_dir()), self)
install_nodes = self.options.nodes
excluded_nodes = self.options.excludes
fs_conf, fs = create_lustrefs(self.get_lmf_path(),
event_handler=eh, nodes=install_nodes,
excluded=excluded_nodes)
# Register the filesystem in backend
print("Registering FS %s to backend..." % fs.fs_name)
if self.options.dryrun:
rc = 0
else:
rc = self.register_fs(fs_conf)
if rc:
msg = "Error: failed to register FS to backend (rc=%d)" % rc
print(msg, file=sys.stderr)
else:
print("Filesystem %s registered." % fs.fs_name)
# Helper message.
# If user specified nodes which were not used, warn him about it.
actual_nodes = fs.components.managed().servers()
if not self.check_valid_list(fs_conf.get_fs_name(), \
actual_nodes, "install"):
return RC_FAILURE
# Install file system configuration files; normally, this should
# not be done by the Shine.Lustre.FileSystem object itself, but as
# all proxy methods are currently handled by it, it is more
# convenient this way...
try:
fs.install(fs_conf.get_cfg_filename(),
dryrun=self.options.dryrun)
tuning_conf = Globals().get_tuning_file()
if tuning_conf:
fs.install(tuning_conf, dryrun=self.options.dryrun)
except FSRemoteError as error:
print("WARNING: Due to error, installation skipped on %s"
% error.nodes)
rc = RC_FAILURE
if not install_nodes and not excluded_nodes:
# Give pointer to next user step.
print("Use `shine format -f %s' to initialize the file system." %
fs_conf.get_fs_name())
return rc
def register_fs(self, fs_conf):
# register file system configuration to the backend
fs_conf.register_fs()
fs_conf.register_targets()
|
cea-hpc/shine
|
lib/Shine/Commands/Install.py
|
Python
|
gpl-2.0
| 4,458
|
import string
import random
import time
import json
import re
from Config import config
from Plugin import PluginManager
if "sessions" not in locals().keys(): # To keep sessions between module reloads
sessions = {}
def showPasswordAdvice(password):
error_msgs = []
if not password or not isinstance(password, (str, unicode)):
error_msgs.append("You have enabled <b>UiPassword</b> plugin, but you forgot to set a password!")
elif len(password) < 8:
error_msgs.append("You are using a very short UI password!")
return error_msgs
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
sessions = sessions
last_cleanup = time.time()
def route(self, path):
if path.endswith("favicon.ico"):
return self.actionFile("src/Ui/media/img/favicon.ico")
else:
if config.ui_password:
if time.time() - self.last_cleanup > 60 * 60: # Cleanup expired sessions every hour
self.cleanup()
# Validate session
session_id = self.getCookies().get("session_id")
if session_id not in self.sessions: # Invalid session id, display login
return self.actionLogin()
return super(UiRequestPlugin, self).route(path)
# Action: Login
def actionLogin(self):
template = open("plugins/UiPassword/login.html").read()
self.sendHeader()
posted = self.getPosted()
if posted: # Validate http posted data
if self.checkPassword(posted.get("password")):
# Valid password, create session
session_id = self.randomString(26)
self.sessions[session_id] = {
"added": time.time(),
"keep": posted.get("keep")
}
# Redirect to homepage or referer
url = self.env.get("HTTP_REFERER", "")
if not url or re.sub("\?.*", "", url).endswith("/Login"):
url = "/" + config.homepage
cookie_header = ('Set-Cookie', "session_id=%s;path=/;max-age=2592000;" % session_id) # Max age = 30 days
self.start_response('301 Redirect', [('Location', url), cookie_header])
yield "Redirecting..."
else:
# Invalid password, show login form again
template = template.replace("{result}", "bad_password")
yield template
def checkPassword(self, password):
return password == config.ui_password
def randomString(self, nchars):
return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(nchars))
@classmethod
def cleanup(cls):
cls.last_cleanup = time.time()
for session_id, session in cls.sessions.items():
if session["keep"] and time.time() - session["added"] > 60 * 60 * 24 * 60: # Max 60days for keep sessions
del(cls.sessions[session_id])
elif not session["keep"] and time.time() - session["added"] > 60 * 60 * 24: # Max 24h for non-keep sessions
del(cls.sessions[session_id])
# Action: Display sessions
def actionSessions(self):
self.sendHeader()
yield "<pre>"
yield json.dumps(self.sessions, indent=4)
# Action: Logout
def actionLogout(self):
        # Session id has to be passed as a GET parameter, or the endpoint
        # called without a referer, to avoid remote logout
session_id = self.getCookies().get("session_id")
if not self.env.get("HTTP_REFERER") or session_id == self.get.get("session_id"):
if session_id in self.sessions:
del self.sessions[session_id]
self.start_response('301 Redirect', [
('Location', "/"),
('Set-Cookie', "session_id=deleted; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT")
])
yield "Redirecting..."
else:
self.sendHeader()
yield "Error: Invalid session id"
@PluginManager.registerTo("ConfigPlugin")
class ConfigPlugin(object):
def createArguments(self):
group = self.parser.add_argument_group("UiPassword plugin")
group.add_argument('--ui_password', help='Password to access UiServer', default=None, metavar="password")
return super(ConfigPlugin, self).createArguments()
from Translate import translate as lang
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
def actionUiLogout(self, to):
permissions = self.getPermissions(to)
if "ADMIN" not in permissions:
return self.response(to, "You don't have permission to run this command")
session_id = self.request.getCookies().get("session_id", "")
message = "<script>document.location.href = '/Logout?session_id=%s'</script>" % session_id
self.cmd("notification", ["done", message])
def addHomepageNotifications(self):
error_msgs = showPasswordAdvice(config.ui_password)
for msg in error_msgs:
self.site.notifications.append(["error", lang[msg]])
return super(UiWebsocketPlugin, self).addHomepageNotifications()
|
l5h5t7/ZeroNet
|
plugins/disabled-UiPassword/UiPasswordPlugin.py
|
Python
|
gpl-2.0
| 5,239
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Subclass of wx.Panel"""
#-----------------------------------------------------------------------------------------
#Import
try:
#wxPython
import wx
import wx.grid
import wx.lib.scrolledpanel
#python std library
import sys
#our modules and packages
except ImportError as err:
print(u"ImportError: {}".format(err))
sys.exit("-1")
#-----------------------------------------------------------------------------------------
class ReclassifyPanel(wx.Panel):
"""
Subclass of wx.Panel.
Represents top-right part of the window.
Contains reclassification table, delete entry
button and add entry button.
"""
def __init__(self, parent, id):
wx.Panel.__init__(self, parent, id)
#TABLE
self.tableCol = 4
self.__buildTable()
#BUTTONS
self.__buildButtonPanel()
#LAYOUT
self.__layout()
#self.SetMinSize((400, -1))
#-----------------------------------------------------------------------------------------
def __buildTable(self):
"""
Creates table for displaying mapset classification.
Table is made using wx.grid.Grid.
:return: void
"""
self.tablePanel = wx.Panel(self, wx.NewId())
self.table = wx.grid.Grid(self.tablePanel)
self.table.SetMinSize((430, -1))
self.table.CreateGrid(0, self.tableCol)
self.table.SetColLabelValue(0, "Lower limit")
self.table.SetColLabelValue(1, "Upper limit")
self.table.SetColLabelValue(2, "Value")
self.table.SetColLabelValue(3, "Label")
self.table.SetDefaultEditor(wx.grid.GridCellFloatEditor(-1, -1))
box = wx.BoxSizer(wx.VERTICAL)
box.Add(self.table, wx.EXPAND, wx.CENTER)
self.tablePanel.SetSizer(box)
#-----------------------------------------------------------------------------------------
def __buildButtonPanel(self):
"""
Creates delete entry button and add entry button.
:return: void
"""
self.buttonPanel = wx.Panel(self, wx.NewId())
self.addButton = wx.Button(self.buttonPanel, wx.NewId(), "Add", size=(100, -1))
self.deleteButton = wx.Button(self.buttonPanel, wx.NewId(), "Delete", size=(100, -1))
self.previewButton = wx.Button(self.buttonPanel, wx.NewId(), "Preview", size=(100, -1))
vBox = wx.BoxSizer(wx.VERTICAL)
vBox.Add(self.addButton, 0, wx.ALIGN_CENTER)
vBox.Add(self.deleteButton, 0, wx.ALIGN_CENTER)
vBox.Add(self.previewButton, 0, wx.ALIGN_CENTER)
self.buttonPanel.SetSizer(vBox)
#-----------------------------------------------------------------------------------------
def __layout(self):
"""
Specifies final layout in Reclassify Panel
:return: void
"""
margin = 5
sBox = wx.StaticBox(self, wx.NewId(), "Reclassification Table")
hBox = wx.StaticBoxSizer(sBox, wx.HORIZONTAL)
hBox.Add(self.tablePanel, 0, wx.ALL | wx.CENTER | wx.EXPAND, margin)
hBox.Add(self.buttonPanel, 0, wx.ALL | wx.ALIGN_TOP, margin)
self.SetSizer(hBox)
#-----------------------------------------------------------------------------------------
if __name__ == "__main__":
pass
|
ctu-yfsg/2015-a-grass-reclass
|
reclassify/Layout/ReclassifyPanel.py
|
Python
|
gpl-2.0
| 3,344
|
from typing import Any, Dict, List, Union
from flask import abort, flash, g, render_template, url_for
from flask_babel import format_number, lazy_gettext as _
from werkzeug.utils import redirect
from werkzeug.wrappers import Response
from openatlas import app
from openatlas.database.connect import Transaction
from openatlas.forms.form import build_move_form
from openatlas.models.entity import Entity
from openatlas.models.node import Node
from openatlas.util.table import Table
from openatlas.util.util import link, required_group, sanitize
def walk_tree(nodes: List[int]) -> List[Dict[str, Any]]:
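    # Build the nested node dicts (id / text / children) consumed by the
    # client-side tree widget, recursing into each type's sub-types.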
items = []
for id_ in nodes:
item = g.nodes[id_]
count_subs = f' ({format_number(item.count_subs)})' \
if item.count_subs else ''
items.append({
'id': item.id,
'href': url_for('entity_view', id_=item.id),
'a_attr': {'href': url_for('entity_view', id_=item.id)},
'text':
item.name.replace("'", "'") +
f' {format_number(item.count)}{count_subs}',
'children': walk_tree(item.subs)})
return items
@app.route('/types')
@required_group('readonly')
def node_index() -> str:
nodes: Dict[str, Dict[Entity, str]] = \
{'standard': {}, 'custom': {}, 'places': {}, 'value': {}}
for node in g.nodes.values():
if node.root:
continue
type_ = 'custom'
if node.class_.name == 'administrative_unit':
type_ = 'places'
elif node.standard:
type_ = 'standard'
elif node.value_type:
type_ = 'value'
nodes[type_][node] = render_template(
'forms/tree_select_item.html',
name=sanitize(node.name),
data=walk_tree(Node.get_nodes(node.name)))
return render_template(
'types/index.html',
nodes=nodes,
title=_('types'),
crumbs=[_('types')])
@app.route('/types/delete/<int:id_>', methods=['POST', 'GET'])
@required_group('editor')
def node_delete(id_: int) -> Response:
node = g.nodes[id_]
root = g.nodes[node.root[-1]] if node.root else None
if node.standard or node.subs or node.count or (root and root.locked):
abort(403)
node.delete()
flash(_('entity deleted'), 'info')
return redirect(
url_for('entity_view', id_=root.id) if root else url_for('node_index'))
@app.route('/types/move/<int:id_>', methods=['POST', 'GET'])
@required_group('editor')
def node_move_entities(id_: int) -> Union[str, Response]:
node = g.nodes[id_]
root = g.nodes[node.root[-1]]
if root.value_type: # pragma: no cover
abort(403)
form = build_move_form(node)
if form.validate_on_submit():
Transaction.begin()
Node.move_entities(
node,
getattr(form, str(root.id)).data,
form.checkbox_values.data)
Transaction.commit()
flash(_('Entities were updated'), 'success')
if node.class_.name == 'administrative_unit':
tab = 'places'
elif root.standard:
tab = 'standard'
elif node.value_type: # pragma: no cover
tab = 'value'
else:
tab = 'custom'
return redirect(
f"{url_for('node_index')}#menu-tab-{tab}_collapse-{root.id}")
getattr(form, str(root.id)).data = node.id
return render_template(
'types/move.html',
table=Table(
header=['#', _('selection')],
rows=[[item, item.label.text] for item in form.selection]),
root=root,
form=form,
entity=node,
crumbs=[
[_('types'), url_for('node_index')],
root,
node,
_('move entities')])
@app.route('/types/untyped/<int:id_>')
@required_group('editor')
def show_untyped_entities(id_: int) -> str:
hierarchy = g.nodes[id_]
table = Table(['name', 'class', 'first', 'last', 'description'])
for entity in Node.get_untyped(hierarchy.id):
table.rows.append([
link(entity),
entity.class_.label,
entity.first,
entity.last,
entity.description])
return render_template(
'table.html',
entity=hierarchy,
table=table,
crumbs=[
[_('types'),
url_for('node_index')],
link(hierarchy),
_('untyped entities')])
|
craws/OpenAtlas-Python
|
openatlas/views/types.py
|
Python
|
gpl-2.0
| 4,440
|
#!/usr/bin/env python
# class to allow watching multiple files and
# calling a callback when any change (size or mtime)
#
# We take exclusive use of SIGIO and maintain a global list of
# watched files.
# As we cannot get siginfo in python, we check every file
# every time we get a signal.
# we report change in size, mtime, or ino of the file (given by name)
# Copyright (C) 2011 Neil Brown <neilb@suse.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os, fcntl, signal
dirlist = []
def notified(sig, stack):
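    # dnotify registrations are one-shot (no DN_MULTISHOT), so re-arm the
    # watch on every directory before checking its files for changes.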
for d in dirlist:
fcntl.fcntl(d.fd, fcntl.F_NOTIFY, (fcntl.DN_MODIFY|fcntl.DN_RENAME|
fcntl.DN_CREATE|fcntl.DN_DELETE))
d.check()
class dir():
def __init__(self, dname):
self.dname = dname
self.fd = os.open(dname, 0)
self.files = []
self.callbacks = []
fcntl.fcntl(self.fd, fcntl.F_NOTIFY, (fcntl.DN_MODIFY|fcntl.DN_RENAME|
fcntl.DN_CREATE|fcntl.DN_DELETE))
if not dirlist:
signal.signal(signal.SIGIO, notified)
dirlist.append(self)
def watch(self, fname, callback):
f = file(os.path.join(self.dname, fname), callback)
self.files.append(f)
return f
def watchall(self, callback):
self.callbacks.append(callback)
def check(self):
newlist = []
for c in self.callbacks:
if c():
newlist.append(c)
self.callbacks = newlist
for f in self.files:
f.check()
def cancel(self, victim):
if victim in self.files:
self.files.remove(victim)
class file():
def __init__(self, fname, callback):
self.name = fname
try:
stat = os.stat(self.name)
except OSError:
self.ino = 0
self.size = 0
self.mtime = 0
else:
self.ino = stat.st_ino
self.size = stat.st_size
self.mtime = stat.st_mtime
self.callback = callback
def check(self):
try:
stat = os.stat(self.name)
except OSError:
if self.ino == 0:
return False
self.size = 0
self.mtime = 0
self.ino = 0
else:
if stat.st_size == self.size and stat.st_mtime == self.mtime \
and stat.st_ino == self.ino:
return False
self.size = stat.st_size
self.mtime = stat.st_mtime
self.ino = stat.st_ino
self.callback(self)
return True
def cancel(self):
global dirlist
for d in dirlist:
d.cancel(self)
if __name__ == "__main__" :
import signal
##
def ping(f): print "got ", f.name
d = dir("/tmp/n")
a = d.watch("a", ping)
b = d.watch("b", ping)
c = d.watch("c", ping)
while True:
signal.pause()
|
neilbrown/susman
|
dnotify.py
|
Python
|
gpl-2.0
| 3,660
|
#OBJ2VXP: Converts simple OBJ files to VXP expansions
#Copyright (C) 2004-2015 Foone Turing
#
#This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys
sys.path.append('code')
import pygame
from pygame.constants import *
import sockgui
sockgui.setDataPath('code')
from converterbase import ConverterBase
import os
import time
import obj2vxp
import obj2vxptex
from error import SaveError,LoadError
import ConfigParser
import vxpinstaller
class obj2vxpGUI(ConverterBase):
def __init__(self,screen):
ConverterBase.__init__(self,screen)
ui=self.ui
ys=self.makeTab(10,94,'CFG settings')
ui.add(sockgui.Label(ui,[20,ys+10],'Expansion name:'))
ui.add(sockgui.Label(ui,[20,ys+26],'Author name:'))
ui.add(sockgui.Label(ui,[20,ys+42],'Orig. Author name:'))
ui.add(sockgui.Label(ui,[20,ys+58],'Shortname:'))
ui.add(sockgui.Label(ui,[20,ys+74],'Filename:'))
self.filenamelabel=sockgui.Label(ui,[120,ys+74],'')
ui.add(self.filenamelabel)
self.namebox= sockgui.TextBox(ui,[120,ys+10-3],40)
self.authorbox= sockgui.TextBox(ui,[120,ys+26-3],40)
self.origauthorbox= sockgui.TextBox(ui,[120,ys+42-3],40)
self.shortnamebox= sockgui.TextBox(ui,[120,ys+58-3],40,callback=self.onShortNameChanged)
self.shortnamebox.setAllowedKeys(sockgui.UPPERCASE+sockgui.LOWERCASE+sockgui.DIGITS+'._-')
self.authorbox.setText(self.getAuthor())
ui.add(self.namebox)
ui.add(self.authorbox)
ui.add(self.origauthorbox)
ui.add(sockgui.Button(ui,[330,ys+42-3],'Same',callback=self.copyAuthorToOrigAuthor))
ui.add(self.shortnamebox)
self.namebox.activate()
ys=self.makeTab(ys+94+5,120,'OBJ to convert')
self.files=sockgui.ListBox(ui,[20,ys+10],[62,10],items=self.getOBJList())
if self.files.getNumItems()>0:
self.files.select(0)
ui.add(self.files)
self.enhance_color=sockgui.CheckBox(ui,[100,ys+103],'Enhance Color',self.getEnhanceColor())
self.textured=sockgui.CheckBox(ui,[200,ys+103],'Textured',self.getTextured())
ui.add(sockgui.Button(ui,[20,ys+99],'Refresh list',callback=self.refreshList))
ui.add(self.enhance_color)
ui.add(self.textured)
#ui.add(sockgui.BorderBox(ui,[10,224],[screen.get_width()-20,110]))
ys=self.makeTab(ys+120+5,30,'3dmm IDs')
ui.add(sockgui.Label(ui,[20,ys+10],'ID:'))
self.idbox=sockgui.TextBox(ui,[40,ys+7],10)
self.idbox.setAllowedKeys('0123456789')
ui.add(self.idbox)
ui.add(sockgui.Button(ui,[110,ys+7],'Generate ID',callback=self.generateNewID))
ys=self.makeTab(ys+30+5,66,'Control')
self.install_check=sockgui.CheckBox(ui,[240,ys+13],'Install VXP',self.getInstallCheck())
ui.add(self.install_check)
self.progress=sockgui.ProgressBox(ui,[20,ys+10],[200,16],maxvalue=6)
ui.add(self.progress)
self.errortext=sockgui.Label(ui,[20,ys+32],'')
ui.add(self.errortext)
self.startbutton=sockgui.Button(ui,[20,ys+46],'Create VXP',callback=self.createVXP)
ui.add(self.startbutton)
ui.registerHotKey(K_F5,self.updateListBox)
def refreshList(self,junk):
self.files.setItems(self.getOBJList())
def updateListBox(self,event):
if event.type==KEYUP:
self.refreshList(0)
def statusCallback(self,text):
self.errortext.setText(text)
self.ui.draw()
def createVXP(self,junk):
self.saveSettings()
self.progress.setValue(0)
try:
outfile=str(self.shortnamebox.getText())+'.vxp'
objfile=self.files.getSelectedText()
if objfile is None:
raise SaveError('no OBJ selected')
try:
uniqueid=int(self.idbox.getText())
except ValueError:
raise SaveError('Failed: Bad ID!')
name=str(self.namebox.getText())
author=str(self.authorbox.getText())
origauthor=str(self.origauthorbox.getText())
shortname=str(self.shortnamebox.getText())
enhance=self.enhance_color.isChecked()
self.errortext.setText('Converting...')
if self.textured.isChecked():
ret=obj2vxptex.CreateVXPExpansionFromOBJTextured(name,author,origauthor,outfile,shortname,objfile,
uniqueid,self.progressCallback,self.statusCallback)
else:
ret=obj2vxp.CreateVXPExpansionFromOBJ(name,author,origauthor,outfile,shortname,objfile,
uniqueid,self.progressCallback,enhance,self.statusCallback)
if ret:
self.errortext.setText('VXP saved as %s' % (outfile))
self.idbox.setText('') #So we don't reuse them by mistake.
if self.install_check.isChecked():
vxpinstaller.installVXP(outfile)
self.errortext.setText('VXP saved as %s, and installed.' % (outfile))
else:
self.errortext.setText('Failed: unknown error (!ret)')
except SaveError,e:
self.errortext.setText('Failed: ' + str(e).strip('"'))
except LoadError,e:
self.errortext.setText('Failed: ' + str(e).strip('"'))
except ValueError:
self.errortext.setText('Failed: Bad ID!')
except pygame.error,e:
self.errortext.setText('Failed: ' + str(e).strip('"'))
def copyAuthorToOrigAuthor(self,junk):
self.origauthorbox.setText(self.authorbox.getText())
def saveExtraSettings(self):
try:
self.config.add_section('obj2vxp')
except:
pass
self.config.set('obj2vxp','enhance',`self.enhance_color.isChecked()`)
self.config.set('obj2vxp','textured',`self.textured.isChecked()`)
def getEnhanceColor(self):
try:
val=self.config.get('obj2vxp','enhance')
return sockgui.BoolConv(val)
except:
return False
def getTextured(self):
try:
val=self.config.get('obj2vxp','textured')
return sockgui.BoolConv(val)
except:
return False
def getOBJList(self):
out=[]
for file in os.listdir('.'):
flower=file.lower()
if flower.endswith('.obj'):
out.append(file)
return out
def onShortNameChanged(self,data,newtext):
if newtext=='':
out=''
else:
out=self.shortnamebox.getText() + '.vxp'
self.filenamelabel.setRed(os.path.exists(out))
self.filenamelabel.setText(out)
def RunConverter(title):
pygame.display.set_caption(title+'obj2vxpGUI '+obj2vxp.version)
screen=pygame.display.set_mode((375,397))
gui=obj2vxpGUI(screen)
return gui.run()
if __name__=='__main__':
pygame.init()
RunConverter('')
def GetInfo():
return ('obj2vxp','Convert OBJs to props',None,obj2vxp.version) # None is the ICONOS.
|
foone/7gen
|
bin/obj2vxpGUI.py
|
Python
|
gpl-2.0
| 6,717
|
__author__ = 'Santanu Pattanayak'
import numpy as np
np.random.seed(1000)
import os
import glob
import cv2
import datetime
import pandas as pd
import time
import warnings
warnings.filterwarnings("ignore")
from sklearn.model_selection import KFold
from sklearn.metrics import cohen_kappa_score
from keras.models import Sequential,Model
from keras.layers.core import Dense, Dropout, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.layers import GlobalMaxPooling2D,GlobalAveragePooling2D
from keras.optimizers import SGD
from keras.callbacks import EarlyStopping
from keras.utils import np_utils
from sklearn.metrics import log_loss
import keras
from keras import __version__ as keras_version
from keras.applications.inception_v3 import InceptionV3
from keras.applications.resnet50 import ResNet50
from keras.applications.vgg16 import VGG16
from keras.preprocessing.image import ImageDataGenerator
from keras import optimizers
from keras.callbacks import EarlyStopping, ModelCheckpoint, CSVLogger, Callback
from keras.applications.resnet50 import preprocess_input
import h5py
import argparse
from sklearn.externals import joblib
import json
class TransferLearning:
def __init__(self):
parser = argparse.ArgumentParser(description='Process the inputs')
parser.add_argument('--path',help='image directory')
parser.add_argument('--class_folders',help='class images folder names')
parser.add_argument('--dim',type=int,help='Image dimensions to process')
parser.add_argument('--lr',type=float,help='learning rate',default=1e-4)
parser.add_argument('--batch_size',type=int,help='batch size')
parser.add_argument('--epochs',type=int,help='no of epochs to train')
parser.add_argument('--initial_layers_to_freeze',type=int,help='the initial layers to freeze')
parser.add_argument('--model',help='Standard Model to load',default='InceptionV3')
parser.add_argument('--folds',type=int,help='num of cross validation folds',default=5)
parser.add_argument('--outdir',help='output directory')
args = parser.parse_args()
self.path = args.path
self.class_folders = json.loads(args.class_folders)
self.dim = int(args.dim)
self.lr = float(args.lr)
self.batch_size = int(args.batch_size)
self.epochs = int(args.epochs)
self.initial_layers_to_freeze = int(args.initial_layers_to_freeze)
self.model = args.model
self.folds = int(args.folds)
self.outdir = args.outdir
def get_im_cv2(self,path,dim=224):
img = cv2.imread(path)
        resized = cv2.resize(img, (dim, dim), interpolation=cv2.INTER_LINEAR)
return resized
# Pre Process the Images based on the ImageNet pre-trained model Image transformation
def pre_process(self,img):
        # Subtract the per-channel ImageNet means (BGR channel order from cv2)
        img[:, :, 0] = img[:, :, 0] - 103.939
        img[:, :, 1] = img[:, :, 1] - 116.779
        img[:, :, 2] = img[:, :, 2] - 123.68
return img
# Function to build X, y in numpy format based on the train/validation datasets
def read_data(self,class_folders,path,num_class,dim,train_val='train'):
print(train_val)
train_X,train_y = [],[]
for c in class_folders:
path_class = path + str(train_val) + '/' + str(c)
file_list = os.listdir(path_class)
for f in file_list:
img = self.get_im_cv2(path_class + '/' + f)
img = self.pre_process(img)
train_X.append(img)
label = int(c.split('class')[1])
train_y.append(int(label))
train_y = keras.utils.np_utils.to_categorical(np.array(train_y),num_class)
return np.array(train_X),train_y
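    # InceptionV3 Model for transfer Learning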
def inception_pseudo(self,dim=224,freeze_layers=30,full_freeze='N'):
model = InceptionV3(weights='imagenet',include_top=False)
x = model.output
x = GlobalAveragePooling2D()(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.5)(x)
out = Dense(5,activation='softmax')(x)
        model_final = Model(inputs=model.input, outputs=out)
if full_freeze != 'N':
for layer in model.layers[0:freeze_layers]:
layer.trainable = False
return model_final
# ResNet50 Model for transfer Learning
def resnet_pseudo(self,dim=224,freeze_layers=10,full_freeze='N'):
model = ResNet50(weights='imagenet',include_top=False)
x = model.output
x = GlobalAveragePooling2D()(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.5)(x)
out = Dense(5,activation='softmax')(x)
        model_final = Model(inputs=model.input, outputs=out)
if full_freeze != 'N':
for layer in model.layers[0:freeze_layers]:
layer.trainable = False
return model_final
# VGG16 Model for transfer Learning
def VGG16_pseudo(self,dim=224,freeze_layers=10,full_freeze='N'):
model = VGG16(weights='imagenet',include_top=False)
x = model.output
x = GlobalAveragePooling2D()(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.5)(x)
out = Dense(5,activation='softmax')(x)
        model_final = Model(inputs=model.input, outputs=out)
if full_freeze != 'N':
for layer in model.layers[0:freeze_layers]:
layer.trainable = False
return model_final
def train_model(self,train_X,train_y,n_fold=5,batch_size=16,epochs=40,dim=224,lr=1e-5,model='ResNet50'):
model_save_dest = {}
k = 0
kf = KFold(n_splits=n_fold, random_state=0, shuffle=True)
for train_index, test_index in kf.split(train_X):
k += 1
X_train,X_test = train_X[train_index],train_X[test_index]
y_train, y_test = train_y[train_index],train_y[test_index]
            if model == 'ResNet50':
model_final = self.resnet_pseudo(dim=224,freeze_layers=10,full_freeze='N')
if model == 'VGG16':
model_final = self.VGG16_pseudo(dim=224,freeze_layers=10,full_freeze='N')
if model == 'InceptionV3':
model_final = self.inception_pseudo(dim=224,freeze_layers=10,full_freeze='N')
datagen = ImageDataGenerator(
horizontal_flip = True,
vertical_flip = True,
width_shift_range = 0.1,
height_shift_range = 0.1,
channel_shift_range=0,
zoom_range = 0.2,
rotation_range = 20)
adam = optimizers.Adam(lr=lr, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)
model_final.compile(optimizer=adam, loss=["categorical_crossentropy"],metrics=['accuracy'])
reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.50,
patience=3, min_lr=0.000001)
callbacks = [
EarlyStopping(monitor='val_loss', patience=10, mode='min', verbose=1),
CSVLogger('keras-5fold-run-01-v1-epochs_ib.log', separator=',', append=False),reduce_lr,
ModelCheckpoint(
'kera1-5fold-run-01-v1-fold-' + str('%02d' % (k + 1)) + '-run-' + str('%02d' % (1 + 1)) + '.check',
monitor='val_loss', mode='min',
save_best_only=True,
verbose=1)]
model_final.fit_generator(datagen.flow(X_train,y_train, batch_size=batch_size),
steps_per_epoch=X_train.shape[0]/batch_size,epochs=epochs,verbose=1,
validation_data=(X_test,y_test),callbacks=callbacks,
class_weight={0:0.012,1:0.12,2:0.058,3:0.36,4:0.43})
model_name = 'kera1-5fold-run-01-v1-fold-' + str('%02d' % (k + 1)) + '-run-' + str('%02d' % (1 + 1)) + '.check'
del model_final
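        # Workaround: drop the stored optimizer state from the checkpoint so
        # keras.models.load_model can reload the weights cleanly for inference.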
f = h5py.File(model_name, 'r+')
del f['optimizer_weights']
f.close()
model_final = keras.models.load_model(model_name)
model_name1 = self.outdir + str(model) + '___' + str(k)
model_final.save(model_name1)
model_save_dest[k] = model_name1
return model_save_dest
# Hold out dataset validation function
def inference_validation(self,test_X,test_y,model_save_dest,n_class=5,folds=5):
pred = np.zeros((len(test_X),n_class))
for k in range(1,folds + 1):
model = keras.models.load_model(model_save_dest[k])
pred = pred + model.predict(test_X)
pred = pred/(1.0*folds)
pred_class = np.argmax(pred,axis=1)
act_class = np.argmax(test_y,axis=1)
accuracy = np.sum([pred_class == act_class])*1.0/len(test_X)
kappa = cohen_kappa_score(pred_class,act_class,weights='quadratic')
return pred_class,accuracy,kappa
def main(self):
start_time = time.time()
print('Data Processing..')
self.num_class = len(self.class_folders)
train_X,train_y = self.read_data(self.class_folders,self.path,self.num_class,self.dim,train_val='train')
self.model_save_dest = self.train_model(train_X,train_y,n_fold=self.folds,batch_size=self.batch_size,
epochs=self.epochs,dim=self.dim,lr=self.lr,model=self.model)
print("Model saved to dest:",self.model_save_dest)
test_X,test_y = self.read_data(self.class_folders,self.path,self.num_class,self.dim,train_val='validation')
_,accuracy,kappa = self.inference_validation(test_X,test_y,self.model_save_dest,n_class=self.num_class,folds=self.folds)
joblib.dump(self.model_save_dest,self.outdir + "dict_model.pkl")
print("-----------------------------------------------------")
print("Kappa score:", kappa)
print("accuracy:", accuracy)
print("End of training")
print("-----------------------------------------------------")
print("Processing Time",time.time() - start_time,' secs')
if __name__ == "__main__":
obj = TransferLearning()
obj.main()
|
zhejoe/my1stRepository
|
text.py
|
Python
|
gpl-2.0
| 10,082
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
GeobricksTRMM
A QGIS plugin
Download TRMM daily data.
-------------------
begin : 2015-10-06
copyright : (C) 2015 by Geobricks
email : info@geobricks.org
git sha : $Format:%H$
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
This script initializes the plugin, making it known to QGIS.
"""
# noinspection PyPep8Naming
def classFactory(iface): # pylint: disable=invalid-name
"""Load GeobricksTRMM class from file GeobricksTRMM.
:param iface: A QGIS interface instance.
:type iface: QgsInterface
"""
from .geobricks_trmm_qgis import GeobricksTRMM
return GeobricksTRMM(iface)
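# --- Illustrative note (not part of the plugin) ---
# QGIS imports this module and calls classFactory(iface) itself; roughly:
#     plugin = classFactory(iface)   # iface: the QgsInterface QGIS passes in
#     plugin.initGui()               # standard plugin hooks, assuming the class defines them
#     plugin.unload()                # called when the plugin is disabled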
|
geobricks/geobricks_qgis_plugin_trmm
|
__init__.py
|
Python
|
gpl-2.0
| 1,510
|
import scipy.integrate as intg
import numpy as np
#Physical Constants
#Everything is in MKS units
#Planck constant [J/s]
h = 6.6261e-34
#Boltzmann constant [J/K]
kB = 1.3806e-23
#Speed of light [m/s]
c = 299792458.0
#Pi
PI = 3.14159265
#Vacuum Permitivity
eps0 = 8.85e-12
#Resistivity of the mirror
rho=2.417e-8
GHz = 10 ** 9
#Picowatts per Watt -- assumed conversion; 'pW' is used in the __main__ block below but was never defined
pW = 10 ** 12
Tcmb = 2.725
#Calculates total black body power for a given temp and emis.
def bbSpec(freq,temp,emis):
occ = 1.0/(np.exp(h*freq/(temp*kB)) - 1)
if callable(emis):
e = emis(freq)
else:
e = emis
return (2*e*h*freq**3)/(c**2)* occ
#Calculates total black body power for a given temp and emis multiplied by the optical throughput.
def weightedSpec(freq,temp,emis):
AOmega = (c/freq)**2
return (AOmega*bbSpec(freq,temp,emis))
def bbPower(temp, emis, f1,f2):
power = .5*intg.quad(lambda x: weightedSpec(x,temp,emis), f1, f2)[0]
return power
def powFromSpec(freqs, spec):
return np.trapz(spec, freqs)
#Spillover efficiency
def spillEff(D, F, waistFact, freq):
return 1. - np.exp((-np.power(np.pi,2)/2.)*np.power((D/(waistFact*F*(c/freq))),2))
def powFrac(T1, T2, f1, f2):
if T1==0:
return 0
else:
return bbPower(T1, 1.0, f1, f2)/bbPower(T2, 1.0, f1, f2)
def getLambdaOptCoeff(chi):
geom = (1 / np.cos(chi) - np.cos(chi))
return - 2 * geom * np.sqrt(4 * PI * eps0 * rho )
def getLambdaOpt(nu, chi):
geom = (1 / np.cos(chi) - np.cos(chi))
return - 2 * geom * np.sqrt(4 * PI * eps0 * rho * nu)
def aniPowSpec(emissivity, freq, temp=None):
if temp is None:
temp = Tcmb
occ = 1.0/(np.exp(h*freq/(temp*kB)) - 1)
return ((h**2)/kB)*emissivity*(occ**2)*((freq**2)/(temp**2))*np.exp((h*freq)/(kB*temp))
def dPdT(elements, det):
"""Conversion from Power on detector to Kcmb"""
totalEff = lambda f : reduce((lambda x,y : x * y), map(lambda e : e.Eff(f), elements[1:]))
# print "Total Efficiency: %e"%totalEff(det.band_center)
return intg.quad(lambda x: aniPowSpec(totalEff(x), x, Tcmb), det.flo, det.fhi)[0]
#***** Public Methods *****
def lamb(freq, index=None):
"""Convert from from frequency [Hz] to wavelength [m]"""
if index is None:
index = 1.
return c/(freq*index)
def dielectricLoss( lossTan, thickness, index, freq, atmScatter=0):
"""Dielectric loss coefficient with thickness [m] and freq [Hz]"""
return 1.0 - np.exp((-2*PI*index*lossTan*thickness)/lamb(freq/GHz))
if __name__=="__main__":
bc = 145 * GHz
fbw = .276
flo = bc * (1 - fbw / 2)
fhi = bc * (1 + fbw / 2)
T = Tcmb
#Exact
occ = lambda nu : 1./(np.exp(h * nu / (T * kB)) - 1)
aniSpec = lambda nu : 2 * h**2 * nu **2 / (kB * T**2) * occ(nu)**2 * np.exp(h * nu / (kB * T))
factor1 = intg.quad(aniSpec, flo, fhi)[0]
cumEff = .3638
factor2 = 2 * kB * (fhi - flo)
print(factor1 * pW)
print(factor2 * pW)
print(factor2 / factor1)
# freqs = np.linspace(flo, fhi, 30)
# plt.plot(freqs, aniSpec(freqs) / (2 * kB))
## plt.plot(freqs, [2 * kB for f in freqs])
# plt.show()
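# --- Illustrative usage sketch (not in the original module) ---
# In-band power of a 2.725 K blackbody over a band, using the helpers above;
# the emissivity of 1.0 and the band edges are illustrative values only.
#     p = bbPower(Tcmb, 1.0, 130 * GHz, 160 * GHz)   # Watts (single-moded, via weightedSpec)
#     print(p * pW)                                   # same quantity in picowatts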
|
MillerCMBLabUSC/lab_analysis
|
apps/4f_model/OldCode/thermo.py
|
Python
|
gpl-2.0
| 3,192
|
# encoding: utf-8
# module samba.dcerpc.wkssvc
# from /usr/lib/python2.7/dist-packages/samba/dcerpc/wkssvc.so
# by generator 1.135
""" wkssvc DCE/RPC """
# imports
import dcerpc as __dcerpc
import talloc as __talloc
class NetWkstaInfo1059(__talloc.Object):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
buf_read_only_files = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247972723/samba/dcerpc/wkssvc/NetWkstaInfo1059.py
|
Python
|
gpl-2.0
| 675
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import wx
import widgetUtils
class audio_album(widgetUtils.BaseDialog):
def __init__(self, *args, **kwargs):
super(audio_album, self).__init__(title=_("Create a new album"), parent=None)
panel = wx.Panel(self)
sizer = wx.BoxSizer(wx.VERTICAL)
lbl = wx.StaticText(panel, wx.NewId(), _("Album title"))
self.title = wx.TextCtrl(panel, wx.NewId())
box = wx.BoxSizer(wx.HORIZONTAL)
box.Add(lbl, 1, wx.ALL, 5)
box.Add(self.title, 1, wx.ALL, 5)
sizer.Add(box, 1, wx.ALL, 5)
ok = wx.Button(panel, wx.ID_OK, _("&OK"))
ok.SetDefault()
cancel = wx.Button(panel, wx.ID_CANCEL, _("&Close"))
btnsizer = wx.BoxSizer()
btnsizer.Add(ok, 0, wx.ALL, 5)
btnsizer.Add(cancel, 0, wx.ALL, 5)
sizer.Add(btnsizer, 0, wx.ALL, 5)
panel.SetSizer(sizer)
self.SetClientSize(sizer.CalcMin())
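# --- Illustrative usage sketch (not part of the original dialog) ---
# A caller would typically drive this with the standard wx modal flow; note the
# dialog relies on a gettext '_' alias being installed by the application:
#     dlg = audio_album()
#     if dlg.ShowModal() == wx.ID_OK:
#         new_title = dlg.title.GetValue()   # text from the "Album title" field
#     dlg.Destroy()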
|
manuelcortez/socializer
|
src/wxUI/dialogs/creation.py
|
Python
|
gpl-2.0
| 975
|
# -*- coding: utf-8 -*-
"""
uds.utils.dict
~~~~~~~~~~~~~~
Utility functions for merging dictionaries.
:copyright: Copyright (c) 2015, National Institute of Information and Communications Technology. All rights reserved.
:license: GPL2, see LICENSE for more details.
"""
import copy
def override_dict(new, old):
"""Override old dict object with new one.
:param object new: New dict
:param object old: Old dict
:return: Overridden result
:rtype: :attr:`object`
"""
if isinstance(new, dict):
merged = copy.deepcopy(old)
for key in new.keys():
if key in old:
merged[key] = override_dict(new[key], old[key])
else:
merged[key] = new[key]
return merged
else:
return new
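# Illustrative usage (not in the original module): nested keys from `new` win,
# keys only present in `old` survive:
#     >>> override_dict({'a': {'b': 2}}, {'a': {'b': 1, 'c': 3}, 'd': 4})
#     {'a': {'b': 2, 'c': 3}, 'd': 4}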
|
nict-isp/uds-sdk
|
uds/utils/dict.py
|
Python
|
gpl-2.0
| 789
|
from __future__ import print_function, division, absolute_import
# Copyright (c) 2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import six
# Supported Features:
IDENTITY = "IDENTITY"
CERT_SORTER = "CERT_SORTER"
PRODUCT_DATE_RANGE_CALCULATOR = "PRODUCT_DATE_RANGE_CALCULATOR"
ENT_DIR = "ENT_DIR"
PROD_DIR = "PROD_DIR"
RHSM_ICON_CACHE = "RHSM_ICON_CACHE"
CONTENT_ACCESS_MODE_CACHE = "CONTENT_ACCESS_MODE_CACHE"
CURRENT_OWNER_CACHE = "CURRENT_OWNER_CACHE"
SYSPURPOSE_VALID_FIELDS_CACHE = "SYSPURPOSE_VALID_FIELDS_CACHE"
SUPPORTED_RESOURCES_CACHE = "SUPPORTED_RESOURCES_CACHE"
AVAILABLE_ENTITLEMENT_CACHE = "AVAILABLE_ENTITLEMENT_CACHE"
ENTITLEMENT_STATUS_CACHE = "ENTITLEMENT_STATUS_CACHE"
POOL_STATUS_CACHE = "POOL_STATUS_CACHE"
PROD_STATUS_CACHE = "PROD_STATUS_CACHE"
OVERRIDE_STATUS_CACHE = "OVERRIDE_STATUS_CACHE"
CP_PROVIDER = "CP_PROVIDER"
PLUGIN_MANAGER = "PLUGIN_MANAGER"
DBUS_IFACE = "DBUS_IFACE"
POOLTYPE_CACHE = "POOLTYPE_CACHE"
ACTION_LOCK = "ACTION_LOCK"
FACTS = "FACTS"
PROFILE_MANAGER = "PROFILE_MANAGER"
INSTALLED_PRODUCTS_MANAGER = "INSTALLED_PRODUCTS_MANAGER"
RELEASE_STATUS_CACHE = "RELEASE_STATUS_CACHE"
CONTENT_ACCESS_CACHE = "CONTENT_ACCESS_CACHE"
SYSTEMPURPOSE_COMPLIANCE_STATUS_CACHE = "SYSTEMPURPOSE_COMPLIANCE_STATUS_CACHE"
class FeatureBroker(object):
"""
Tracks all configured features.
Can track both objects to be created on the fly, and singleton's only
created once throughout the application.
Do not use this class directly, rather the global instance created below.
"""
def __init__(self):
self.providers = {}
def provide(self, feature, provider):
"""
Provide an implementation for a feature.
Can pass a callable you wish to be called on every invocation.
Can also pass an actual instance which will be returned on every
invocation. (i.e. pass an actual instance if you want a "singleton".
"""
self.providers[feature] = provider
def require(self, feature, *args, **kwargs):
"""
Require an implementation for a feature. Can be used to create objects
without requiring an exact implementation to use.
Depending on how the feature was configured during initialization, this
may return a class, or potentially a singleton object. (in which case
the args passed would be ignored)
"""
try:
provider = self.providers[feature]
except KeyError:
raise KeyError("Unknown feature: %r" % feature)
if isinstance(provider, (type, six.class_types)):
self.providers[feature] = provider(*args, **kwargs)
elif six.callable(provider):
return provider(*args, **kwargs)
return self.providers[feature]
def nonSingleton(other):
"""
Creates a factory method for a class. Passes args to the constructor
in order to create a new object every time it is required.
"""
def factory(*args, **kwargs):
return other(*args, **kwargs)
return factory
# Create a global instance we can use in all components. Tests can override
# features as desired and that change should trickle out to all components.
FEATURES = FeatureBroker()
# Small wrapper functions to make usage look a little cleaner, can use these
# instead of the global:
def require(feature, *args, **kwargs):
global FEATURES
return FEATURES.require(feature, *args, **kwargs)
def provide(feature, provider, singleton=False):
global FEATURES
if not singleton and isinstance(provider, (type, six.class_types)):
provider = nonSingleton(provider)
return FEATURES.provide(feature, provider)
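# --- Illustrative usage sketch (not in the original module) ---
if __name__ == "__main__":
    class _Stub(object):
        pass
    provide(IDENTITY, _Stub, singleton=True)          # stored as a class; first require() instantiates and caches it
    assert require(IDENTITY) is require(IDENTITY)     # same cached instance every time
    provide(CERT_SORTER, _Stub)                       # default: wrapped in a factory, fresh object per require()
    assert require(CERT_SORTER) is not require(CERT_SORTER)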
|
Lorquas/subscription-manager
|
src/subscription_manager/injection.py
|
Python
|
gpl-2.0
| 4,210
|
#------------------------------------------------------------------------------
#
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: David C. Morrill
# Date: 06/21/2002
#
# Refactored into a separate module: 07/04/2003
#
#------------------------------------------------------------------------------
""" Defines common, low-level capabilities needed by the Traits package.
"""
#-------------------------------------------------------------------------------
# Imports:
#-------------------------------------------------------------------------------
from __future__ import absolute_import
import os
import sys
from os import getcwd
from os.path import dirname, exists, join
from string import lowercase, uppercase
from types import (ListType, TupleType, DictType, StringType, UnicodeType,
IntType, LongType, FloatType, ComplexType, ClassType, TypeType)
# Set the Python version being used:
vi = sys.version_info
python_version = vi[0] + (float( vi[1] ) / 10.0)
try:
from traits.etsconfig.api import ETSConfig
except:
# If the ETSConfig package is not available, fake it:
class ETSConfig ( object ):
#-----------------------------------------------------------------------
# 'object' interface:
#-----------------------------------------------------------------------
def __init__ ( self ):
""" Constructor.
Note that this constructor can only ever be called from within
this module, since we don't expose the class.
"""
# Shadow attributes for properties:
self._application_data = None
self._toolkit = None
return
#-----------------------------------------------------------------------
# 'ETSConfig' interface:
#-----------------------------------------------------------------------
#-- Property Implementations -------------------------------------------
def _get_application_data ( self ):
""" Property getter.
This is a directory that applications and packages can safely
write non-user accessible data to i.e. configuration
information, preferences etc.
Do not put anything in here that the user might want to navigate
to (e.g. projects, user data files, etc).
The actual location differs between operating systems.
"""
if self._application_data is None:
self._application_data = self._initialize_application_data()
return self._application_data
def _set_application_data ( self, application_data ):
""" Property setter.
"""
self._application_data = application_data
application_data = property( _get_application_data,
_set_application_data )
def _get_toolkit ( self ):
"""
Property getter for the GUI toolkit. The value returned is, in
order of preference: the value set by the application; the value
passed on the command line using the '-toolkit' option; the value
specified by the 'ETS_TOOLKIT' environment variable; otherwise the
empty string.
"""
if self._toolkit is None:
self._toolkit = self._initialize_toolkit()
return self._toolkit
def _set_toolkit ( self, toolkit ):
"""
Property setter for the GUI toolkit. The toolkit can be set more
than once, but only if it is the same one each time. An application
that is written for a particular toolkit can explicitly set it
before any other module that gets the value is imported.
"""
if self._toolkit and (self._toolkit != toolkit):
raise ValueError( 'Cannot set toolkit to %s because it has '
'already been set to %s' % ( toolkit, self._toolkit ) )
self._toolkit = toolkit
return
toolkit = property( _get_toolkit, _set_toolkit )
#-- Private Methods ----------------------------------------------------
def _initialize_application_data ( self ):
""" Initializes the (default) application data directory.
"""
if sys.platform == 'win32':
environment_variable = 'APPDATA'
directory_name = 'Enthought'
else:
environment_variable = 'HOME'
directory_name = '.enthought'
# Lookup the environment variable:
parent_directory = os.environ.get( environment_variable, None )
if parent_directory is None:
raise ValueError( 'Environment variable "%s" not set' %
environment_variable )
application_data = os.path.join( parent_directory, directory_name )
# If a file already exists with this name then make sure that it is
# a directory!
if os.path.exists( application_data ):
if not os.path.isdir( application_data ):
raise ValueError( 'File "%s" already exists' %
application_data )
# Otherwise, create the directory:
else:
os.makedirs( application_data )
return application_data
def _initialize_toolkit ( self ):
""" Initializes the toolkit.
"""
# We handle the command line option even though it doesn't have the
# highest precedence because we always want to remove it from the
# command line:
if '-toolkit' in sys.argv:
opt_idx = sys.argv.index( '-toolkit' )
try:
opt_toolkit = sys.argv[ opt_idx + 1 ]
except IndexError:
raise ValueError( 'The -toolkit command line argument must '
'be followed by a toolkit name' )
# Remove the option and its argument:
del sys.argv[ opt_idx: opt_idx + 2 ]
else:
opt_toolkit = None
if self._toolkit is not None:
toolkit = self._toolkit
elif opt_toolkit is not None:
toolkit = opt_toolkit
else:
toolkit = os.environ.get( 'ETS_TOOLKIT', '' )
return toolkit
ETSConfig = ETSConfig()
#-------------------------------------------------------------------------------
# Provide Python 2.3+ compatible definitions (if necessary):
#-------------------------------------------------------------------------------
try:
from types import BooleanType
except ImportError:
BooleanType = IntType
def _enumerate ( seq ):
for i in xrange( len( seq) ):
yield i, seq[i]
try:
enumerate = enumerate
except:
enumerate = _enumerate
del _enumerate
#-------------------------------------------------------------------------------
# Constants:
#-------------------------------------------------------------------------------
ClassTypes = ( ClassType, TypeType )
SequenceTypes = ( ListType, TupleType )
ComplexTypes = ( float, int )
TypeTypes = ( StringType, UnicodeType, IntType, LongType, FloatType,
ComplexType, ListType, TupleType, DictType, BooleanType )
TraitNotifier = '__trait_notifier__'
# The standard Traits property cache prefix:
TraitsCache = '_traits_cache_'
#-------------------------------------------------------------------------------
# Singleton 'Uninitialized' object:
#-------------------------------------------------------------------------------
Uninitialized = None
class _Uninitialized(object):
""" The singleton value of this class represents the uninitialized state
of a trait and is specified as the 'old' value in the trait change
notification that occurs when the value of a trait is read before being
set.
"""
def __new__(cls):
if Uninitialized is not None:
return Uninitialized
else:
self = object.__new__(cls)
return self
def __repr__(self):
return '<uninitialized>'
def __reduce_ex__(self, protocol):
return (_Uninitialized, ())
#: When the first reference to a trait is a 'get' reference, the default value of
#: the trait is implicitly assigned and returned as the value of the trait.
#: Because of this implicit assignment, a trait change notification is
#: generated with the Uninitialized object as the 'old' value of the trait, and
#: the default trait value as the 'new' value. This allows other parts of the
#: traits package to recognize the assignment as the implicit default value
#: assignment, and treat it specially.
Uninitialized = _Uninitialized()
#-------------------------------------------------------------------------------
# Singleton 'Undefined' object (used as undefined trait name and/or value):
#-------------------------------------------------------------------------------
Undefined = None
class _Undefined(object):
""" Singleton 'Undefined' object (used as undefined trait name and/or value)
"""
def __new__(cls):
if Undefined is not None:
return Undefined
else:
self = object.__new__(cls)
return self
def __repr__(self):
return '<undefined>'
def __reduce_ex__(self, protocol):
return (_Undefined, ())
def __eq__(self, other):
return type(self) is type(other)
def __ne__(self, other):
return type(self) is not type(other)
#: Singleton object that indicates that a trait attribute has not yet had a
#: value set (i.e., its value is undefined). This object is used instead of
#: None, because None often has other meanings, such as that a value is not
#: used. When a trait attribute is first assigned a value, and its associated
#: trait notification handlers are called, Undefined is passed as the *old*
#: parameter, to indicate that the attribute previously had no value.
Undefined = _Undefined()
# Tell the C-base code about singleton 'Undefined' and 'Uninitialized' objects:
from . import ctraits
ctraits._undefined( Undefined, Uninitialized )
#-------------------------------------------------------------------------------
# Singleton 'Missing' object (used as missing method argument marker):
#-------------------------------------------------------------------------------
class Missing ( object ):
""" Singleton 'Missing' object (used as missing method argument marker).
"""
def __repr__ ( self ):
return '<missing>'
#: Singleton object that indicates that a method argument is missing from a
#: type-checked method signature.
Missing = Missing()
#-------------------------------------------------------------------------------
# Singleton 'Self' object (used as object reference to current 'object'):
#-------------------------------------------------------------------------------
class Self ( object ):
""" Singleton 'Self' object (used as object reference to current 'object').
"""
def __repr__ ( self ):
return '<self>'
#: Singleton object that references the current 'object'.
Self = Self()
#-------------------------------------------------------------------------------
# Define a special 'string' coercion function:
#-------------------------------------------------------------------------------
def strx ( arg ):
""" Wraps the built-in str() function to raise a TypeError if the
argument is not of a type in StringTypes.
"""
if type( arg ) in StringTypes:
return str( arg )
raise TypeError
#-------------------------------------------------------------------------------
# Constants:
#-------------------------------------------------------------------------------
StringTypes = ( StringType, UnicodeType, IntType, LongType, FloatType,
ComplexType )
#-------------------------------------------------------------------------------
# Define a mapping of coercable types:
#-------------------------------------------------------------------------------
# Mapping of coercable types.
CoercableTypes = {
LongType: ( 11, long, int ),
FloatType: ( 11, float, int ),
ComplexType: ( 11, complex, float, int ),
UnicodeType: ( 11, unicode, str )
}
#-------------------------------------------------------------------------------
# Return a string containing the class name of an object with the correct
# article (a or an) preceding it (e.g. 'an Image', 'a PlotValue'):
#-------------------------------------------------------------------------------
def class_of ( object ):
""" Returns a string containing the class name of an object with the
correct indefinite article ('a' or 'an') preceding it (e.g., 'an Image',
'a PlotValue').
"""
if isinstance( object, basestring ):
return add_article( object )
return add_article( object.__class__.__name__ )
#-------------------------------------------------------------------------------
# Return a string containing the right article (i.e. 'a' or 'an') prefixed to
# a specified string:
#-------------------------------------------------------------------------------
def add_article ( name ):
""" Returns a string containing the correct indefinite article ('a' or 'an')
prefixed to the specified string.
"""
if name[:1].lower() in 'aeiou':
return 'an ' + name
return 'a ' + name
#----------------------------------------------------------------------------
# Return a 'user-friendly' name for a specified trait:
#----------------------------------------------------------------------------
def user_name_for ( name ):
""" Returns a "user-friendly" version of a string, with the first letter
capitalized and with underscore characters replaced by spaces. For example,
``user_name_for('user_name_for')`` returns ``'User name for'``.
"""
name = name.replace( '_', ' ' )
result = ''
last_lower = False
for c in name:
if (c in uppercase) and last_lower:
result += ' '
last_lower = (c in lowercase)
result += c
return result.capitalize()
#-------------------------------------------------------------------------------
# Gets the path to the traits home directory:
#-------------------------------------------------------------------------------
_traits_home = None
def traits_home ( ):
""" Gets the path to the Traits home directory.
"""
global _traits_home
if _traits_home is None:
_traits_home = verify_path( join( ETSConfig.application_data,
'traits' ) )
return _traits_home
#-------------------------------------------------------------------------------
# Verify that a specified path exists, and try to create it if it doesn't:
#-------------------------------------------------------------------------------
def verify_path ( path ):
""" Verify that a specified path exists, and try to create it if it
does not exist.
"""
if not exists( path ):
try:
os.mkdir( path )
except:
pass
return path
#-------------------------------------------------------------------------------
# Returns the name of the module the caller's caller is located in:
#-------------------------------------------------------------------------------
def get_module_name ( level = 2 ):
""" Returns the name of the module that the caller's caller is located in.
"""
return sys._getframe( level ).f_globals.get( '__name__', '__main__' )
#-------------------------------------------------------------------------------
# Returns a resource path calculated from the caller's stack:
#-------------------------------------------------------------------------------
def get_resource_path ( level = 2 ):
"""Returns a resource path calculated from the caller's stack.
"""
module = sys._getframe( level ).f_globals.get( '__name__', '__main__' )
if module != '__main__':
# Return the path to the module:
try:
return dirname( getattr( sys.modules.get( module ), '__file__' ) )
except:
# Apparently 'module' is not a registered module...treat it like
# '__main__':
pass
# '__main__' is not a real module, so we need a work around:
for path in [ dirname( sys.argv[0] ), getcwd() ]:
if exists( path ):
break
return path
#-------------------------------------------------------------------------------
# Returns the value of an extended object attribute name of the form:
# name[.name2[.name3...]]:
#-------------------------------------------------------------------------------
def xgetattr( object, xname, default = Undefined ):
""" Returns the value of an extended object attribute name of the form:
name[.name2[.name3...]].
"""
names = xname.split( '.' )
for name in names[:-1]:
if default is Undefined:
object = getattr( object, name )
else:
object = getattr( object, name, None )
if object is None:
return default
if default is Undefined:
return getattr( object, names[-1] )
return getattr( object, names[-1], default )
#-------------------------------------------------------------------------------
# Sets the value of an extended object attribute name of the form:
# name[.name2[.name3...]]:
#-------------------------------------------------------------------------------
def xsetattr( object, xname, value ):
""" Sets the value of an extended object attribute name of the form:
name[.name2[.name3...]].
"""
names = xname.split( '.' )
for name in names[:-1]:
object = getattr( object, name )
setattr( object, names[-1], value )
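#-------------------------------------------------------------------------------
# Illustrative usage of xgetattr/xsetattr (not part of the original module):
#
#     >>> class Obj ( object ): pass
#     >>> o = Obj(); o.child = Obj(); o.child.name = 'x'
#     >>> xgetattr( o, 'child.name' )
#     'x'
#     >>> xsetattr( o, 'child.name', 'y' ); o.child.name
#     'y'
#-------------------------------------------------------------------------------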
#-------------------------------------------------------------------------------
# Traits metadata selection functions:
#-------------------------------------------------------------------------------
def is_none ( value ):
return (value is None)
def not_none ( value ):
return (value is not None)
def not_false ( value ):
return (value is not False)
def not_event ( value ):
return (value != 'event')
def is_str ( value ):
return isinstance( value, basestring )
|
HyperloopTeam/FullOpenMDAO
|
lib/python2.7/site-packages/traits-4.3.0-py2.7-macosx-10.10-x86_64.egg/traits/trait_base.py
|
Python
|
gpl-2.0
| 19,017
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import time
__author__ = 'mah'
__email__ = 'andrew.makhotin@gmail.com'
import MySQLdb as mdb
import sys
import ConfigParser
import logging
import logging.handlers
import re
import os
from ffprobe import FFProbe
#### LOG ###
logger = logging.getLogger('Logging for check_sound')
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
handler = logging.handlers.RotatingFileHandler('/var/log/eservices/ffmpeg_sound.log', maxBytes=(1048576*5), backupCount=5)
handler.setFormatter(formatter)
consolehandler = logging.StreamHandler() # for stdout
consolehandler.setFormatter(formatter)
logger.addHandler(consolehandler)
logger.addHandler(handler)
### LOG ###
dbcon = {}
conf = ConfigParser.ConfigParser()
conf.read('/etc/eservices/ffmpeg_sound.cfg')
dbcon.update({'service_id': conf.get('main', 'service_id')})
dbcon.update({'dbhost': conf.get("mysql", "host")})
dbcon.update({'dbuser': conf.get("mysql", "user")})
dbcon.update({'dbpasswd': conf.get("mysql", "passwd")})
dbcon.update({'dbbase': conf.get("mysql", "base")})
def channelsinsrv(srvid):
'''
What channels in this service id
:param srvid:
:return: list of (service_id, btv_channel_id, file_name) rows
'''
ch = []
con = None
try:
    con = mdb.connect(dbcon['dbhost'], dbcon['dbuser'], dbcon['dbpasswd'], dbcon['dbbase'], charset='utf8')
    cur = con.cursor()
    cur.execute('''SELECT service_id, btv_channel_id, file_name FROM ma_internet_v2.hls_collector_report_view
                   where service_id = %s''' % (srvid,))
    rows = cur.fetchall()
    for row in rows:
        ch.append(row)
except mdb.Error, e:
    # catch mdb.Error (not con.Error) so a failed connect is still handled
    logger.error("Error %d: %s", e.args[0], e.args[1])
finally:
    if con:
        con.close()
return ch
def checksound(pls):
'''
Check sound in ffprobe class and return status sound restart ch pid if needed
:param pls: path to the pls.m3u8 playlist
:return: status sound in ch
'''
status = {}
meta = False
try:
meta = FFProbe(pls)
except IOError, e:
logger.error('====Error:%s', e)
return 'nofile'
if meta:
    for stream in meta.streams:
        if stream.isVideo():
            status['video'] = stream.codec()
        elif stream.isAudio():
            status['audio'] = stream.codec()
else:
    return False
logger.debug('status: %s, %s', status, pls)
return status
def restartchid(ch):
'''
Restart ch i
:param ch: (89L, 326L, u'/var/lastxdays/326/5a9f3bad8adba3a5')
:return:
'''
logger.warning('to do restart ch:%s', ch[1])
con = None
try:
    con = mdb.connect(dbcon['dbhost'], dbcon['dbuser'], dbcon['dbpasswd'], dbcon['dbbase'], charset='utf8')
    cur = con.cursor()
    cur.execute('''UPDATE ma_internet_v2.hls_collector_report_view set restart = 1 where service_id = %s
                   AND btv_channel_id = %s;''' % (ch[0], ch[1],))
    con.commit()
    logger.warning('Restart Done')
except mdb.Error, e:
    logger.error("Error %d: %s", e.args[0], e.args[1])
finally:
    if con:
        con.close()
def create_m3u8(pls, ch):
    with open(pls+'.m3u8', 'r') as f:
        data = f.readlines()
    # keep the playlist header plus the last few segments
    last = data[:6] + data[-4:]
    srcdir = os.path.split(pls+'.m3u8')[0]
    out = '/run/sound/'+str(ch)+'.m3u8'
    with open(out, 'w') as tempfile:
        for i in last:
            if re.search(r'\.ts', i):
                # rewrite segment lines to absolute paths under the source dir
                tempfile.write(srcdir+'/'+i)
            else:
                tempfile.write(i)
    return out
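# Illustrative result (hypothetical paths): given ch=326 and
# pls='/var/lastxdays/326/5a9f3bad8adba3a5', the function writes
# /run/sound/326.m3u8 holding the playlist header plus the last few
# segments, with each .ts line rewritten to an absolute source path.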
#########################
def main():
if not os.path.isdir('/run/sound'):
os.mkdir('/run/sound')
for id in dbcon['service_id'].split(','):
chids = channelsinsrv(id)
logger.info('service: %s', id)
'''
chid is:[0] [1] [2]
(service_id, btv_channel_id, file_name)
'''
for ch in chids:
    pls = create_m3u8(ch[2], ch[1])
    if ch[1] == 159:
        print '!!!!! 159 !!!!!!'
    # probe once; checksound() takes only the playlist path
    status = checksound(pls)
    if isinstance(status, dict) and 'audio' not in status:
        logger.warning('no audio in %s, %s', status, ch[1])
        #TODO if not video do not restart ch
        restartchid(ch)
if __name__ == '__main__':
while 1:
try:
main()
except KeyboardInterrupt:
sys.exit(0)
#logger.info('waiting...')
time.sleep(30)
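# --- Illustrative config sketch (hypothetical values) ---
# /etc/eservices/ffmpeg_sound.cfg is expected to provide the sections read above:
#   [main]
#   service_id = 89,90
#   [mysql]
#   host = localhost
#   user = monitor
#   passwd = secret
#   base = ma_internet_v2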
|
mahandra/recipes_video_conv
|
rec_hls_server/check_rec_stream.py
|
Python
|
gpl-2.0
| 4,889
|
from django.conf.urls import url
urlpatterns = [
url(r'^itemsearch/(?P<index>.*)/(?P<concept_id>.*)/(?P<term>.*)$', 'wikidata.views.search_typed_items'),
]
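# --- Illustrative note (not in the original file) ---
# String view references were deprecated in Django 1.8 and removed in 1.10;
# on a modern Django this route would look roughly like:
#     from wikidata import views
#     url(r'^itemsearch/(?P<index>.*)/(?P<concept_id>.*)/(?P<term>.*)$',
#         views.search_typed_items)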
|
FUB-HCC/neonion
|
wikidata/urls.py
|
Python
|
gpl-2.0
| 174
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import unittest
import jsbeautifier
class TestJSBeautifier(unittest.TestCase):
def test_unescape(self):
# Test cases contributed by <chrisjshull on GitHub.com>
test_fragment = self.decodesto
bt = self.bt
bt('"\\\\s"'); # == "\\s" in the js source
bt("'\\\\s'"); # == '\\s' in the js source
bt("'\\\\\\s'"); # == '\\\s' in the js source
bt("'\\s'"); # == '\s' in the js source
bt('"•"');
bt('"—"');
bt('"\\x41\\x42\\x43\\x01"', '"\\x41\\x42\\x43\\x01"');
bt('"\\u2022"', '"\\u2022"');
bt('a = /\s+/')
#bt('a = /\\x41/','a = /A/')
bt('"\\u2022";a = /\s+/;"\\x41\\x42\\x43\\x01".match(/\\x41/);','"\\u2022";\na = /\s+/;\n"\\x41\\x42\\x43\\x01".match(/\\x41/);')
bt('"\\x22\\x27",\'\\x22\\x27\',"\\x5c",\'\\x5c\',"\\xff and \\xzz","unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"', '"\\x22\\x27", \'\\x22\\x27\', "\\x5c", \'\\x5c\', "\\xff and \\xzz", "unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"');
self.options.unescape_strings = True
bt('"\\x41\\x42\\x43\\x01"', '"ABC\\x01"');
bt('"\\u2022"', '"\\u2022"');
bt('a = /\s+/')
bt('"\\u2022";a = /\s+/;"\\x41\\x42\\x43\\x01".match(/\\x41/);','"\\u2022";\na = /\s+/;\n"ABC\\x01".match(/\\x41/);')
bt('"\\x22\\x27",\'\\x22\\x27\',"\\x5c",\'\\x5c\',"\\xff and \\xzz","unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"', '"\\"\'", \'"\\\'\', "\\\\", \'\\\\\', "\\xff and \\xzz", "unicode \\u0000 \\" \' \\\\ \\uffff \\uzzzz"');
self.options.unescape_strings = False
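    # Note on the helpers used throughout (defined on the class elsewhere):
    # bt(x) asserts that beautifying x yields x back (idempotent input), while
    # bt(x, y) asserts that x beautifies to y; test_fragment appears to do the
    # same comparison without bt's extra round-trip checks.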
def test_beautifier(self):
test_fragment = self.decodesto
bt = self.bt
bt('');
bt('return .5');
test_fragment(' return .5');
bt('a = 1', 'a = 1');
bt('a=1', 'a = 1');
bt("a();\n\nb();", "a();\n\nb();");
bt('var a = 1 var b = 2', "var a = 1\nvar b = 2");
bt('var a=1, b=c[d], e=6;', 'var a = 1,\n b = c[d],\n e = 6;');
bt('a = " 12345 "');
bt("a = ' 12345 '");
bt('if (a == 1) b = 2;', "if (a == 1) b = 2;");
bt('if(1){2}else{3}', "if (1) {\n 2\n} else {\n 3\n}");
bt('if(1||2);', 'if (1 || 2);');
bt('(a==1)||(b==2)', '(a == 1) || (b == 2)');
bt('var a = 1 if (2) 3;', "var a = 1\nif (2) 3;");
bt('a = a + 1');
bt('a = a == 1');
bt('/12345[^678]*9+/.match(a)');
bt('a /= 5');
bt('a = 0.5 * 3');
bt('a *= 10.55');
bt('a < .5');
bt('a <= .5');
bt('a<.5', 'a < .5');
bt('a<=.5', 'a <= .5');
bt('a = 0xff;');
bt('a=0xff+4', 'a = 0xff + 4');
bt('a = [1, 2, 3, 4]');
bt('F*(g/=f)*g+b', 'F * (g /= f) * g + b');
bt('a.b({c:d})', "a.b({\n c: d\n})");
bt('a.b\n(\n{\nc:\nd\n}\n)', "a.b({\n c: d\n})");
bt('a=!b', 'a = !b');
bt('a?b:c', 'a ? b : c');
bt('a?1:2', 'a ? 1 : 2');
bt('a?(b):c', 'a ? (b) : c');
bt('x={a:1,b:w=="foo"?x:y,c:z}', 'x = {\n a: 1,\n b: w == "foo" ? x : y,\n c: z\n}');
bt('x=a?b?c?d:e:f:g;', 'x = a ? b ? c ? d : e : f : g;');
bt('x=a?b?c?d:{e1:1,e2:2}:f:g;', 'x = a ? b ? c ? d : {\n e1: 1,\n e2: 2\n} : f : g;');
bt('function void(void) {}');
bt('if(!a)foo();', 'if (!a) foo();');
bt('a=~a', 'a = ~a');
bt('a;/*comment*/b;', "a; /*comment*/\nb;");
bt('a;/* comment */b;', "a; /* comment */\nb;");
test_fragment('a;/*\ncomment\n*/b;', "a;\n/*\ncomment\n*/\nb;"); # simple comments don't get touched at all
bt('a;/**\n* javadoc\n*/b;', "a;\n/**\n * javadoc\n */\nb;");
test_fragment('a;/**\n\nno javadoc\n*/b;', "a;\n/**\n\nno javadoc\n*/\nb;");
bt('a;/*\n* javadoc\n*/b;', "a;\n/*\n * javadoc\n */\nb;"); # comment blocks detected and reindented even w/o javadoc starter
bt('if(a)break;', "if (a) break;");
bt('if(a){break}', "if (a) {\n break\n}");
bt('if((a))foo();', 'if ((a)) foo();');
bt('for(var i=0;;) a', 'for (var i = 0;;) a');
bt('for(var i=0;;)\na', 'for (var i = 0;;)\n a');
bt('a++;', 'a++;');
bt('for(;;i++)a()', 'for (;; i++) a()');
bt('for(;;i++)\na()', 'for (;; i++)\n a()');
bt('for(;;++i)a', 'for (;; ++i) a');
bt('return(1)', 'return (1)');
bt('try{a();}catch(b){c();}finally{d();}', "try {\n a();\n} catch (b) {\n c();\n} finally {\n d();\n}");
bt('(xx)()'); # magic function call
bt('a[1]()'); # another magic function call
bt('if(a){b();}else if(c) foo();', "if (a) {\n b();\n} else if (c) foo();");
bt('switch(x) {case 0: case 1: a(); break; default: break}', "switch (x) {\n case 0:\n case 1:\n a();\n break;\n default:\n break\n}");
bt('switch(x){case -1:break;case !y:break;}', 'switch (x) {\n case -1:\n break;\n case !y:\n break;\n}');
bt('a !== b');
bt('if (a) b(); else c();', "if (a) b();\nelse c();");
bt("// comment\n(function something() {})"); # typical greasemonkey start
bt("{\n\n x();\n\n}"); # was: duplicating newlines
bt('if (a in b) foo();');
bt('var a, b;');
# bt('var a, b');
bt('{a:1, b:2}', "{\n a: 1,\n b: 2\n}");
bt('a={1:[-1],2:[+1]}', 'a = {\n 1: [-1],\n 2: [+1]\n}');
bt('var l = {\'a\':\'1\', \'b\':\'2\'}', "var l = {\n 'a': '1',\n 'b': '2'\n}");
bt('if (template.user[n] in bk) foo();');
bt('{{}/z/}', "{\n {}\n /z/\n}");
bt('return 45', "return 45");
bt('If[1]', "If[1]");
bt('Then[1]', "Then[1]");
bt('a = 1e10', "a = 1e10");
bt('a = 1.3e10', "a = 1.3e10");
bt('a = 1.3e-10', "a = 1.3e-10");
bt('a = -1.3e-10', "a = -1.3e-10");
bt('a = 1e-10', "a = 1e-10");
bt('a = e - 10', "a = e - 10");
bt('a = 11-10', "a = 11 - 10");
bt("a = 1;// comment", "a = 1; // comment");
bt("a = 1; // comment", "a = 1; // comment");
bt("a = 1;\n // comment", "a = 1;\n// comment");
bt('a = [-1, -1, -1]');
# The exact formatting these should have is open for discussion, but they are at least reasonable
bt('a = [ // comment\n -1, -1, -1\n]');
bt('var a = [ // comment\n -1, -1, -1\n]');
bt('a = [ // comment\n -1, // comment\n -1, -1\n]');
bt('var a = [ // comment\n -1, // comment\n -1, -1\n]');
bt('o = [{a:b},{c:d}]', 'o = [{\n a: b\n}, {\n c: d\n}]');
bt("if (a) {\n do();\n}"); # was: extra space appended
bt("if (a) {\n// comment\n}else{\n// comment\n}", "if (a) {\n // comment\n} else {\n // comment\n}"); # if/else statement with empty body
bt("if (a) {\n// comment\n// comment\n}", "if (a) {\n // comment\n // comment\n}"); # multiple comments indentation
bt("if (a) b() else c();", "if (a) b()\nelse c();");
bt("if (a) b() else if c() d();", "if (a) b()\nelse if c() d();");
bt("{}");
bt("{\n\n}");
bt("do { a(); } while ( 1 );", "do {\n a();\n} while (1);");
bt("do {} while (1);");
bt("do {\n} while (1);", "do {} while (1);");
bt("do {\n\n} while (1);");
bt("var a = x(a, b, c)");
bt("delete x if (a) b();", "delete x\nif (a) b();");
bt("delete x[x] if (a) b();", "delete x[x]\nif (a) b();");
bt("for(var a=1,b=2)d", "for (var a = 1, b = 2) d");
bt("for(var a=1,b=2,c=3) d", "for (var a = 1, b = 2, c = 3) d");
bt("for(var a=1,b=2,c=3;d<3;d++)\ne", "for (var a = 1, b = 2, c = 3; d < 3; d++)\n e");
bt("function x(){(a||b).c()}", "function x() {\n (a || b).c()\n}");
bt("function x(){return - 1}", "function x() {\n return -1\n}");
bt("function x(){return ! a}", "function x() {\n return !a\n}");
# a common snippet in jQuery plugins
bt("settings = $.extend({},defaults,settings);", "settings = $.extend({}, defaults, settings);");
bt('{xxx;}()', '{\n xxx;\n}()');
bt("a = 'a'\nb = 'b'");
bt("a = /reg/exp");
bt("a = /reg/");
bt('/abc/.test()');
bt('/abc/i.test()');
bt("{/abc/i.test()}", "{\n /abc/i.test()\n}");
bt('var x=(a)/a;', 'var x = (a) / a;');
bt('x != -1', 'x != -1');
bt('for (; s-->0;)t', 'for (; s-- > 0;) t');
bt('for (; s++>0;)u', 'for (; s++ > 0;) u');
bt('a = s++>s--;', 'a = s++ > s--;');
bt('a = s++>--s;', 'a = s++ > --s;');
bt('{x=#1=[]}', '{\n x = #1=[]\n}');
bt('{a:#1={}}', '{\n a: #1={}\n}');
bt('{a:#1#}', '{\n a: #1#\n}');
test_fragment('"incomplete-string');
test_fragment("'incomplete-string");
test_fragment('/incomplete-regex');
test_fragment('{a:1},{a:2}', '{\n a: 1\n}, {\n a: 2\n}');
test_fragment('var ary=[{a:1}, {a:2}];', 'var ary = [{\n a: 1\n}, {\n a: 2\n}];');
test_fragment('{a:#1', '{\n a: #1'); # incomplete
test_fragment('{a:#', '{\n a: #'); # incomplete
test_fragment('}}}', '}\n}\n}'); # incomplete
test_fragment('<!--\nvoid();\n// -->', '<!--\nvoid();\n// -->');
test_fragment('a=/regexp', 'a = /regexp'); # incomplete regexp
bt('{a:#1=[],b:#1#,c:#999999#}', '{\n a: #1=[],\n b: #1#,\n c: #999999#\n}');
bt("a = 1e+2");
bt("a = 1e-2");
bt("do{x()}while(a>1)", "do {\n x()\n} while (a > 1)");
bt("x(); /reg/exp.match(something)", "x();\n/reg/exp.match(something)");
test_fragment("something();(", "something();\n(");
test_fragment("#!she/bangs, she bangs\nf=1", "#!she/bangs, she bangs\n\nf = 1");
test_fragment("#!she/bangs, she bangs\n\nf=1", "#!she/bangs, she bangs\n\nf = 1");
test_fragment("#!she/bangs, she bangs\n\n/* comment */", "#!she/bangs, she bangs\n\n/* comment */");
test_fragment("#!she/bangs, she bangs\n\n\n/* comment */", "#!she/bangs, she bangs\n\n\n/* comment */");
test_fragment("#", "#");
test_fragment("#!", "#!");
bt("function namespace::something()");
test_fragment("<!--\nsomething();\n-->", "<!--\nsomething();\n-->");
test_fragment("<!--\nif(i<0){bla();}\n-->", "<!--\nif (i < 0) {\n bla();\n}\n-->");
bt('{foo();--bar;}', '{\n foo();\n --bar;\n}');
bt('{foo();++bar;}', '{\n foo();\n ++bar;\n}');
bt('{--bar;}', '{\n --bar;\n}');
bt('{++bar;}', '{\n ++bar;\n}');
# Handling of newlines around unary ++ and -- operators
bt('{foo\n++bar;}', '{\n foo\n ++bar;\n}');
bt('{foo++\nbar;}', '{\n foo++\n bar;\n}');
# This is invalid, but harder to guard against. Issue #203.
bt('{foo\n++\nbar;}', '{\n foo\n ++\n bar;\n}');
# regexps
bt('a(/abc\\/\\/def/);b()', "a(/abc\\/\\/def/);\nb()");
bt('a(/a[b\\[\\]c]d/);b()', "a(/a[b\\[\\]c]d/);\nb()");
test_fragment('a(/a[b\\[', "a(/a[b\\["); # incomplete char class
# allow unescaped / in char classes
bt('a(/[a/b]/);b()', "a(/[a/b]/);\nb()");
bt('a=[[1,2],[4,5],[7,8]]', "a = [\n [1, 2],\n [4, 5],\n [7, 8]\n]");
bt('a=[[1,2],[4,5],function(){},[7,8]]',
"a = [\n [1, 2],\n [4, 5],\n function() {},\n [7, 8]\n]");
bt('a=[[1,2],[4,5],function(){},function(){},[7,8]]',
"a = [\n [1, 2],\n [4, 5],\n function() {},\n function() {},\n [7, 8]\n]");
bt('a=[[1,2],[4,5],function(){},[7,8]]',
"a = [\n [1, 2],\n [4, 5],\n function() {},\n [7, 8]\n]");
bt('a=[b,c,function(){},function(){},d]',
"a = [b, c,\n function() {},\n function() {},\n d\n]");
bt('a=[a[1],b[4],c[d[7]]]', "a = [a[1], b[4], c[d[7]]]");
bt('[1,2,[3,4,[5,6],7],8]', "[1, 2, [3, 4, [5, 6], 7], 8]");
bt('[[["1","2"],["3","4"]],[["5","6","7"],["8","9","0"]],[["1","2","3"],["4","5","6","7"],["8","9","0"]]]',
'[\n [\n ["1", "2"],\n ["3", "4"]\n ],\n [\n ["5", "6", "7"],\n ["8", "9", "0"]\n ],\n [\n ["1", "2", "3"],\n ["4", "5", "6", "7"],\n ["8", "9", "0"]\n ]\n]');
bt('{[x()[0]];indent;}', '{\n [x()[0]];\n indent;\n}');
bt('return ++i', 'return ++i');
bt('return !!x', 'return !!x');
bt('return !x', 'return !x');
bt('return [1,2]', 'return [1, 2]');
bt('return;', 'return;');
bt('return\nfunc', 'return\nfunc');
bt('catch(e)', 'catch (e)');
bt('var a=1,b={foo:2,bar:3},{baz:4,wham:5},c=4;',
'var a = 1,\n b = {\n foo: 2,\n bar: 3\n }, {\n baz: 4,\n wham: 5\n }, c = 4;');
bt('var a=1,b={foo:2,bar:3},{baz:4,wham:5},\nc=4;',
'var a = 1,\n b = {\n foo: 2,\n bar: 3\n }, {\n baz: 4,\n wham: 5\n },\n c = 4;');
# inline comment
bt('function x(/*int*/ start, /*string*/ foo)', 'function x( /*int*/ start, /*string*/ foo)');
# javadoc comment
bt('/**\n* foo\n*/', '/**\n * foo\n */');
bt('{\n/**\n* foo\n*/\n}', '{\n /**\n * foo\n */\n}');
bt('var a,b,c=1,d,e,f=2;', 'var a, b, c = 1,\n d, e, f = 2;');
bt('var a,b,c=[],d,e,f=2;', 'var a, b, c = [],\n d, e, f = 2;');
bt('function() {\n var a, b, c, d, e = [],\n f;\n}');
bt('do/regexp/;\nwhile(1);', 'do /regexp/;\nwhile (1);'); # hmmm
bt('var a = a,\na;\nb = {\nb\n}', 'var a = a,\n a;\nb = {\n b\n}');
bt('var a = a,\n /* c */\n b;');
bt('var a = a,\n // c\n b;');
bt('foo.("bar");'); # weird element referencing
bt('if (a) a()\nelse b()\nnewline()');
bt('if (a) a()\nnewline()');
bt('a=typeof(x)', 'a = typeof(x)');
bt('var a = function() {\n return null;\n},\n b = false;');
bt('var a = function() {\n func1()\n}');
bt('var a = function() {\n func1()\n}\nvar b = function() {\n func2()\n}');
self.options.jslint_happy = True
bt('x();\n\nfunction(){}', 'x();\n\nfunction () {}');
bt('function () {\n var a, b, c, d, e = [],\n f;\n}');
bt('switch(x) {case 0: case 1: a(); break; default: break}',
"switch (x) {\ncase 0:\ncase 1:\n a();\n break;\ndefault:\n break\n}");
bt('switch(x){case -1:break;case !y:break;}',
'switch (x) {\ncase -1:\n break;\ncase !y:\n break;\n}');
test_fragment("// comment 1\n(function()", "// comment 1\n(function ()"); # typical greasemonkey start
bt('var o1=$.extend(a);function(){alert(x);}', 'var o1 = $.extend(a);\n\nfunction () {\n alert(x);\n}');
bt('a=typeof(x)', 'a = typeof (x)');
self.options.jslint_happy = False
bt('switch(x) {case 0: case 1: a(); break; default: break}',
"switch (x) {\n case 0:\n case 1:\n a();\n break;\n default:\n break\n}");
bt('switch(x){case -1:break;case !y:break;}',
'switch (x) {\n case -1:\n break;\n case !y:\n break;\n}');
test_fragment("// comment 2\n(function()", "// comment 2\n(function()"); # typical greasemonkey start
bt("var a2, b2, c2, d2 = 0, c = function() {}, d = '';", "var a2, b2, c2, d2 = 0,\n c = function() {}, d = '';");
bt("var a2, b2, c2, d2 = 0, c = function() {},\nd = '';", "var a2, b2, c2, d2 = 0,\n c = function() {},\n d = '';");
bt('var o2=$.extend(a);function(){alert(x);}', 'var o2 = $.extend(a);\n\nfunction() {\n alert(x);\n}');
bt('{"x":[{"a":1,"b":3},7,8,8,8,8,{"b":99},{"a":11}]}', '{\n "x": [{\n "a": 1,\n "b": 3\n },\n 7, 8, 8, 8, 8, {\n "b": 99\n }, {\n "a": 11\n }\n ]\n}');
bt('{"1":{"1a":"1b"},"2"}', '{\n "1": {\n "1a": "1b"\n },\n "2"\n}');
bt('{a:{a:b},c}', '{\n a: {\n a: b\n },\n c\n}');
bt('{[y[a]];keep_indent;}', '{\n [y[a]];\n keep_indent;\n}');
bt('if (x) {y} else { if (x) {y}}', 'if (x) {\n y\n} else {\n if (x) {\n y\n }\n}');
bt('if (foo) one()\ntwo()\nthree()');
bt('if (1 + foo() && bar(baz()) / 2) one()\ntwo()\nthree()');
bt('if (1 + foo() && bar(baz()) / 2) one();\ntwo();\nthree();');
self.options.indent_size = 1;
self.options.indent_char = ' ';
bt('{ one_char() }', "{\n one_char()\n}");
bt('var a,b=1,c=2', 'var a, b = 1,\n c = 2');
self.options.indent_size = 4;
self.options.indent_char = ' ';
bt('{ one_char() }', "{\n one_char()\n}");
self.options.indent_size = 1;
self.options.indent_char = "\t";
bt('{ one_char() }', "{\n\tone_char()\n}");
bt('x = a ? b : c; x;', 'x = a ? b : c;\nx;');
self.options.indent_size = 4;
self.options.indent_char = ' ';
self.options.preserve_newlines = False;
bt('var\na=dont_preserve_newlines;', 'var a = dont_preserve_newlines;');
# make sure the blank line between function definitions stays
# even when preserve_newlines = False
bt('function foo() {\n return 1;\n}\n\nfunction foo() {\n return 1;\n}');
bt('function foo() {\n return 1;\n}\nfunction foo() {\n return 1;\n}',
'function foo() {\n return 1;\n}\n\nfunction foo() {\n return 1;\n}'
);
bt('function foo() {\n return 1;\n}\n\n\nfunction foo() {\n return 1;\n}',
'function foo() {\n return 1;\n}\n\nfunction foo() {\n return 1;\n}'
);
self.options.preserve_newlines = True;
bt('var\na=do_preserve_newlines;', 'var\na = do_preserve_newlines;')
bt('// a\n// b\n\n// c\n// d')
bt('if (foo) // comment\n{\n bar();\n}')
self.options.keep_array_indentation = False;
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f']",
"a = ['a', 'b', 'c',\n 'd', 'e', 'f'\n]");
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i']",
"a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i'\n]");
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i']",
"a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i'\n]");
bt('var x = [{}\n]', 'var x = [{}]');
bt('var x = [{foo:bar}\n]', 'var x = [{\n foo: bar\n}]');
bt("a = ['something',\n 'completely',\n 'different'];\nif (x);",
"a = ['something',\n 'completely',\n 'different'\n];\nif (x);");
bt("a = ['a','b','c']", "a = ['a', 'b', 'c']");
bt("a = ['a', 'b','c']", "a = ['a', 'b', 'c']");
bt("x = [{'a':0}]",
"x = [{\n 'a': 0\n}]");
bt('{a([[a1]], {b;});}',
'{\n a([\n [a1]\n ], {\n b;\n });\n}');
bt("a();\n [\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ].toString();",
"a();\n[\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n].toString();");
bt("function() {\n Foo([\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ]);\n}",
"function() {\n Foo([\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ]);\n}");
self.options.keep_array_indentation = True;
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f']");
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i']");
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i']");
bt('var x = [{}\n]', 'var x = [{}\n]');
bt('var x = [{foo:bar}\n]', 'var x = [{\n foo: bar\n }\n]');
bt("a = ['something',\n 'completely',\n 'different'];\nif (x);");
bt("a = ['a','b','c']", "a = ['a', 'b', 'c']");
bt("a = ['a', 'b','c']", "a = ['a', 'b', 'c']");
bt("x = [{'a':0}]",
"x = [{\n 'a': 0\n}]");
bt('{a([[a1]], {b;});}',
'{\n a([[a1]], {\n b;\n });\n}');
bt("a();\n [\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ].toString();",
"a();\n [\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ].toString();");
bt("function() {\n Foo([\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ]);\n}",
"function() {\n Foo([\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ]);\n}");
self.options.keep_array_indentation = False;
bt('a = //comment\n/regex/;');
test_fragment('/*\n * X\n */');
test_fragment('/*\r\n * X\r\n */', '/*\n * X\n */');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}', 'if (a) {\n b;\n} else {\n c;\n}');
bt('var a = new function();');
test_fragment('new function');
self.options.brace_style = 'expand';
bt('//case 1\nif (a == 1)\n{}\n//case 2\nelse if (a == 2)\n{}');
bt('if(1){2}else{3}', "if (1)\n{\n 2\n}\nelse\n{\n 3\n}");
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try\n{\n a();\n}\ncatch (b)\n{\n c();\n}\ncatch (d)\n{}\nfinally\n{\n e();\n}");
bt('if(a){b();}else if(c) foo();',
"if (a)\n{\n b();\n}\nelse if (c) foo();");
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a)\n{\n // comment\n}\nelse\n{\n // comment\n}"); # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x)\n{\n y\n}\nelse\n{\n if (x)\n {\n y\n }\n}');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a)\n{\n b;\n}\nelse\n{\n c;\n}');
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo)\n {\n bar();\n }');
bt('if (foo)\n{}\nelse /regex/.test();');
bt('if (foo) /regex/.test();');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}', 'if (a)\n{\n b;\n}\nelse\n{\n c;\n}');
test_fragment('if (foo) {', 'if (foo)\n{');
test_fragment('foo {', 'foo\n{');
test_fragment('return {', 'return {'); # return needs the brace.
test_fragment('return /* inline */ {', 'return /* inline */ {');
# test_fragment('return\n{', 'return\n{'); # can't support this?, but that's an improbable and extreme case anyway.
test_fragment('return;\n{', 'return;\n{');
bt("throw {}");
bt("throw {\n foo;\n}");
bt('var foo = {}');
bt('if (foo) bar();\nelse break');
bt('function x() {\n foo();\n}zzz', 'function x()\n{\n foo();\n}\nzzz');
bt('a: do {} while (); xxx', 'a: do {} while ();\nxxx');
bt('var a = new function();');
bt('var a = new function() {};');
bt('var a = new function a()\n {};');
test_fragment('new function');
bt("foo({\n 'a': 1\n},\n10);",
"foo(\n {\n 'a': 1\n },\n 10);");
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i)\n{\n return i;\n});');
bt('(function(i) {return i;})();',
'(function(i)\n{\n return i;\n})();');
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
self.options.brace_style = 'collapse';
bt('//case 1\nif (a == 1) {}\n//case 2\nelse if (a == 2) {}');
bt('if(1){2}else{3}', "if (1) {\n 2\n} else {\n 3\n}");
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try {\n a();\n} catch (b) {\n c();\n} catch (d) {} finally {\n e();\n}");
bt('if(a){b();}else if(c) foo();',
"if (a) {\n b();\n} else if (c) foo();");
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a) {\n // comment\n} else {\n // comment\n}"); # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x) {\n y\n} else {\n if (x) {\n y\n }\n}');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a) {\n b;\n} else {\n c;\n}');
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo) {\n bar();\n }');
bt('if (foo) {} else /regex/.test();');
bt('if (foo) /regex/.test();');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}', 'if (a) {\n b;\n} else {\n c;\n}');
test_fragment('if (foo) {', 'if (foo) {');
test_fragment('foo {', 'foo {');
test_fragment('return {', 'return {'); # return needs the brace.
test_fragment('return /* inline */ {', 'return /* inline */ {');
# test_fragment('return\n{', 'return\n{'); # can't support this?, but that's an improbable and extreme case anyway.
test_fragment('return;\n{', 'return; {');
bt("throw {}");
bt("throw {\n foo;\n}");
bt('var foo = {}');
bt('if (foo) bar();\nelse break');
bt('function x() {\n foo();\n}zzz', 'function x() {\n foo();\n}\nzzz');
bt('a: do {} while (); xxx', 'a: do {} while ();\nxxx');
bt('var a = new function();');
bt('var a = new function() {};');
bt('var a = new function a() {};');
test_fragment('new function');
bt("foo({\n 'a': 1\n},\n10);",
"foo({\n 'a': 1\n },\n 10);");
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i) {\n return i;\n});');
bt('(function(i) {return i;})();',
'(function(i) {\n return i;\n})();');
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
self.options.brace_style = "end-expand";
bt('//case 1\nif (a == 1) {}\n//case 2\nelse if (a == 2) {}');
bt('if(1){2}else{3}', "if (1) {\n 2\n}\nelse {\n 3\n}");
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try {\n a();\n}\ncatch (b) {\n c();\n}\ncatch (d) {}\nfinally {\n e();\n}");
bt('if(a){b();}else if(c) foo();',
"if (a) {\n b();\n}\nelse if (c) foo();");
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a) {\n // comment\n}\nelse {\n // comment\n}"); # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x) {\n y\n}\nelse {\n if (x) {\n y\n }\n}');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a) {\n b;\n}\nelse {\n c;\n}');
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo) {\n bar();\n }');
bt('if (foo) {}\nelse /regex/.test();');
bt('if (foo) /regex/.test();');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}', 'if (a) {\n b;\n}\nelse {\n c;\n}');
test_fragment('if (foo) {', 'if (foo) {');
test_fragment('foo {', 'foo {');
test_fragment('return {', 'return {'); # return needs the brace.
test_fragment('return /* inline */ {', 'return /* inline */ {');
# test_fragment('return\n{', 'return\n{'); # can't support this?, but that's an improbable and extreme case anyway.
test_fragment('return;\n{', 'return; {');
bt("throw {}");
bt("throw {\n foo;\n}");
bt('var foo = {}');
bt('if (foo) bar();\nelse break');
bt('function x() {\n foo();\n}zzz', 'function x() {\n foo();\n}\nzzz');
bt('a: do {} while (); xxx', 'a: do {} while ();\nxxx');
bt('var a = new function();');
bt('var a = new function() {};');
bt('var a = new function a() {};');
test_fragment('new function');
bt("foo({\n 'a': 1\n},\n10);",
"foo({\n 'a': 1\n },\n 10);");
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i) {\n return i;\n});');
bt('(function(i) {return i;})();',
'(function(i) {\n return i;\n})();');
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
self.options.brace_style = 'collapse';
bt('a = <?= external() ?> ;'); # not the most perfect thing in the world, but you're the weirdo beautifying php mix-ins with javascript beautifier
bt('a = <%= external() %> ;');
test_fragment('roo = {\n /*\n ****\n FOO\n ****\n */\n BAR: 0\n};');
test_fragment("if (zz) {\n // ....\n}\n(function");
self.options.preserve_newlines = True;
bt('var a = 42; // foo\n\nvar b;')
bt('var a = 42; // foo\n\n\nvar b;')
bt("var a = 'foo' +\n 'bar';");
bt("var a = \"foo\" +\n \"bar\";");
bt('"foo""bar""baz"', '"foo"\n"bar"\n"baz"')
bt("'foo''bar''baz'", "'foo'\n'bar'\n'baz'")
bt("{\n get foo() {}\n}")
bt("{\n var a = get\n foo();\n}")
bt("{\n set foo() {}\n}")
bt("{\n var a = set\n foo();\n}")
bt("var x = {\n get function()\n}")
bt("var x = {\n set function()\n}")
bt("var x = set\n\nfunction() {}", "var x = set\n\n function() {}")
bt('<!-- foo\nbar();\n-->')
bt('<!-- dont crash')
bt('for () /abc/.test()')
bt('if (k) /aaa/m.test(v) && l();')
bt('switch (true) {\n case /swf/i.test(foo):\n bar();\n}')
bt('createdAt = {\n type: Date,\n default: Date.now\n}')
bt('switch (createdAt) {\n case a:\n Date,\n default:\n Date.now\n}')
bt('return function();')
bt('var a = function();')
bt('var a = 5 + function();')
bt('{\n foo // something\n ,\n bar // something\n baz\n}')
bt('function a(a) {} function b(b) {} function c(c) {}', 'function a(a) {}\n\nfunction b(b) {}\n\nfunction c(c) {}')
bt('3.*7;', '3. * 7;')
bt('import foo.*;', 'import foo.*;') # actionscript's import
test_fragment('function f(a: a, b: b)') # actionscript
bt('foo(a, function() {})');
bt('foo(a, /regex/)');
bt('/* foo */\n"x"');
self.options.break_chained_methods = False
self.options.preserve_newlines = False
bt('foo\n.bar()\n.baz().cucumber(fat)', 'foo.bar().baz().cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat); foo.bar().baz().cucumber(fat)', 'foo.bar().baz().cucumber(fat);\nfoo.bar().baz().cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat)\n foo.bar().baz().cucumber(fat)', 'foo.bar().baz().cucumber(fat)\nfoo.bar().baz().cucumber(fat)');
bt('this\n.something = foo.bar()\n.baz().cucumber(fat)', 'this.something = foo.bar().baz().cucumber(fat)');
bt('this.something.xxx = foo.moo.bar()');
bt('this\n.something\n.xxx = foo.moo\n.bar()', 'this.something.xxx = foo.moo.bar()');
self.options.break_chained_methods = False
self.options.preserve_newlines = True
bt('foo\n.bar()\n.baz().cucumber(fat)', 'foo\n .bar()\n .baz().cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat); foo.bar().baz().cucumber(fat)', 'foo\n .bar()\n .baz().cucumber(fat);\nfoo.bar().baz().cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat)\n foo.bar().baz().cucumber(fat)', 'foo\n .bar()\n .baz().cucumber(fat)\nfoo.bar().baz().cucumber(fat)');
bt('this\n.something = foo.bar()\n.baz().cucumber(fat)', 'this\n .something = foo.bar()\n .baz().cucumber(fat)');
bt('this.something.xxx = foo.moo.bar()');
bt('this\n.something\n.xxx = foo.moo\n.bar()', 'this\n .something\n .xxx = foo.moo\n .bar()');
self.options.break_chained_methods = True
self.options.preserve_newlines = False
bt('foo\n.bar()\n.baz().cucumber(fat)', 'foo.bar()\n .baz()\n .cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat); foo.bar().baz().cucumber(fat)', 'foo.bar()\n .baz()\n .cucumber(fat);\nfoo.bar()\n .baz()\n .cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat)\n foo.bar().baz().cucumber(fat)', 'foo.bar()\n .baz()\n .cucumber(fat)\nfoo.bar()\n .baz()\n .cucumber(fat)');
bt('this\n.something = foo.bar()\n.baz().cucumber(fat)', 'this.something = foo.bar()\n .baz()\n .cucumber(fat)');
bt('this.something.xxx = foo.moo.bar()');
bt('this\n.something\n.xxx = foo.moo\n.bar()', 'this.something.xxx = foo.moo.bar()');
self.options.break_chained_methods = True
self.options.preserve_newlines = True
bt('foo\n.bar()\n.baz().cucumber(fat)', 'foo\n .bar()\n .baz()\n .cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat); foo.bar().baz().cucumber(fat)', 'foo\n .bar()\n .baz()\n .cucumber(fat);\nfoo.bar()\n .baz()\n .cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat)\n foo.bar().baz().cucumber(fat)', 'foo\n .bar()\n .baz()\n .cucumber(fat)\nfoo.bar()\n .baz()\n .cucumber(fat)');
bt('this\n.something = foo.bar()\n.baz().cucumber(fat)', 'this\n .something = foo.bar()\n .baz()\n .cucumber(fat)');
bt('this.something.xxx = foo.moo.bar()');
bt('this\n.something\n.xxx = foo.moo\n.bar()', 'this\n .something\n .xxx = foo.moo\n .bar()');
self.options.break_chained_methods = False
self.options.preserve_newlines = False
self.options.preserve_newlines = False
self.options.wrap_line_length = 0
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_.okay();')
self.options.wrap_line_length = 70
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_.okay();');
self.options.wrap_line_length = 40
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat &&\n' +
' "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_.okay();');
self.options.wrap_line_length = 41
# NOTE: wrap is only best effort - line continues until next wrap point is found.
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") ||\n' +
' (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_.okay();');
self.options.wrap_line_length = 45
# NOTE: wrap is only best effort - line continues until next wrap point is found.
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('{\n' +
' foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
' if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();\n' +
'}',
# expected #
'{\n' +
' foo.bar().baz().cucumber((fat && "sassy") ||\n' +
' (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_.okay();\n' +
'}');
self.options.preserve_newlines = True
self.options.wrap_line_length = 0
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n' +
' .okay();');
self.options.wrap_line_length = 70
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n' +
' .okay();');
self.options.wrap_line_length = 40
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat &&\n' +
' "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();');
self.options.wrap_line_length = 41
# NOTE: wrap is only best effort - line continues until next wrap point is found.
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") ||\n' +
' (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();');
self.options.wrap_line_length = 45
# NOTE: wrap is only best effort - line continues until next wrap point is found.
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('{\n' +
' foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
' if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();\n' +
'}',
# expected #
'{\n' +
' foo.bar().baz().cucumber((fat && "sassy") ||\n' +
' (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();\n' +
'}');
self.options.wrap_line_length = 0
self.options.preserve_newlines = False
bt('if (foo) // comment\n bar();');
bt('if (foo) // comment\n (bar());');
bt('if (foo) // comment\n (bar());');
bt('if (foo) // comment\n /asdf/;');
bt('this.oa = new OAuth(\n' +
' _requestToken,\n' +
' _accessToken,\n' +
' consumer_key\n' +
');',
'this.oa = new OAuth(_requestToken, _accessToken, consumer_key);');
bt('foo = {\n x: y, // #44\n w: z // #44\n}');
bt('switch (x) {\n case "a":\n // comment on newline\n break;\n case "b": // comment on same line\n break;\n}');
# these aren't ready yet.
#bt('if (foo) // comment\n bar() /*i*/ + baz() /*j\n*/ + asdf();');
bt('if\n(foo)\nif\n(bar)\nif\n(baz)\nwhee();\na();',
'if (foo)\n if (bar)\n if (baz) whee();\na();');
bt('if\n(foo)\nif\n(bar)\nif\n(baz)\nwhee();\nelse\na();',
'if (foo)\n if (bar)\n if (baz) whee();\n else a();');
bt('if (foo)\nbar();\nelse\ncar();',
'if (foo) bar();\nelse car();');
bt('if (foo) if (bar) if (baz);\na();',
'if (foo)\n if (bar)\n if (baz);\na();');
bt('if (foo) if (bar) if (baz) whee();\na();',
'if (foo)\n if (bar)\n if (baz) whee();\na();');
bt('if (foo) a()\nif (bar) if (baz) whee();\na();',
'if (foo) a()\nif (bar)\n if (baz) whee();\na();');
bt('if (foo);\nif (bar) if (baz) whee();\na();',
'if (foo);\nif (bar)\n if (baz) whee();\na();');
bt('if (options)\n' +
' for (var p in options)\n' +
' this[p] = options[p];',
'if (options)\n'+
' for (var p in options) this[p] = options[p];');
bt('if (options) for (var p in options) this[p] = options[p];',
'if (options)\n for (var p in options) this[p] = options[p];');
bt('if (options) do q(); while (b());',
'if (options)\n do q(); while (b());');
bt('if (options) while (b()) q();',
'if (options)\n while (b()) q();');
bt('if (options) do while (b()) q(); while (a());',
'if (options)\n do\n while (b()) q(); while (a());');
bt('function f(a, b, c,\nd, e) {}',
'function f(a, b, c, d, e) {}');
bt('function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\nfunction g(a, b) {\n if (!a) b()\n}');
bt('function f(a,b) {if(a) b()}\n\n\n\nfunction g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\nfunction g(a, b) {\n if (!a) b()\n}');
# This is not valid syntax, but we still want to behave reasonably and avoid side effects
bt('(if(a) b())(if(a) b())',
'(\n if (a) b())(\n if (a) b())');
bt('(if(a) b())\n\n\n(if(a) b())',
'(\n if (a) b())\n(\n if (a) b())');
bt("if\n(a)\nb();", "if (a) b();");
bt('var a =\nfoo', 'var a = foo');
bt('var a = {\n"a":1,\n"b":2}', "var a = {\n \"a\": 1,\n \"b\": 2\n}");
bt("var a = {\n'a':1,\n'b':2}", "var a = {\n 'a': 1,\n 'b': 2\n}");
bt('var a = /*i*/ "b";');
bt('var a = /*i*/\n"b";', 'var a = /*i*/ "b";');
bt('var a = /*i*/\nb;', 'var a = /*i*/ b;');
bt('{\n\n\n"x"\n}', '{\n "x"\n}');
bt('if(a &&\nb\n||\nc\n||d\n&&\ne) e = f', 'if (a && b || c || d && e) e = f');
bt('if(a &&\n(b\n||\nc\n||d)\n&&\ne) e = f', 'if (a && (b || c || d) && e) e = f');
test_fragment('\n\n"x"', '"x"');
bt('a = 1;\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nb = 2;',
'a = 1;\nb = 2;');
self.options.preserve_newlines = True
bt('if (foo) // comment\n bar();');
bt('if (foo) // comment\n (bar());');
bt('if (foo) // comment\n (bar());');
bt('if (foo) // comment\n /asdf/;');
bt('this.oa = new OAuth(\n' +
' _requestToken,\n' +
' _accessToken,\n' +
' consumer_key\n' +
');');
bt('foo = {\n x: y, // #44\n w: z // #44\n}');
bt('switch (x) {\n case "a":\n // comment on newline\n break;\n case "b": // comment on same line\n break;\n}');
# these aren't ready yet.
# bt('if (foo) // comment\n bar() /*i*/ + baz() /*j\n*/ + asdf();');
bt('if\n(foo)\nif\n(bar)\nif\n(baz)\nwhee();\na();',
'if (foo)\n if (bar)\n if (baz)\n whee();\na();');
bt('if\n(foo)\nif\n(bar)\nif\n(baz)\nwhee();\nelse\na();',
'if (foo)\n if (bar)\n if (baz)\n whee();\n else\n a();');
bt('if (foo) bar();\nelse\ncar();',
'if (foo) bar();\nelse\n car();');
bt('if (foo) if (bar) if (baz);\na();',
'if (foo)\n if (bar)\n if (baz);\na();');
bt('if (foo) if (bar) if (baz) whee();\na();',
'if (foo)\n if (bar)\n if (baz) whee();\na();');
bt('if (foo) a()\nif (bar) if (baz) whee();\na();',
'if (foo) a()\nif (bar)\n if (baz) whee();\na();');
bt('if (foo);\nif (bar) if (baz) whee();\na();',
'if (foo);\nif (bar)\n if (baz) whee();\na();');
bt('if (options)\n' +
' for (var p in options)\n' +
' this[p] = options[p];');
bt('if (options) for (var p in options) this[p] = options[p];',
'if (options)\n for (var p in options) this[p] = options[p];');
bt('if (options) do q(); while (b());',
'if (options)\n do q(); while (b());');
bt('if (options) do; while (b());',
'if (options)\n do; while (b());');
bt('if (options) while (b()) q();',
'if (options)\n while (b()) q();');
bt('if (options) do while (b()) q(); while (a());',
'if (options)\n do\n while (b()) q(); while (a());');
bt('function f(a, b, c,\nd, e) {}',
'function f(a, b, c,\n d, e) {}');
bt('function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\nfunction g(a, b) {\n if (!a) b()\n}');
bt('function f(a,b) {if(a) b()}\n\n\n\nfunction g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\n\n\nfunction g(a, b) {\n if (!a) b()\n}');
# This is not valid syntax, but we still want to behave reasonably and avoid side effects
bt('(if(a) b())(if(a) b())',
'(\n if (a) b())(\n if (a) b())');
bt('(if(a) b())\n\n\n(if(a) b())',
'(\n if (a) b())\n\n\n(\n if (a) b())');
bt("if\n(a)\nb();", "if (a)\n b();");
bt('var a =\nfoo', 'var a =\n foo');
bt('var a = {\n"a":1,\n"b":2}', "var a = {\n \"a\": 1,\n \"b\": 2\n}");
bt("var a = {\n'a':1,\n'b':2}", "var a = {\n 'a': 1,\n 'b': 2\n}");
bt('var a = /*i*/ "b";');
bt('var a = /*i*/\n"b";', 'var a = /*i*/\n "b";');
bt('var a = /*i*/\nb;', 'var a = /*i*/\n b;');
bt('{\n\n\n"x"\n}', '{\n\n\n "x"\n}');
bt('if(a &&\nb\n||\nc\n||d\n&&\ne) e = f', 'if (a &&\n b ||\n c || d &&\n e) e = f');
bt('if(a &&\n(b\n||\nc\n||d)\n&&\ne) e = f', 'if (a &&\n (b ||\n c || d) &&\n e) e = f');
test_fragment('\n\n"x"', '"x"');
# this behavior differs between js and python: it defaults to unlimited in js, 10 in python
bt('a = 1;\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nb = 2;',
'a = 1;\n\n\n\n\n\n\n\n\n\nb = 2;');
self.options.max_preserve_newlines = 8;
bt('a = 1;\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nb = 2;',
'a = 1;\n\n\n\n\n\n\n\nb = 2;');
# Test the option to have spaces within parens
self.options.space_in_paren = False
bt('if(p) foo(a,b)', 'if (p) foo(a, b)');
bt('try{while(true){willThrow()}}catch(result)switch(result){case 1:++result }',
'try {\n while (true) {\n willThrow()\n }\n} catch (result) switch (result) {\n case 1:\n ++result\n}');
bt('((e/((a+(b)*c)-d))^2)*5;', '((e / ((a + (b) * c) - d)) ^ 2) * 5;');
bt('function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\nfunction g(a, b) {\n if (!a) b()\n}');
bt('a=[];',
'a = [];');
bt('a=[b,c,d];',
'a = [b, c, d];');
bt('a= f[b];',
'a = f[b];');
self.options.space_in_paren = True
bt('if(p) foo(a,b)', 'if ( p ) foo( a, b )');
bt('try{while(true){willThrow()}}catch(result)switch(result){case 1:++result }',
'try {\n while ( true ) {\n willThrow( )\n }\n} catch ( result ) switch ( result ) {\n case 1:\n ++result\n}');
bt('((e/((a+(b)*c)-d))^2)*5;', '( ( e / ( ( a + ( b ) * c ) - d ) ) ^ 2 ) * 5;');
bt('function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
'function f( a, b ) {\n if ( a ) b( )\n}\n\nfunction g( a, b ) {\n if ( !a ) b( )\n}');
bt('a=[ ];',
'a = [ ];');
bt('a=[b,c,d];',
'a = [ b, c, d ];');
bt('a= f[b];',
'a = f[ b ];');
self.options.space_in_paren = False
# Test that e4x literals passed through when e4x-option is enabled
bt('xml=<a b="c"><d/><e>\n foo</e>x</a>;', 'xml = < a b = "c" > < d / > < e >\n foo < /e>x</a > ;');
self.options.e4x = True
bt('xml=<a b="c"><d/><e>\n foo</e>x</a>;', 'xml = <a b="c"><d/><e>\n foo</e>x</a>;');
bt('<a b=\'This is a quoted "c".\'/>', '<a b=\'This is a quoted "c".\'/>');
bt('<a b="This is a quoted \'c\'."/>', '<a b="This is a quoted \'c\'."/>');
bt('<a b="A quote \' inside string."/>', '<a b="A quote \' inside string."/>');
bt('<a b=\'A quote " inside string.\'/>', '<a b=\'A quote " inside string.\'/>');
bt('<a b=\'Some """ quotes "" inside string.\'/>', '<a b=\'Some """ quotes "" inside string.\'/>');
# Handles inline expressions
bt('xml=<{a} b="c"><d/><e v={z}>\n foo</e>x</{a}>;', 'xml = <{a} b="c"><d/><e v={z}>\n foo</e>x</{a}>;');
# Handles CDATA
bt('xml=<a b="c"><![CDATA[d/>\n</a></{}]]></a>;', 'xml = <a b="c"><![CDATA[d/>\n</a></{}]]></a>;');
bt('xml=<![CDATA[]]>;', 'xml = <![CDATA[]]>;');
bt('xml=<![CDATA[ b="c"><d/><e v={z}>\n foo</e>x/]]>;', 'xml = <![CDATA[ b="c"><d/><e v={z}>\n foo</e>x/]]>;');
# Handles messed up tags, as long as it isn't the same name
# as the root tag. Also handles tags of same name as root tag
# as long as nesting matches.
bt('xml=<a x="jn"><c></b></f><a><d jnj="jnn"><f></a ></nj></a>;',
'xml = <a x="jn"><c></b></f><a><d jnj="jnn"><f></a ></nj></a>;');
# If xml is not terminated, the remainder of the file is treated
# as part of the xml-literal (passed through unaltered)
test_fragment('xml=<a></b>\nc<b;', 'xml = <a></b>\nc<b;');
self.options.e4x = False
# START tests for issue 241
bt('obj\n' +
' .last({\n' +
' foo: 1,\n' +
' bar: 2\n' +
' });\n' +
'var test = 1;');
bt('obj\n' +
' .last(a, function() {\n' +
' var test;\n' +
' });\n' +
'var test = 1;');
bt('obj.first()\n' +
' .second()\n' +
' .last(function(err, response) {\n' +
' console.log(err);\n' +
' });');
# END tests for issue 241
# START tests for issue 268 and 275
bt('obj.last(a, function() {\n' +
' var test;\n' +
'});\n' +
'var test = 1;');
bt('obj.last(a,\n' +
' function() {\n' +
' var test;\n' +
' });\n' +
'var test = 1;');
bt('(function() {if (!window.FOO) window.FOO || (window.FOO = function() {var b = {bar: "zort"};});})();',
'(function() {\n' +
' if (!window.FOO) window.FOO || (window.FOO = function() {\n' +
' var b = {\n' +
' bar: "zort"\n' +
' };\n' +
' });\n' +
'})();');
# END tests for issue 268 and 275
# START tests for issue 281
bt('define(["dojo/_base/declare", "my/Employee", "dijit/form/Button",\n' +
' "dojo/_base/lang", "dojo/Deferred"\n' +
'], function(declare, Employee, Button, lang, Deferred) {\n' +
' return declare(Employee, {\n' +
' constructor: function() {\n' +
' new Button({\n' +
' onClick: lang.hitch(this, function() {\n' +
' new Deferred().then(lang.hitch(this, function() {\n' +
' this.salary * 0.25;\n' +
' }));\n' +
' })\n' +
' });\n' +
' }\n' +
' });\n' +
'});');
bt('define(["dojo/_base/declare", "my/Employee", "dijit/form/Button",\n' +
' "dojo/_base/lang", "dojo/Deferred"\n' +
' ],\n' +
' function(declare, Employee, Button, lang, Deferred) {\n' +
' return declare(Employee, {\n' +
' constructor: function() {\n' +
' new Button({\n' +
' onClick: lang.hitch(this, function() {\n' +
' new Deferred().then(lang.hitch(this, function() {\n' +
' this.salary * 0.25;\n' +
' }));\n' +
' })\n' +
' });\n' +
' }\n' +
' });\n' +
' });');
# END tests for issue 281
# This is what I think these should look like; related to #256
# we don't have the ability yet
#bt('var a=1,b={bang:2},c=3;',
# 'var a = 1,\n b = {\n bang: 2\n },\n c = 3;');
#bt('var a={bing:1},b=2,c=3;',
# 'var a = {\n bing: 1\n },\n b = 2,\n c = 3;');
def decodesto(self, input, expectation=None):
self.assertEqual(
jsbeautifier.beautify(input, self.options), expectation or input)
# if the expected output differs from the input, run it again:
# beautifying the expected output must leave it unchanged (idempotence).
if expectation is not None:
self.assertEqual(
jsbeautifier.beautify(expectation, self.options), expectation)
def wrap(self, text):
return self.wrapregex.sub(' \\1', text)
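# bt() asserts that `input` beautifies to `expectation` (or is unchanged),
# then, at the default indent size and for non-empty input, re-runs the check
# with the source wrapped in an enclosing block via wrap() above, to verify
# that indentation is applied uniformly.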
def bt(self, input, expectation=None):
expectation = expectation or input
self.decodesto(input, expectation)
if self.options.indent_size == 4 and input:
wrapped_input = '{\n%s\nfoo=bar;}' % self.wrap(input)
wrapped_expect = '{\n%s\n foo = bar;\n}' % self.wrap(expectation)
self.decodesto(wrapped_input, wrapped_expect)
@classmethod
def setUpClass(cls):
options = jsbeautifier.default_options()
options.indent_size = 4
options.indent_char = ' '
options.preserve_newlines = True
options.jslint_happy = False
options.keep_array_indentation = False
options.brace_style = 'collapse'
options.indent_level = 0
options.break_chained_methods = False
cls.options = options
cls.wrapregex = re.compile('^(.+)$', re.MULTILINE)
if __name__ == '__main__':
unittest.main()
|
JT5D/Alfred-Popclip-Sublime
|
Sublime Text 2/JsFormat/libs/jsbeautifier/tests/testjsbeautifier.py
|
Python
|
gpl-2.0
| 64,176
|
# misc.py
# Copyright (C) 2012-2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
"""
Assorted utility functions for yum.
"""
from __future__ import print_function, absolute_import
from __future__ import unicode_literals
from dnf.pycomp import base64_decodebytes, basestring, unicode
from stat import *
import libdnf.utils
import dnf.const
import dnf.crypto
import dnf.exceptions
import dnf.i18n
import errno
import glob
import io
import os
import os.path
import pwd
import re
import shutil
import tempfile
_default_checksums = ['sha256']
_re_compiled_glob_match = None
def re_glob(s):
""" Tests if a string is a shell wildcard. """
global _re_compiled_glob_match
if _re_compiled_glob_match is None:
_re_compiled_glob_match = re.compile(r'[*?]|\[.+\]').search
return _re_compiled_glob_match(s)
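# Illustrative examples (not part of the original module):
# re_glob('foo*') and re_glob('foo[ab]') return a match (wildcard present);
# re_glob('foo') returns None.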
_re_compiled_full_match = None
def re_full_search_needed(s):
""" Tests if a string needs a full nevra match, instead of just name. """
global _re_compiled_full_match
if _re_compiled_full_match is None:
# A glob, or a "." or "-" separator, followed by something (the ".")
one = re.compile(r'.*([-.*?]|\[.+\]).').match
# Any epoch, for envra
two = re.compile('[0-9]+:').match
_re_compiled_full_match = (one, two)
for rec in _re_compiled_full_match:
if rec(s):
return True
return False
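# Illustrative examples (not part of the original module):
# re_full_search_needed('yum-3.4.3') -> True (separator followed by text)
# re_full_search_needed('3:foo') -> True (leading epoch)
# re_full_search_needed('yum') -> False (plain name)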
def get_default_chksum_type():
return _default_checksums[0]
class GenericHolder(object):
"""Generic Holder class used to hold other objects of known types
It exists purely to be able to do object.somestuff, object.someotherstuff
or object[key] and pass object to another function that will
understand it"""
def __init__(self, iter=None):
self.__iter = iter
def __iter__(self):
if self.__iter is not None:
return iter(self[self.__iter])
def __getitem__(self, item):
if hasattr(self, item):
return getattr(self, item)
else:
raise KeyError(item)
def all_lists(self):
"""Return a dictionary of all lists."""
return {key: list_ for key, list_ in vars(self).items()
if type(list_) is list}
def merge_lists(self, other):
""" Concatenate the list attributes from 'other' to ours. """
for (key, val) in other.all_lists().items():
vars(self).setdefault(key, []).extend(val)
return self
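# Minimal usage sketch (illustrative; attribute names are hypothetical):
# h = GenericHolder(iter='installed')
# h.installed = ['pkg-a']
# other = GenericHolder(); other.installed = ['pkg-b']
# h.merge_lists(other) # h.installed is now ['pkg-a', 'pkg-b']
# list(h) # iterates h['installed'] because of iter='installed'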
def procgpgkey(rawkey):
'''Convert ASCII-armored GPG key to binary
'''
# Normalise newlines
rawkey = re.sub(b'\r\n?', b'\n', rawkey)
# Extract block
block = io.BytesIO()
inblock = 0
pastheaders = 0
for line in rawkey.split(b'\n'):
if line.startswith(b'-----BEGIN PGP PUBLIC KEY BLOCK-----'):
inblock = 1
elif inblock and line.strip() == b'':
pastheaders = 1
elif inblock and line.startswith(b'-----END PGP PUBLIC KEY BLOCK-----'):
# Hit the end of the block, get out
break
elif pastheaders and line.startswith(b'='):
# Hit the CRC line, don't include this and stop
break
elif pastheaders:
block.write(line + b'\n')
# Decode and return
return base64_decodebytes(block.getvalue())
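# Usage sketch (illustrative; the file name is hypothetical, and the key
# must be read as bytes since the function splits on b'\n'):
# with open('RPM-GPG-KEY-example', 'rb') as keyfile:
# binkey = procgpgkey(keyfile.read())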
def keyInstalled(ts, keyid, timestamp):
'''
Return whether the GPG key described by the given keyid and timestamp is
installed in the rpmdb.
The keyid and timestamp should both be passed as integers.
The ts is an rpm transaction set object
Return values:
- -1 key is not installed
- 0 key with matching ID and timestamp is installed
- 1 key with matching ID is installed but has an older timestamp
- 2 key with matching ID is installed but has a newer timestamp
No effort is made to handle duplicates. The first matching keyid is used to
calculate the return result.
'''
# Search
for hdr in ts.dbMatch('name', 'gpg-pubkey'):
if hdr['version'] == keyid:
installedts = int(hdr['release'], 16)
if installedts == timestamp:
return 0
elif installedts < timestamp:
return 1
else:
return 2
return -1
def import_key_to_pubring(rawkey, keyid, gpgdir=None, make_ro_copy=True):
if not os.path.exists(gpgdir):
os.makedirs(gpgdir)
with dnf.crypto.pubring_dir(gpgdir), dnf.crypto.Context() as ctx:
# import the key
with open(os.path.join(gpgdir, 'gpg.conf'), 'wb') as fp:
fp.write(b'')
ctx.op_import(rawkey)
if make_ro_copy:
rodir = gpgdir + '-ro'
if not os.path.exists(rodir):
os.makedirs(rodir, mode=0o755)
for f in glob.glob(gpgdir + '/*'):
basename = os.path.basename(f)
ro_f = rodir + '/' + basename
shutil.copy(f, ro_f)
os.chmod(ro_f, 0o755)
# yes it is this stupid, why do you ask?
opts = """lock-never
no-auto-check-trustdb
trust-model direct
no-expensive-trust-checks
no-permission-warning
preserve-permissions
"""
# the third positional argument to open() is buffering, not a mode,
# so write the file first and set its mode explicitly afterwards
ro_conf = os.path.join(rodir, 'gpg.conf')
with open(ro_conf, 'w') as fp:
fp.write(opts)
os.chmod(ro_conf, 0o755)
return True
def getCacheDir():
"""return a path to a valid and safe cachedir - only used when not running
as root or when --tempcache is set"""
uid = os.geteuid()
try:
usertup = pwd.getpwuid(uid)
username = dnf.i18n.ucd(usertup[0])
prefix = '%s-%s-' % (dnf.const.PREFIX, username)
except KeyError:
prefix = '%s-%s-' % (dnf.const.PREFIX, uid)
# check for /var/tmp/prefix-* -
dirpath = '%s/%s*' % (dnf.const.TMPDIR, prefix)
cachedirs = sorted(glob.glob(dirpath))
for thisdir in cachedirs:
stats = os.lstat(thisdir)
if S_ISDIR(stats[0]) and S_IMODE(stats[0]) == 0o700 and stats[4] == uid: # a 0o700 dir owned by us
return thisdir
# make the dir (tempfile.mkdtemp())
cachedir = tempfile.mkdtemp(prefix=prefix, dir=dnf.const.TMPDIR)
return cachedir
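# e.g. (illustrative) this typically yields something like
# '/var/tmp/dnf-<username>-<random>' owned by the caller with mode 0700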
def seq_max_split(seq, max_entries):
""" Given a seq, split into a list of lists of length max_entries each. """
ret = []
num = len(seq)
seq = list(seq) # Trying to use a set/etc. here is bad
beg = 0
while num > max_entries:
end = beg + max_entries
ret.append(seq[beg:end])
beg += max_entries
num -= max_entries
ret.append(seq[beg:])
return ret
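# Example (illustrative): seq_max_split([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]]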
def unlink_f(filename):
""" Call os.unlink, but don't die if the file isn't there. This is the main
difference between "rm -f" and plain "rm". """
try:
os.unlink(filename)
except OSError as e:
if e.errno != errno.ENOENT:
raise
def stat_f(filename, ignore_EACCES=False):
""" Call os.stat(), but don't die if the file isn't there. Returns None. """
try:
return os.stat(filename)
except OSError as e:
if e.errno in (errno.ENOENT, errno.ENOTDIR):
return None
if ignore_EACCES and e.errno == errno.EACCES:
return None
raise
def _getloginuid():
""" Get the audit-uid/login-uid, if available. os.getuid() is returned
instead if there was a problem. Note that no caching is done here. """
# We might normally call audit.audit_getloginuid(), except that requires
# importing all of the audit module. And it doesn't work anyway: BZ 518721
try:
with open("/proc/self/loginuid") as fo:
data = fo.read()
return int(data)
except (IOError, ValueError):
return os.getuid()
_cached_getloginuid = None
def getloginuid():
""" Get the audit-uid/login-uid, if available. os.getuid() is returned
instead if there was a problem. The value is cached, so you don't
have to save it. """
global _cached_getloginuid
if _cached_getloginuid is None:
_cached_getloginuid = _getloginuid()
return _cached_getloginuid
def decompress(filename, dest=None, check_timestamps=False):
"""take a filename and decompress it into the same relative location.
When the compression type is not recognized (or file is not compressed),
the content of the file is copied to the destination"""
if dest:
out = dest
else:
out = None
dot_pos = filename.rfind('.')
if dot_pos > 0:
ext = filename[dot_pos:]
if ext in ('.zck', '.xz', '.bz2', '.gz', '.lzma', '.zst'):
out = filename[:dot_pos]
if out is None:
raise dnf.exceptions.MiscError("Could not determine destination filename")
if check_timestamps:
fi = stat_f(filename)
fo = stat_f(out)
if fi and fo and fo.st_mtime == fi.st_mtime:
return out
try:
# libdnf.utils.decompress either decompress file to the destination or
# copy the content if the compression type is not recognized
libdnf.utils.decompress(filename, out, 0o644)
except RuntimeError as e:
raise dnf.exceptions.MiscError(str(e))
if check_timestamps and fi:
os.utime(out, (fi.st_mtime, fi.st_mtime))
return out
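# Usage sketch (illustrative paths):
# decompress('/tmp/primary.xml.gz') decompresses to '/tmp/primary.xml'
# and returns that path; an unrecognized extension raises MiscError
# unless an explicit dest is given.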
def read_in_items_from_dot_dir(thisglob, line_as_list=True):
""" Takes a glob of a dir (like /etc/foo.d/\\*.foo) returns a list of all
the lines in all the files matching that glob, ignores comments and blank
lines; the optional parameter 'line_as_list' tells whether to treat each line
as a space or comma-separated list, defaults to True.
"""
results = []
for fname in glob.glob(thisglob):
with open(fname) as f:
for line in f:
if re.match(r'\s*(#|$)', line):
continue
line = line.rstrip() # no more trailing \n's
line = line.lstrip() # be nice
if not line:
continue
if line_as_list:
line = line.replace('\n', ' ')
line = line.replace(',', ' ')
results.extend(line.split())
continue
results.append(line)
return results
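# Example (illustrative): with /etc/foo.d/a.foo containing the line
# "one two,three" plus a '#' comment line, the call
# read_in_items_from_dot_dir('/etc/foo.d/*.foo') returns ['one', 'two', 'three'];
# with line_as_list=False it returns the whole line as a single item.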
|
rpm-software-management/dnf
|
dnf/yum/misc.py
|
Python
|
gpl-2.0
| 11,251
|
#!/usr/bin/python
import os
import sys
import math
import glob
import sql
import process_xls as p_xls
""" Change to whatever is needed. """
DEFAULT_DATE_STR = ''
DB_NAME = 'trost_prod'
TABLE_NAME = 'plants2'
TABLE = [
'id INT(11) AUTO_INCREMENT',
'aliquot INT(11)',
'name VARCHAR(45)',
'subspecies_id INT(11)',
'location_id INT(11)',
'culture_id INT(11)',
'sampleid INT(11)',
'description TEXT',
'created DATETIME',
'PRIMARY KEY(id)']
columns_d = {
'Aliquot_Id': (0, 'aliquot', int),
'Name': (1, 'name', str),
'Subspecies_Id': (2, 'subspecies_id', int),
'Location_Id': (3, 'location_id', int),
'Culture_Id': (4, 'culture_id', int),
'Sample_Id': (5, 'sampleid', int),
'Description': (6, 'description', str),
'created': (7, 'created', str)}
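# e.g. (illustrative) the xls column 'Aliquot_Id' lands in table column
# 'aliquot' at position 0, with cell values coerced via int().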
###
def main(argv):
if len(argv) == 0:
sys.stderr.write('Missing input directory.\nUsage: python create_plants2table.py <dir>\n')
sys.exit(1)
sql.write_sql_header(DB_NAME, TABLE_NAME, TABLE)
dir_name = argv[0]
fn = '%s/%s' % (dir_name, 'current_plants.xls')
data, headers = p_xls.read_xls_data(fn)
"""
Some plants do not have a subspecies id, causing trouble
further downstream. Hence, I inserted a dummy row into the
subspecies table:
insert into subspecies values(NULL, -1, 1, 'UNKNOWN', NULL, NULL,
NULL, NULL);
"""
for dobj in data:
dobj.created = DEFAULT_DATE_STR
if dobj.Subspecies_Id == '':
dobj.Subspecies_Id = -1
"""
Table writing logic is specific to this table,
therefore it does not use sql.write_sql_table.
"""
for row in sql.prepare_sql_table(data, columns_d):
# print row
try:
"""
This adds the required values for subspecies.limsid
and locations.limsid to the insert query.
TODO: culture-id!, possibly sample-id!
"""
entry = [x[2](x[3])
for x in row[1:3] + row[5:7]]
entry += (int(row[3][3]), int(row[4][3]))
entry = tuple(entry)
sys.stdout.write('%s\n' % (sql.INSERT_PLANTS2_STR % entry))
except Exception as e:
sys.stderr.write('EXC: %s (%s)\n' % (row, e))
sys.exit(1)
return None
if __name__ == '__main__': main(sys.argv[1:])
|
ingkebil/trost
|
scripts/create_tables/create_plants2table.py
|
Python
|
gpl-2.0
| 2,360
|
#!/usr/bin/python
'''
*
* Copyright (C) 2013 Simone Denei <simone.denei@gmail.com>
*
* This file is part of pyrsyncgui.
*
* pyrsyncgui is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* pyrsyncgui is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with pyrsyncgui. If not, see <http://www.gnu.org/licenses/>.
*
'''
import PyQt4.QtCore as core
import PyQt4.QtGui as gui
import PyQt4.uic as uic
from PyQt4.QtCore import pyqtSlot
from PyQt4.QtCore import pyqtSignal
import sys
import os
import inspect
from os import path, environ
import configuration as conf
import dispatcher as disp
from addsyncwizard import addsyncwizard
from managesyncwizard import managesyncwizard
from configurationwizard import configurationwizard
from backupengine import backupengine
import platform
#######################################
# Configuration format #
#######################################
#
# GUI Configuration
#
# PollingTime Number - seconds between each server check
# RsyncCmd String - path to the rsync command
#
# Server Configuration
#
# servername - Dictionary key identifying the server entry
# Address String
# Username (String,None)
# Password (String,None)
# Transportation int - 0 Daemon, 1 ssh
#
# Sync Configuration
#
# syncname - Dictionary key identifying the sync entry
# Server (String,None) - if None, the destination is a hard disk
# DiskID (String,None)
# Source String
# Enabled bool
# Destination String
# NeedsUpdate bool
# Username (String,None)
# Password (String,None)
#
# ServerStatus 0 Disconnected, 1 Partial connected, 2 Connected
# BackupStatus 0 Updated, 1 Updating, 2 Paused
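# Example entries (illustrative values only):
# serverconfig['myserver'] = {'Address': 'example.org', 'Username': 'bob',
# 'Password': None, 'Transportation': 1}
# schedconfig['docs'] = {'Server': 'myserver', 'DiskID': None, 'Enabled': True,
# 'Source': '/home/bob/docs', 'Destination': '/backup/docs',
# 'NeedsUpdate': False, 'Username': None, 'Password': None}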
class pyrsyncgui(gui.QWidget):
updateGraphics = pyqtSignal()
updateInfo = pyqtSignal('QString')
def __init__(self,app):
gui.QWidget.__init__(self)
self.icons={}
self.icons['Disconnected'] = gui.QIcon('img/disconnected.svg')
self.icons['ConnectedUpdated'] = gui.QIcon('img/connectedupdated.svg')
self.icons['ConnectedUpdating'] = (gui.QIcon('img/connectedupdating1.svg'),gui.QIcon('img/connectedupdating2.svg'))
self.icons['PartialConnectedUpdating'] = (gui.QIcon('img/partialconnectedupdating1.svg'),gui.QIcon('img/partialconnectedupdating2.svg'))
self.icons['PartialConnectedUpdated'] = gui.QIcon('img/partialconnectedupdated.svg')
self.progress = gui.QMovie('img/progress.gif')
self.tray = gui.QSystemTrayIcon(self.icons['Disconnected'])
self.app = app
#Loading configuration files
cfgfile = os.path.join(self.configDir(),'.pyrsyncgui')
servcfgfile = os.path.join(self.configDir(),'.pyrsyncgui.server')
schedcfgfile = os.path.join(self.configDir(),'.pyrsyncgui.sync')
self.config = conf.configuration(cfgfile)
if len(self.config) == 0:
self.defaultconfig()
self.serverconfig = conf.configuration(servcfgfile)
if len(self.serverconfig) == 0:
self.defaultserverconfig()
self.schedconfig = conf.configuration(schedcfgfile)
if len(self.schedconfig) == 0:
self.defaultschedconfig()
self.window = uic.loadUi('gui.ui')
self.connect(self.window.AddSyncButton, core.SIGNAL('clicked()'), self, core.SLOT('addSync()'))
self.connect(self.window.ManageButton, core.SIGNAL('clicked()'), self, core.SLOT('manageSync()'))
self.connect(self.window.ConfigButton, core.SIGNAL('clicked()'), self, core.SLOT('config()'))
disp.register('SyncProgress',self.syncprogressupdate)
disp.register('InfoMsg',self.infomsg)
self.serverstatus = 0
self.backupstatus = 0
disp.register('ServerStatus',self.__serverStatusUpdate)
disp.register('BackupStatus',self.__backupStatusUpdate)
self.bckeng = backupengine(self.config,self.serverconfig,self.schedconfig)
self.app.aboutToQuit.connect(self.cleanup)
self.menu = gui.QMenu()
self.menu.addAction('Pause Backups',self.__pauseBackups)
self.menu.addSeparator()
self.menu.addAction('Exit', self.quit)
self.tray.activated.connect(self.__trayActivated)
self.tray.setContextMenu(self.menu)
self.tray.show()
self.window.closeEvent = self.closing
self.window.ServerVerifyButton.raise_()
self.window.ServerVerifyInProgress.setMovie(self.progress)
self.window.ServerProceedButton.raise_()
self.window.ServerProceedInProgress.setMovie(self.progress)
self.progress.start()
self.currentwizard=None
self.updateGraphics.connect(self.__updateGraphics)
self.updateInfo.connect(self.__updateInfo)
self.bckeng.start()
self.window.InfoLabel.setText(' Welcome to pyrsyncgui!')
def configDir(self):
if platform.system() == 'Windows':
appdata = path.join(environ['APPDATA'], 'pyrsyncgui')
else:
appdata = path.expanduser(path.join('~', '.config', 'pyrsyncgui'))
if not os.path.exists(appdata):
os.makedirs(appdata)
return appdata
def quit(self):
self.tray.hide()
self.window.hide()
self.close()
self.app.quit()
def closing(self, event):
event.ignore()
self.config.save()
self.schedconfig.save()
self.serverconfig.save()
self.window.hide()
if self.currentwizard is not None:
self.currentwizard.stop()
def cleanup(self):
print('Closing')
self.bckeng.stop()
self.config.save()
self.schedconfig.save()
self.serverconfig.save()
@pyqtSlot()
def addSync(self):
self.currentwizard = addsyncwizard(self.window, self.config, self.serverconfig,self.schedconfig)
self.currentwizard.start()
@pyqtSlot()
def manageSync(self):
self.currentwizard = managesyncwizard(self.window, self.config, self.serverconfig,self.schedconfig)
self.currentwizard.start()
@pyqtSlot()
def config(self):
self.currentwizard = configurationwizard(self.window, self.config)
self.currentwizard.start()
def infomsg(self,msg):
self.updateInfo.emit(msg)
def syncprogressupdate(self,msg):
filename = msg['CurrentFile'][msg['CurrentFile'].rfind('/')+1:]
if len(filename) > 20:
filename = filename[0:17]+'...'
self.window.InfoLabel.setText(' Sync: '+filename+' '+str(msg['FilePercentage'])+'%, '+ str(msg['OverallPercentage'])+'%')
self.tray.setToolTip(' Sync: '+filename+'\nProgress: '+str(msg['FilePercentage'])+'%, '+ str(msg['OverallPercentage'])+'%')
def defaultconfig(self):
self.config['PollingTime'] = 1
if platform.system() == 'Windows':
self.config['RsyncCmd'] = '.\\bin\\rsync'
elif platform.system() == 'Linux':
self.config['RsyncCmd'] = 'rsync'
self.config['SshCmd'] = 'ssh'
def defaultschedconfig(self):
pass
def defaultserverconfig(self):
pass
def changeState(self,state):
pass
@pyqtSlot('QString')
def __updateInfo(self,text):
self.window.InfoLabel.setText(' '+text)
@pyqtSlot()
def __updateGraphics(self):
if self.serverstatus == 0:
self.tray.setIcon(self.icons['Disconnected'])
elif self.serverstatus == 1:
if self.backupstatus == 0:
self.tray.setIcon(self.icons['PartialConnectedUpdated'])
else:
self.tray.setIcon(self.icons['PartialConnectedUpdating'][0])
else:
if self.backupstatus == 0:
self.tray.setIcon(self.icons['ConnectedUpdated'])
else:
self.tray.setIcon(self.icons['ConnectedUpdating'][0])
@pyqtSlot('QSystemTrayIcon::ActivationReason')
def __trayActivated(self,event):
if event == gui.QSystemTrayIcon.Trigger:
if len(self.schedconfig) == 0:
self.window.ManageButton.setEnabled(False)
else:
self.window.ManageButton.setEnabled(True)
self.window.setVisible(not self.window.isVisible())
def __serverStatusUpdate(self,msg):
self.serverstatus = msg
self.updateGraphics.emit()
def __backupStatusUpdate(self,msg):
self.backupstatus = msg
self.updateGraphics.emit()
def __pauseBackups(self):
self.bckeng.pause()
def main(argv=sys.argv):
os.chdir(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))) # script directory
a = gui.QApplication( sys.argv )
k = pyrsyncgui(a)
a.setActiveWindow(k)
a.exec_()
if __name__ == '__main__':
sys.exit(main())
|
m4tto/pyrsyncgui
|
pyrsyncgui.py
|
Python
|
gpl-2.0
| 8,640
|
#! /usr/bin/env python
###############################################################################
# This file is part of openWNS (open Wireless Network Simulator)
# _____________________________________________________________________________
#
# Copyright (C) 2004-2007
# Chair of Communication Networks (ComNets)
# Kopernikusstr. 16, D-52074 Aachen, Germany
# phone: ++49-241-80-27910,
# fax: ++49-241-80-22242
# email: info@openwns.org
# www: http://www.openwns.org
# _____________________________________________________________________________
#
# openWNS is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License version 2 as published by the
# Free Software Foundation;
#
# openWNS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import pwd
import sys
import shutil
import subprocess
import optparse
import re
import datetime
import time
import openwns.wrowser.Configuration as conf
import openwns.wrowser.simdb.Database as db
import openwns.wrowser.simdb.Parameters as params
import openwns.wrowser.simdb.ProbeDB
import openwns.wrowser.Tools
config = conf.Configuration()
config.read('.campaign.conf')
db.Database.connectConf(config)
def getWrowserDir():
for cand in sys.path:
if os.path.isdir(os.path.join(cand, 'openwns', 'wrowser')):
return cand
return None
def __getFilteredScenarioIds(cursor, stateSpecial = None):
query = 'SELECT id FROM scenarios WHERE campaign_id = %d' % config.campaignId
if(options.state is not None):
if('state' in options.state):
query += (' AND ( %s )' % options.state)
else:
query += (' AND state = \'%s\'' % options.state)
if(stateSpecial is not None):
query += (' AND %s' % stateSpecial)
cursor.execute(query)
scenarioIds = [ entry[0] for entry in cursor.fetchall() ]
if(options.expression is not None):
scenarioIds = openwns.wrowser.Tools.objectFilter(options.expression, scenarioIds, viewGetter=__parametersDict)
scenarioIds.sort()
return scenarioIds
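# For example (illustrative), with options.state = 'Crashed' the final query is
# SELECT id FROM scenarios WHERE campaign_id = <campaignId> AND state = 'Crashed'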
def createDatabase(arg = 'unused'):
subprocess.call(['python ./campaignConfiguration.py'], shell = True)
print 'Database entries successfully created.'
def createScenarios(arg = 'unused'):
cursor = db.Database.getCursor()
scenarioIds = __getFilteredScenarioIds(cursor)
cursor.connection.commit()
wdir = getWrowserDir()
if wdir is None:
print "ERROR: Cannot find Wrowser directory! Exiting..."
return
for scenario in scenarioIds:
simId = str(scenario)
simPath = os.path.abspath(os.path.join(os.getcwd(), simId))
if os.path.exists(simPath):
if options.forceOverwrite:
shutil.rmtree(simPath)
else:
print "Skipping %s, it already exists (consider --force switch)" % simPath
continue
os.mkdir(simPath)
os.symlink(os.path.join('..', '..', 'sandbox', options.flavor, 'bin', 'openwns'), os.path.join(simPath, 'openwns'))
if options.flavor == 'opt':
os.symlink(os.path.join('..', '..', 'sandbox', 'dbg', 'bin', 'openwns'), os.path.join(simPath, 'openwns-dbg'))
os.symlink(os.path.join(wdir, 'openwns', 'wrowser', 'simdb', 'SimConfig.py'),
os.path.join(simPath, 'SimConfig.py'))
os.symlink(os.path.join('..', '.campaign.conf'), os.path.join(simPath, '.campaign.conf'))
for f in os.listdir(os.getcwd()):
if f.endswith('.py') or f.endswith('.probes') or f.endswith('.ini'):
if not f == 'simcontrol.py' and not f == 'campaignConfiguration.py' and not f == 'ProbeDB.py':
os.symlink(os.path.join('..', f), os.path.join(simPath, f))
if not os.path.exists(os.path.join(os.getcwd(), 'ProbeDB.py')):
os.symlink(os.path.join(wdir, 'openwns', 'wrowser', 'simdb', 'ProbeDB.py'),
os.path.join(os.getcwd(), 'ProbeDB.py'))
print 'Scenarios successfully created.'
def removeDatabase(arg = 'unused'):
db.Database.truncateCampaign(config.campaignId)
print 'Campaign results successfully removed from database.'
def removeScenarios(arg = 'unused'):
cursor = db.Database.getCursor()
scenarioIds = __getFilteredScenarioIds(cursor)
cursor.connection.commit()
for scenarioId in scenarioIds:
simPath = os.path.abspath(os.path.join(os.getcwd(), str(scenarioId)))
if os.path.exists(simPath):
shutil.rmtree(simPath)
print 'Scenarios successfully removed.'
def __submitJob(scenarioId):
cursor = db.Database.getCursor()
cursor.execute('SELECT state FROM scenarios WHERE id = %d AND campaign_id = %d' % (scenarioId, config.campaignId))
state = cursor.fetchone()[0]
if state == 'Queued':
print >>sys.stderr, 'ERROR: Job is already in queue'
elif state == 'Running':
print >>sys.stderr, 'ERROR: Job is currently running'
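# note: submission proceeds even after the warnings above; callers such as
# queueScenarios and requeueCrashedScenarios filter out queued/running jobs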
simId = str(scenarioId)
simPath = os.path.abspath(os.path.join(os.getcwd(), simId))
if simPath.startswith('/local'):
raise Exception('\n\nYour current dir starts with "/local/...". You must chdir to /net/<hostname>/.... Otherwise your simulations will fail.\n')
print 'Submitting job with scenario id ' + simId
command = os.path.abspath(os.path.join('..', 'sim.py')) + ' -p ' + os.path.abspath(os.getcwd()) + ' -i ' + simId
if options.skipNullTrials == True:
command += ' -n'
process = subprocess.Popen(['qsub -q %s -N job%s -l s_cpu=%i:%i:00 -l h_cpu=%i:%i:00 -o %s -e %s -m a -M %s@comnets.rwth-aachen.de %s' % (options.queue,
simId,
options.cpuTime,
options.cpuMinutes,
options.cpuTime,
options.cpuMinutes + 15,
os.path.join(simPath, 'stdout'),
os.path.join(simPath, 'stderr'),
pwd.getpwuid(os.getuid())[0],
command)],
stdout = subprocess.PIPE,
stderr = subprocess.STDOUT,
shell = True)
status = process.wait()
if not status == 0:
print >>sys.stderr, 'ERROR: qsub failed!'
print >>sys.stderr, process.stdout.read()
sys.exit(1)
state = 'Queued'
startDate = None
stopDate = None
hostname = None
try:
jobId = int(process.stdout.read().split()[2])
except:
print >>sys.stderr, 'ERROR: Could not get job id. Output of qsub has probably changed'
sys.exit(1)
cursor.execute('UPDATE scenarios SET state = \'Queued\', max_sim_time = 0.0, current_sim_time = 0.0, sim_time_last_write = 0.0 WHERE id = %d AND campaign_id = %d' % (scenarioId, config.campaignId))
cursor.execute('INSERT INTO jobs (campaign_id, scenario_id, sge_job_id, queue_date, start_date, stop_date, hostname, stdout, stderr) VALUES ' \
'(%d, %d, %d, \'%s\', \'1900-01-01\' , \'1900-01-01\', \'\', \'\', \'\')' % (config.campaignId, scenarioId, jobId, datetime.datetime.today().isoformat()))
cursor.connection.commit()
def queueSingleScenario(scenarioId):
cursor = db.Database.getCursor()
cursor.execute('SELECT state FROM scenarios WHERE campaign_id = %d AND id = %d' % (config.campaignId, scenarioId))
state = cursor.fetchone()[0]
cursor.connection.commit()
if state == 'Queued' or state == 'Running':
print >>sys.stderr, 'Job already queued/running.'
sys.exit(1)
__submitJob(scenarioId)
def __parametersDict(scenarioId):
cursor = db.Database.getCursor()
cursor.execute('SELECT state FROM scenarios WHERE campaign_id = %d AND id = %d' % (config.campaignId, scenarioId))
state = cursor.fetchone()[0]
cursor.connection.commit()
p = params.Parameters()
parameters = dict([[paramName, param.getValue()] for paramName, param in p.read(scenarioId).items()])
parameters['state'] = state
parameters['id'] = scenarioId
return parameters
def queueScenarios(stringexpression):
if(options.state == 'Queued' or options.state == 'Running'):
print >> sys.stderr, 'Cannot queue jobs which are already queued/running.'
sys.exit(1)
cursor = db.Database.getCursor()
scenarioIds = __getFilteredScenarioIds(cursor, stateSpecial = "state != \'Queued\' AND state != \'Running\'")
cursor.connection.commit()
if len(scenarioIds) < 1:
print >>sys.stderr, 'No scenarios found matching expression\n', options.expression
print >>sys.stderr, 'and state\n', options.state
sys.exit(1)
for scenarioId in scenarioIds:
__submitJob(scenarioId)
def requeueCrashedScenarios(arg = 'unused'):
if(options.state is not None):
print >> sys.stderr, 'Cannot filter by scenario state when requeuing crashed scenarios.'
sys.exit(1)
cursor = db.Database.getCursor()
scenarioIds = __getFilteredScenarioIds(cursor, stateSpecial = "state = \'Crashed\'")
cursor.connection.commit()
for scenarioId in scenarioIds:
# remove results from previous simulation runs
openwns.wrowser.simdb.ProbeDB.removeAllProbesFromDB(scenarioId = scenarioId)
__submitJob(scenarioId)
def __deleteJob(scenarioId):
cursor = db.Database.getCursor()
cursor.execute('SELECT state, current_job_id FROM scenarios WHERE campaign_id = %d AND id = %d' % (config.campaignId, scenarioId))
(state, sgeJobId) = cursor.fetchone()
simId = str(scenarioId)
if state.strip() != 'Running' and state.strip() != 'Queued':
print >>sys.stderr, 'Job is not queued/running.'
print 'Deleting job with scenario id ' + simId
process = subprocess.Popen(['qdel ' + str(sgeJobId)],
stdout = subprocess.PIPE,
stderr = subprocess.STDOUT,
shell = True)
status = process.wait()
if not status == 0:
print >>sys.stderr, 'ERROR: qdel failed!'
print >>sys.stderr, process.stdout.read()
sys.exit(1)
if state == 'Running':
stopDate = datetime.datetime.today()
state = 'Aborted'
elif state == 'Queued':
state = 'NotQueued'
cursor.execute('UPDATE scenarios SET state = \'%s\', current_job_id = 0 WHERE campaign_id = %d AND id = %d' % (state, config.campaignId, scenarioId))
cursor.execute('UPDATE jobs SET stop_date = \'%s\' WHERE sge_job_id = %d' % (datetime.datetime.today().isoformat(), sgeJobId))
cursor.connection.commit()
def dequeueScenarios(arg = 'unused'):
if(options.state == 'NotQueued'):
print >> sys.stderr, 'Cannot dequeue jobs which are already dequeued.'
sys.exit(1)
cursor = db.Database.getCursor()
scenarioIds = __getFilteredScenarioIds(cursor, stateSpecial = "(state = \'Queued\' OR state = \'Running\')")
cursor.connection.commit()
for scenarioId in scenarioIds:
__deleteJob(scenarioId)
def consistencyCheck(arg = 'unused'):
cursor = db.Database.getCursor()
cursor.execute('SELECT id, current_job_id, state FROM scenarios WHERE campaign_id = %d AND current_job_id != 0' % config.campaignId)
sqlResults = cursor.fetchall()
for scenario, sgeJobId, state in sqlResults:
tmp = os.tmpfile()
status = subprocess.call(['qstat -j %i' % sgeJobId],
shell = True,
stderr = subprocess.STDOUT,
stdout = tmp)
tmp.seek(0)
if status != 0 and state in ['Running', 'Queued'] and 'Following jobs do not exist' in tmp.read(30):
cursor.execute('UPDATE scenarios SET state=\'Crashed\' WHERE campaign_id=%d AND id=%d' % (config.campaignId, scenario))
cursor.execute('UPDATE jobs SET stderr =\'Consistency check failed. Simulation has crashed!\' WHERE sge_job_id = %d' % sgeJobId)
stderrFile = file(os.path.join(os.getcwd(), str(scenario), 'stderr'), 'a')
stderrFile.write('Consistency check failed. Simulation has crashed!')
stderrFile.close()
cursor.connection.commit()
def __getSimTime(fileName):
currentSimTimeExpression = re.compile('.*Simulation time: ([0-9.]*).*')
maxSimTimeExpression = re.compile('.*Max. simulation time: ([0-9.]*).*')
try:
f = file(fileName)
except:
return None, None
cst = 0
progress = 0
for line in f:
currentSimTime = currentSimTimeExpression.match(line)
maxSimTime = maxSimTimeExpression.match(line)
if currentSimTime is not None:
cstMatch, = currentSimTime.groups(1)
cst = float(cstMatch)
if maxSimTime is not None:
mstMatch, = maxSimTime.groups(1)
mst = float(mstMatch)
if not mst == 0:
progress = cst / mst * 100
cstStr = '%.2fs' % cst
progressStr = '%.2f%%' % progress
return cstStr, progressStr
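# __getSimTime scans a WNSStatus.dat for lines like (illustrative):
# Simulation time: 12.5
# Max. simulation time: 100.0
# and would return ('12.50s', '12.50%') for that pair.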
def jobInfo(arg = 'unused'):
parameters = params.Parameters()
parameterNames = parameters.parameterSet.keys()
parameterNames.sort()
campaignParameters = parameters.readAllScenarios()
title = ' id state start stop duration simTime prog sgeId host '
parameterWidth = {}
for parameterName in parameterNames:
lengthAllValues = [len(parameterName)]
for scenarioId, parameters in campaignParameters.items():
lengthAllValues.append(len(str(parameters[parameterName].getValue())))
parameterWidth[parameterName] = max(lengthAllValues) + 2
title += parameterName.center(parameterWidth[parameterName])
print title
cursor = db.Database.getCursor()
if(options.expression is not None):
campaignIds = openwns.wrowser.Tools.objectFilter(options.expression, campaignParameters.keys(), viewGetter = __parametersDict)
else:
campaignIds = campaignParameters.keys()
for scenarioId in sorted(campaignIds):
cursor.execute('SELECT state, current_job_id FROM scenarios WHERE scenarios.campaign_id = %d AND scenarios.id = %d' % (config.campaignId, scenarioId))
(state, sgeJobId) = cursor.fetchone()
if((options.state is not None) and (options.state != state)):
continue
startDate = stopDate = datetime.datetime(1900, 1, 1)
hostname = None
if sgeJobId != 0:
cursor.execute('SELECT start_date, stop_date, hostname FROM jobs WHERE sge_job_id = %d' % sgeJobId)
(startDate, stopDate, hostname) = cursor.fetchone()
line = str(scenarioId).rjust(3) + ' ' + state.center(10)
if not startDate.year == 1900:
line += startDate.strftime('%d.%m.%y %H:%M:%S').center(20)
else:
line += str().center(20)
if not stopDate.year == 1900:
line += stopDate.strftime('%d.%m.%y %H:%M:%S').center(20)
else:
line += str().center(20)
if not startDate.year == 1900:
if not stopDate.year == 1900:
duration = stopDate - startDate
else:
duration = datetime.datetime.now() - startDate
line += str(duration).split('.')[0].rjust(17)
else:
line += str().rjust(17)
simTime, progress = __getSimTime(os.path.join(os.getcwd(), str(scenarioId), 'output', 'WNSStatus.dat'))
if simTime is not None:
line += simTime.rjust(8)
line += progress.rjust(9)
else:
line += str().center(17)
if not sgeJobId == 0:
line += str(sgeJobId).rjust(7)
else:
line += str().rjust(7)
if hostname is not None:
line += hostname.center(17)
else:
line += str().center(17)
for parameter in parameterNames:
value = str(campaignParameters[scenarioId][parameter].getValue())
if value is not None:
line += str(value).center(parameterWidth[parameter])
else:
line += str().center(parameterWidth[parameter])
print line
cursor.connection.commit()
def executeLocally(expression):
cursor = db.Database.getCursor()
scenarioIds = __getFilteredScenarioIds(cursor, stateSpecial = "(state != \'Queued\' OR state != \'Running\')")
cursor.connection.commit()
for scenario in scenarioIds:
__execute(scenario)
def __execute(scenario):
print 'Executing scenario with id: %i' % scenario
stdout = open(os.path.join(os.getcwd(), str(scenario), 'stdout'), 'w')
stderr = open(os.path.join(os.getcwd(), str(scenario), 'stderr'), 'w')
process = subprocess.Popen(['nice -n 19 ../sim.py -p %s -i %i -f' % (os.getcwd(), scenario)],
stdout = stdout,
stderr = stderr,
shell = True)
process.wait()
stdout.close()
stderr.close()
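# Example (illustrative, hypothetical campaign path): for scenario 7 in
# /campaigns/demo, __execute spawns roughly
#   nice -n 19 ../sim.py -p /campaigns/demo -i 7 -f
# with stdout/stderr redirected into the scenario subdirectory.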
class CommandQueue:
def __init__(self):
self.queue = []
def append(self, option, opt, arg, parser, command):
self.queue.append((command, arg))
def run(self):
for command, arg in self.queue:
command(arg)
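# Minimal sketch of how optparse feeds the queue (hypothetical option and
# command):
#   parser.add_option('', '--demo', action = 'callback',
#                     callback = queue.append, callback_args = (demoCommand,))
# optparse then invokes queue.append(option, opt, arg, parser, demoCommand),
# and queue.run() later calls demoCommand(arg) in registration order.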
queue = CommandQueue()
usage = 'usage: %prog [options]'
parser = optparse.OptionParser(usage)
parser.add_option('', '--create-database',
action = 'callback', callback = queue.append,
callback_args = (createDatabase,),
help = 'create scenario database')
parser.add_option('', '--create-scenarios',
action = 'callback', callback = queue.append,
callback_args = (createScenarios,),
help = 'create scenario folders')
parser.add_option('', '--flavor',
type = 'str', dest = 'flavor', default = 'opt',
help = 'chose flavor for simulation (default: opt)\n' +
'option may only be used together with \'--create-scenarios\'', metavar = 'FLAVOR')
parser.add_option('', '--remove-database',
action = 'callback', callback = queue.append,
callback_args = (removeDatabase,),
help = 'remove results from database')
parser.add_option('', '--remove-scenarios',
action = 'callback', callback = queue.append,
callback_args = (removeScenarios,),
help = 'remove scenario folders')
parser.add_option('-q', '--queue',
type = 'str', dest = 'queue', default = 'cqStadt',
help = 'chose queue for jobs (default: cqStadt)', metavar = 'QUEUE')
parser.add_option('-t', '--cpu-time',
type = 'int', dest = 'cpuTime', default = 100,
help = 'chose time for jobs in hours (default: 100h)', metavar = 'HOURS')
parser.add_option('', '--minutes',
type = 'int', dest = 'cpuMinutes', default = 0,
help = 'chose time for jobs in minutes (default: 0m), can be combined with --cpu-time', metavar = 'MINUTES')
parser.add_option('-f', '--force',
dest = 'forceOverwrite', default = False, action ='store_true',
help = 'force overwriting existing scenario subdirs')
parser.add_option('-n', '--skipNullTrials',
dest = 'skipNullTrials', default = False, action ='store_true',
help = 'skip importing probes with zero trials into database')
parser.add_option('', '--execute-locally',
action = 'callback', callback = queue.append,
callback_args = (executeLocally,),
help = 'executes scenarios on the local machine')
parser.add_option('', '--queue-scenarios',
action = 'callback', callback = queue.append,
callback_args = (queueScenarios,),
help = 'queue scenarios in the SGE')
parser.add_option('', '--queue-single-scenario',
type = "int", metavar = "SCENARIOID",
action = 'callback', callback = queue.append,
callback_args = (queueSingleScenario,),
help = 'queue scenario with id SCENARIOID (same as --queue-scenarios --restrict-state=\'id=SCENARIOID\')')
parser.add_option('', '--requeue-crashed-scenarios',
action = 'callback', callback = queue.append,
callback_args = (requeueCrashedScenarios,),
help = 'requeue all crashed scenarios')
parser.add_option('', '--dequeue-scenarios',
action = 'callback', callback = queue.append,
callback_args = (dequeueScenarios,),
help = 'dequeue scenarios')
parser.add_option('', '--consistency-check',
action = 'callback', callback = queue.append,
callback_args = (consistencyCheck,),
help = 'solve inconsistencies between the sge job database and the scenario database')
parser.add_option('-i', '--info',
action = 'callback', callback = queue.append,
callback_args = (jobInfo,),
help = 'show information about all jobs')
parser.add_option('', '--interval',
dest = 'interval',
type = 'int', metavar = 'INTERVAL',
action = 'store',
default = 0,
help = 'run command in endless loop with interval length in between')
parser.add_option('', '--restrict-state', dest = 'state', metavar = 'STATE',
help = 'restrict the action to all scenarios having state STATE.')
parser.add_option('', '--restrict-expression', dest = 'expression', metavar = 'EXPRESSION',
help = 'restrict the action to all scenarios matching the SQL-statement EXPRESSION')
options, args = parser.parse_args()
if len(args):
parser.print_help()
sys.exit(1)
if not len(queue.queue):
parser.print_help()
sys.exit(0)
if options.interval != 0:
while True:
print "Running command ..."
queue.run()
until = time.localtime(time.time()+options.interval)
print "Sleeping until %d:%d %d-%d-%d..." % (until[3], until[4], until[0], until[1], until[2])
time.sleep(options.interval)
else:
queue.run()
sys.exit(0)
|
openwns/wrowser
|
openwns/wrowser/playgroundPlugins/SimulationCampaign/simcontrol.py
|
Python
|
gpl-2.0
| 23,950
|
'''
Copyright (C) 2016 Quinn D Granfor <spootdev@gmail.com>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
version 2, as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License version 2 for more details.
You should have received a copy of the GNU General Public License
version 2 along with this program; if not, write to the Free
Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
'''
from __future__ import absolute_import, division, print_function, unicode_literals
import logging # pylint: disable=W0611
import time
import sys
sys.path.append('.')
sys.path.append('../MediaKraken-PyLint') # for jenkins server
sys.path.append('../MediaKraken-PyLint/build_code/jenkins/')
import pipeline_packages_list
from common import common_network_ssh
from common import common_network_vm_proxmox
###
# Will be used to deploy ubuntu server
###
JENKINS_BUILD_VIM_LXC = 103
JENKINS_BUILD_VIM_LNX_IP = '10.0.0.90'
JENKINS_DEPLOY_VIM_LXC = 108
JENKINS_DEPLOY_VIM_LNX_IP = '10.0.0.101'
# create prox class instance to use
PROX_CONNECTION = common_network_vm_proxmox.CommonNetworkProxMox('10.0.0.190', 'root@pam',\
'jenkinsbuild')
# check status of ubuntu build vm
if PROX_CONNECTION.com_net_prox_node_lxc_status('pve',\
JENKINS_BUILD_VIM_LXC)['data']['status'] == 'stopped':
# start up the vm
PROX_CONNECTION.com_net_prox_node_lxc_start('pve', JENKINS_BUILD_VIM_LXC)
time.sleep(120) # wait two minutes for box to boot
# check status of ubuntu deploy vm
if PROX_CONNECTION.com_net_prox_node_lxc_status('pve',\
JENKINS_DEPLOY_VIM_LXC)['data']['status'] == 'stopped':
# start up the vm
PROX_CONNECTION.com_net_prox_node_lxc_start('pve', JENKINS_DEPLOY_VIM_LXC)
time.sleep(120) # wait two minutes for box to boot
# connect to server via ssh
SSH_DEPLOY = common_network_ssh.CommonNetworkSSH(JENKINS_DEPLOY_VIM_LNX_IP,\
'metaman', 'metaman')
SSH_BUILD = common_network_ssh.CommonNetworkSSH(JENKINS_BUILD_VIM_LNX_IP,\
'metaman', 'metaman')
# TODO rollback snap to base?
# setup directories needed for app
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/backups')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/bin')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/cache')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/conf')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/key')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/log')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/passwordmeter')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/passwordmeter/res')
# note: each com_net_ssh_run_command runs in its own shell, so this cd
# does not persist into the following commands (they use absolute paths)
SSH_DEPLOY.com_net_ssh_run_command('cd mediakraken')
# install server deps
# way too many deps, so install distro ffmpeg now and stomp over it with the compiled version later
SSH_DEPLOY.com_net_ssh_run_sudo_command('sudo apt-get -y install postgresql ffmpeg'\
' libva-drm1 libva-x11-1 libsmbclient nfs-common nginx redis-server'\
' cifs-utils')
# libhdhomerun
# scp ffmpeg
SSH_BUILD.com_net_ssh_run_sudo_command('sudo sshpass -p \'metaman\''\
' scp -o StrictHostKeyChecking=no /home/metaman/bin/ff*'\
' metaman@%s:/home/metaman/.' % JENKINS_DEPLOY_VIM_LNX_IP)
SSH_DEPLOY.com_net_ssh_run_sudo_command('sudo mv /home/metaman/ff* /usr/bin/.')
SSH_DEPLOY.com_net_ssh_run_sudo_command('sudo ldconfig')
# prep files to scp
SSH_BUILD.com_net_ssh_run_command('mkdir /home/metaman/dist/xfer')
SSH_BUILD.com_net_ssh_run_command('rm -Rf /home/metaman/dist/xfer/*')
# move all programs
for app_to_build in pipeline_packages_list.PIPELINE_APP_LIST:
SSH_BUILD.com_net_ssh_run_command('rsync -r /home/metaman/dist/' + app_to_build\
+ '/ /home/metaman/dist/xfer/.')
# scp actual programs
SSH_BUILD.com_net_ssh_run_sudo_command('sudo sshpass -p \'metaman\''\
' scp -r -o StrictHostKeyChecking=no /home/metaman/dist/xfer/*'\
' metaman@%s:/home/metaman/mediakraken/.' % JENKINS_DEPLOY_VIM_LNX_IP)
# scp the password common
SSH_BUILD.com_net_ssh_run_sudo_command('sudo sshpass -p \'metaman\''\
' scp -r -o StrictHostKeyChecking=no /home/metaman/MediaKraken_Submodules/passwordmeter/'\
'passwordmeter/res/common.txt'\
' metaman@%s:/home/metaman/mediakraken/passwordmeter/res/.' % JENKINS_DEPLOY_VIM_LNX_IP)
# copy over config files
SSH_BUILD.com_net_ssh_run_sudo_command('sudo sshpass -p \'metaman\''\
' scp -o StrictHostKeyChecking=no /home/metaman/MediaKraken_Deployment/'\
'MediaKraken.ini metaman@%s:/home/metaman/mediakraken/.' % JENKINS_DEPLOY_VIM_LNX_IP)
# copy postgresl user file
SSH_BUILD.com_net_ssh_run_sudo_command('sudo sshpass -p \'metaman\''\
' scp -o StrictHostKeyChecking=no /home/metaman/MediaKraken_Deployment/'\
'build_code/jenkins/pipeline-deploy-os/pipeline-deploy-os-server-pgsql-user-ubuntu.sh'\
' metaman@%s:/home/metaman/mediakraken/.' % JENKINS_DEPLOY_VIM_LNX_IP)
# create the postgresql user
# run the script under the name it was copied as (see the scp above)
SSH_DEPLOY.com_net_ssh_run_sudo_command('sudo /home/metaman/mediakraken/'\
'pipeline-deploy-os-server-pgsql-user-ubuntu.sh')
# remove user create script
SSH_DEPLOY.com_net_ssh_run_command('rm /home/metaman/mediakraken/'\
'pipeline-deploy-os-server-pgsql-user-ubuntu.sh')
# copy ffmpeg and libs
SSH_BUILD.com_net_ssh_run_sudo_command('sudo sshpass -p \'metaman\''\
' scp -o StrictHostKeyChecking=no /home/metaman/bin/*'\
' metaman@%s:/home/metaman/mediakraken/bin/.' % JENKINS_DEPLOY_VIM_LNX_IP)
SSH_DEPLOY.com_net_ssh_close()
SSH_BUILD.com_net_ssh_close()
|
MediaKraken/mkarchive
|
pipeline-deploy-os-server-ubuntu.py
|
Python
|
gpl-2.0
| 5,822
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2018-05-25 02:21
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('itemreg', '0008_auto_20160828_2058'),
]
operations = [
migrations.AlterField(
model_name='calculatorregistration',
name='user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='computerregistration',
name='user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='phoneregistration',
name='user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
|
tjcsl/ion
|
intranet/apps/itemreg/migrations/0009_auto_20180524_2221.py
|
Python
|
gpl-2.0
| 1,071
|
#!/usr/bin/env python2.7
############################################################################
##
## Copyright (c) 2000-2015 BalaBit IT Ltd, Budapest, Hungary
## Copyright (c) 2015-2018 BalaSys IT Ltd, Budapest, Hungary
##
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License along
## with this program; if not, write to the Free Software Foundation, Inc.,
## 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
##
############################################################################
import unittest
from HandlerMock import HandlerMock
from zorpctl.szig import SZIG
class TestSzig(unittest.TestCase):
def setUp(self):
self.szig = SZIG("", HandlerMock)
def test_get_value(self):
self.assertEquals(self.szig.get_value(""), None)
self.assertEquals(self.szig.get_value("service"), None)
self.assertEquals(self.szig.get_value("info.policy.file"), "/etc/zorp/policy.py")
self.assertEquals(self.szig.get_value("stats.thread_number"), 5)
self.assertEquals(self.szig.get_value("service.service_http_transparent.sessions_running"), 0)
def test_get_sibling(self):
self.assertEquals(self.szig.get_sibling("conns"), "info")
self.assertEquals(self.szig.get_sibling("stats.threads_running"), "stats.thread_rate_max")
self.assertEquals(self.szig.get_sibling("stats.thread_rate_max"), "stats.audit_number")
self.assertEquals(self.szig.get_sibling("stats.thread_number"), None)
def test_get_child(self):
self.assertEquals(self.szig.get_child(""), "conns")
self.assertEquals(self.szig.get_child("info"), "info.policy")
self.assertEquals(self.szig.get_child("info.policy"), "info.policy.reload_stamp")
self.assertEquals(self.szig.get_child("info.policy.reload_stamp"), None)
def test_get_set_loglevel(self):
loglevel = 6
self.szig.loglevel = loglevel
self.assertEquals(self.szig.loglevel, loglevel)
def test_get_set_logspec(self):
logspec = "this is a logspec"
self.szig.logspec = logspec
self.assertEquals(self.szig.logspec, logspec)
def test_get_set_deadlockcheck(self):
deadlockcheck = False
self.szig.deadlockcheck = deadlockcheck
self.assertEquals(self.szig.deadlockcheck, deadlockcheck)
def test_reload_and_reload_result(self):
self.szig.reload()
self.assertEquals(self.szig.reload_result(), True)
def test_coredump(self):
# the original bare except also swallowed the AssertionError raised by
# assertTrue(False, ...), so the test could never fail; use
# try/except/else so a missing exception is reported
try:
self.szig.coredump()
except Exception:
pass # expected: szig coredump is not working yet
else:
self.fail("szig coredump should not work while not repaired")
if __name__ == '__main__':
unittest.main()
|
mochrul/zorp
|
tests/zorpctl/test_szig.py
|
Python
|
gpl-2.0
| 3,271
|
#!/usr/bin/python3
import subprocess
from xml.dom import minidom
import imaplib
from pycious.lib.common import singleton
class Mail:
def __init__(self, username, password,\
server='imap.gmail.com', port=993):
"""
It returns -1 if there is no connection otherwise it returns
the number of unread mails.
"""
if not username or not password:
raise Exception('Error: You must specify the username and '+\
'password in your config file of pycious.')
self.username, self.password = username, password
self.server, self.port = server, port
# Define the connection object to None
self.M = None
def __connect(self):
self.M=imaplib.IMAP4_SSL(self.server , self.port)
#First field is imap login (gmail uses login with
#domain and '@' character), second - password
self.M.login(self.username, self.password)
def __call__(self):
"""
It returns -1 if it's not available the information otherwise
returns the number of unread mail.
"""
try:
if not self.M:
self.__connect()
status, counts = self.M.status("Inbox","(MESSAGES UNSEEN)")
# counts looks like ['"Inbox" (MESSAGES 42 UNSEEN 7)'], so field 4
# holds the UNSEEN count with a trailing ')' to strip
unread = counts[0].split()[4][:-1]
return int(unread)
except:
self.M = None
return -1
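# Example usage (hypothetical credentials):
#   mail = Mail('user@gmail.com', 'secret')
#   print(mail())  # number of unread mails, or -1 on failure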
class Grss:
def __init__(self, username, password):
"""
It returns -1 if there is no connection otherwise it returns
the number of unread news.
"""
if not username or not password:
raise Exception('Error: You must specify the username and '+\
'password in your config file of pycious.')
self.username, self.password = username, password
def __connect(self):
st, out = subprocess.getstatusoutput('curl -fs '+\
'"https://www.google.com/accounts/ClientLogin?'+\
'service=reader&Email='+self.username+\
'&Passwd='+self.password+'"')
if not out or out=="":
raise Exception()
auth_resp_dict = dict(x.split('=') for x in out.split('\n') if x)
auth_token = auth_resp_dict["Auth"]
auth = 'GoogleLogin auth='+ auth_token
command = 'curl -s -X GET http://www.google.com/reader/api/0/unread-count?all=true --header "Authorization: '+auth+'"'
st, out = subprocess.getstatusoutput(command)
xml_doc = minidom.parseString(str(out))
return xml_doc
def __call__(self):
try:
xml_doc = self.__connect()
list_el = xml_doc.firstChild.getElementsByTagName('list')[0]
if len(list_el.childNodes)==0:
return -1
for obj in list_el.childNodes:
if obj.getElementsByTagName('string')[0].firstChild.data.find('reading-list')!=-1:
for numb in obj.getElementsByTagName('number'):
if numb.attributes['name'].value=='count':
count = int(numb.firstChild.data)
return count
except:
return -1
|
fsquillace/pycious
|
pycious/lib/web.py
|
Python
|
gpl-2.0
| 3,481
|
import fauxfactory
import pytest
from cfme import test_requirements
from cfme.fixtures.provider import rhel7_minimal
from cfme.infrastructure.provider.rhevm import RHEVMProvider
from cfme.infrastructure.provider.virtualcenter import VMwareProvider
from cfme.markers.env_markers.provider import ONE_PER_TYPE
from cfme.markers.env_markers.provider import ONE_PER_VERSION
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.appliance.implementations.ui import navigator
from cfme.utils.conf import cfme_data
from cfme.utils.conf import credentials
from cfme.utils.log import logger
from cfme.utils.wait import wait_for
pytestmark = [
test_requirements.ansible,
pytest.mark.meta(
server_roles=["+embedded_ansible"]
),
pytest.mark.provider(
classes=[RHEVMProvider],
selector=ONE_PER_VERSION,
required_flags=["v2v"],
scope="module"
),
pytest.mark.provider(
classes=[VMwareProvider],
selector=ONE_PER_TYPE,
fixture_name="source_provider",
required_flags=["v2v"],
scope="module"
),
]
def get_migrated_vm_obj(src_vm_obj, target_provider):
"""Returns migrated_vm obj from target_provider"""
collection = target_provider.appliance.provider_based_collection(target_provider)
migrated_vm = collection.instantiate(src_vm_obj.name, target_provider)
return migrated_vm
@pytest.fixture(scope="module")
def ansible_repository(appliance):
"""Fixture to add ansible repository"""
appliance.wait_for_embedded_ansible()
repositories = appliance.collections.ansible_repositories
try:
repository = repositories.create(
name=fauxfactory.gen_alpha(),
url=cfme_data.ansible_links.playbook_repositories.v2v,
description=fauxfactory.gen_alpha()
)
except KeyError:
pytest.skip("Skipping since no such key found in yaml")
view = navigate_to(repository, "Details")
wait_for(lambda: view.entities.summary("Properties").get_text_of("Status") == "successful",
delay=10,
timeout=60,
fail_func=view.toolbar.refresh.click)
yield repository
if repository.exists:
repository.delete()
def catalog_item(request, appliance, machine_credential, ansible_repository, playbook_type):
"""Add provisioning and retire ansible catalog item"""
cat_item = appliance.collections.catalog_items.create(
catalog_item_class=appliance.collections.catalog_items.ANSIBLE_PLAYBOOK,
name=fauxfactory.gen_alphanumeric(),
description=fauxfactory.gen_alphanumeric(),
provisioning={
"repository": ansible_repository.name,
"playbook": "{}.yml".format(playbook_type),
"machine_credential": machine_credential,
"create_new": True,
"provisioning_dialog_name": fauxfactory.gen_alphanumeric(),
},
)
@request.addfinalizer
def _cleanup():
if cat_item.exists:
cat_item.delete()
return cat_item
@pytest.mark.parametrize(
"form_data_vm_obj_single_datastore", [["nfs", "nfs", rhel7_minimal]], indirect=True
)
def test_migration_playbooks(request, appliance, v2v_providers, host_creds, conversion_tags,
ansible_repository, form_data_vm_obj_single_datastore):
"""Test for migrating vms with pre and post playbooks"""
creds = credentials[v2v_providers.vmware_provider.data.templates.get("rhel7_minimal").creds]
CREDENTIALS = (
"Machine",
{
"username": creds.username,
"password": creds.password,
"privilage_escalation": "sudo",
},
)
credential = appliance.collections.ansible_credentials.create(
name="{type}_credential_{cred}".format(type=CREDENTIALS[0], cred=fauxfactory.gen_alpha()),
credential_type=CREDENTIALS[0],
**CREDENTIALS[1]
)
provision_catalog = catalog_item(
request, appliance, credential.name, ansible_repository, "provision"
)
retire_catalog = catalog_item(
request, appliance, credential.name, ansible_repository, "retire"
)
infrastructure_mapping_collection = appliance.collections.v2v_mappings
mapping = infrastructure_mapping_collection.create(
form_data_vm_obj_single_datastore.form_data
)
@request.addfinalizer
def _cleanup():
infrastructure_mapping_collection.delete(mapping)
# vm_obj is a list, with only 1 VM object, hence [0]
src_vm_obj = form_data_vm_obj_single_datastore.vm_list[0]
migration_plan_collection = appliance.collections.v2v_plans
migration_plan = migration_plan_collection.create(
name="plan_{}".format(fauxfactory.gen_alphanumeric()),
description="desc_{}".format(fauxfactory.gen_alphanumeric()),
infra_map=mapping.name,
vm_list=form_data_vm_obj_single_datastore.vm_list,
start_migration=True,
pre_playbook=provision_catalog.name,
post_playbook=retire_catalog.name,
)
# explicit wait for spinner of in-progress status card
view = appliance.browser.create_view(
navigator.get_class(migration_plan_collection, "All").VIEW.pick()
)
wait_for(
func=view.progress_card.is_plan_started,
func_args=[migration_plan.name],
message="migration plan is starting, be patient please",
delay=5,
num_sec=280,
handle_exception=True,
fail_cond=False
)
# wait until plan is in progress
wait_for(
func=view.plan_in_progress,
func_args=[migration_plan.name],
message="migration plan is in progress, be patient please",
delay=15,
num_sec=3600,
)
view.switch_to("Completed Plans")
view.wait_displayed()
migration_plan_collection.find_completed_plan(migration_plan)
logger.info(
"For plan %s, migration status after completion: %s, total time elapsed: %s",
migration_plan.name,
view.migration_plans_completed_list.get_vm_count_in_plan(migration_plan.name),
view.migration_plans_completed_list.get_clock(migration_plan.name),
)
# validate MAC address matches between source and target VMs
assert view.migration_plans_completed_list.is_plan_succeeded(migration_plan.name)
migrated_vm = get_migrated_vm_obj(src_vm_obj, v2v_providers.rhv_provider)
assert src_vm_obj.mac_address == migrated_vm.mac_address
|
RedHatQE/cfme_tests
|
cfme/tests/v2v/test_v2v_ansible.py
|
Python
|
gpl-2.0
| 6,484
|
from .db import Database
__version__ = "0.1.1"
__maintainer__ = "Gunther Cox"
__email__ = "gunthercx@gmail.com"
|
wunderlins/learning
|
python/jsondb/jsondatabase-0.1.1/jsondb/__init__.py
|
Python
|
gpl-2.0
| 114
|
__author__ = 'snake'
from PyQt4 import QtGui, QtCore
class SiteItems(QtGui.QListWidget):
def __init__(self):
super(SiteItems, self).__init__()
def startDrag(self, dropAction):
# create mime data object
#get all selected items
selitems = ""
for i in self.selectedItems():
selitems += i.text() + ","
mime = QtCore.QMimeData()
mime.setText(str(selitems).strip(","))
# start drag
drag = QtGui.QDrag(self)
drag.setMimeData(mime)
drag.start(QtCore.Qt.CopyAction)
def dragEnterEvent(self, event):
if event.mimeData().hasText():
event.accept()
else:
event.ignore()
def dragMoveEvent(self, event):
if event.mimeData().hasText():
event.setDropAction(QtCore.Qt.CopyAction)
event.accept()
else:
event.ignore()
def dropEvent(self, event):
if event.mimeData().hasText():
sites = event.mimeData().text()
for site in sites.split(","):
self.addItem(site)
event.setDropAction(QtCore.Qt.CopyAction)
event.accept()
else:
event.ignore()
|
slackeater/anal-beh
|
classes/gui/mylistwidget.py
|
Python
|
gpl-2.0
| 1,257
|
# -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2004-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Written by Benny Malengier
# Last change 2005/12/05:
# Make the naming of dates correspond to the actual action, e.g. the
# month abbreviation is given by "mnd." rather than "MAAND".
# Also fewer possibilities.
"""
Dutch-specific classes for parsing and displaying dates.
"""
from __future__ import unicode_literals
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
import re
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from ..lib.date import Date
from ._dateparser import DateParser
from ._datedisplay import DateDisplay
from ._datehandler import register_datehandler
#-------------------------------------------------------------------------
#
# Dutch parser
#
#-------------------------------------------------------------------------
class DateParserNL(DateParser):
month_to_int = DateParser.month_to_int
# Always add dutch and flemish name variants
# no matter what the current locale is
month_to_int["januari"] = 1
month_to_int["jan"] = 1
# Add other common latin, local and historical variants
month_to_int["januaris"] = 1
month_to_int["feber"] = 2
month_to_int["februaris"] = 2
month_to_int["merz"] = 3
#make sure on all distro mrt and maa are accepted
month_to_int["maa"] = 3
month_to_int["mrt"] = 3
month_to_int["aprilis"] = 4
month_to_int["maius"] = 5
month_to_int["junius"] = 6
month_to_int["julius"] = 7
month_to_int["augst"] = 8
month_to_int["7ber"] = 9
month_to_int["7bris"] = 9
month_to_int["8ber"] = 10
month_to_int["8bris"] = 10
month_to_int["9ber"] = 11
month_to_int["9bris"] = 11
month_to_int["10ber"] = 12
month_to_int["10bris"] = 12
month_to_int["xber"] = 12
month_to_int["xbris"] = 12
modifier_to_int = {
'voor' : Date.MOD_BEFORE,
'na' : Date.MOD_AFTER,
'tegen' : Date.MOD_ABOUT,
'om' : Date.MOD_ABOUT,
'rond' : Date.MOD_ABOUT,
'circa' : Date.MOD_ABOUT,
'ca.' : Date.MOD_ABOUT,
}
calendar_to_int = {
'gregoriaans' : Date.CAL_GREGORIAN,
'greg.' : Date.CAL_GREGORIAN,
'juliaans' : Date.CAL_JULIAN,
'jul.' : Date.CAL_JULIAN,
'hebreeuws' : Date.CAL_HEBREW,
'hebr.' : Date.CAL_HEBREW,
'islamitisch' : Date.CAL_ISLAMIC,
'isl.' : Date.CAL_ISLAMIC,
'franse republiek': Date.CAL_FRENCH,
'fran.' : Date.CAL_FRENCH,
'persisch' : Date.CAL_PERSIAN,
'zweeds' : Date.CAL_SWEDISH,
'z' : Date.CAL_SWEDISH,
}
quality_to_int = {
'geschat' : Date.QUAL_ESTIMATED,
'gesch.' : Date.QUAL_ESTIMATED,
'berekend' : Date.QUAL_CALCULATED,
'ber.' : Date.QUAL_CALCULATED,
}
bce = ["voor onze tijdrekening", "voor Christus", "v. Chr."] + DateParser.bce
def init_strings(self):
DateParser.init_strings(self)
self._span = re.compile(r"(van)\s+(?P<start>.+)\s+(tot)\s+(?P<stop>.+)",
re.IGNORECASE)
self._range = re.compile(r"tussen\s+(?P<start>.+)\s+en\s+(?P<stop>.+)",
re.IGNORECASE)
self._text2 = re.compile(r'(\d+)?.?\s+?%s\s*((\d+)(/\d+)?)?'
% self._mon_str,
re.IGNORECASE)
self._jtext2 = re.compile(r'(\d+)?.?\s+?%s\s*((\d+)(/\d+)?)?'
% self._jmon_str,
re.IGNORECASE)
#-------------------------------------------------------------------------
#
# Dutch display
#
#-------------------------------------------------------------------------
class DateDisplayNL(DateDisplay):
"""
Dutch language date display class.
"""
# TODO: Translate these month strings:
long_months = ( "", "januari", "februari", "maart", "april", "mei",
"juni", "juli", "augustus", "september", "oktober",
"november", "december" )
short_months = ( "", "jan", "feb", "mrt", "apr", "mei", "jun",
"jul", "aug", "sep", "okt", "nov", "dec" )
calendar = (
"", "juliaans", "hebreeuws",
"franse republiek", "persisch", "islamitisch",
"zweeds" )
_mod_str = ("", "voor ", "na ", "rond ", "", "", "")
_qual_str = ("", "geschat ", "berekend ")
_bce_str = "%s v. Chr."
formats = (
"JJJJ-MM-DD (ISO)", "Numerisch DD/MM/JJ", "Maand Dag, Jaar",
"Mnd. Dag Jaar", "Dag Maand Jaar", "Dag Mnd. Jaar"
)
# this definition must agree with its "_display_gregorian" method
def _display_gregorian(self, date_val):
"""
display gregorian calendar date in different format
"""
# this must agree with its locale-specific "formats" definition
year = self._slash_year(date_val[2], date_val[3])
if self.format == 0:
return self.display_iso(date_val)
elif self.format == 1:
if date_val[3]:
return self.display_iso(date_val)
else:
# day/month_number/year
if date_val[0] == date_val[1] == 0:
value = str(date_val[2])
else:
value = self._tformat.replace('%m', str(date_val[1]))
value = value.replace('%d', str(date_val[0]))
value = value.replace('%Y', str(abs(date_val[2])))
value = value.replace('-', '/')
elif self.format == 2:
# month_name day, year
if date_val[0] == 0:
if date_val[1] == 0:
value = year
else:
value = "%s %s" % (self.long_months[date_val[1]], year)
else:
value = "%s %d, %s" % (self.long_months[date_val[1]],
date_val[0], year)
elif self.format == 3:
# month_abbreviation day, year
if date_val[0] == 0:
if date_val[1] == 0:
value = year
else:
value = "%s %s" % (self.short_months[date_val[1]], year)
else:
value = "%s %d, %s" % (self.short_months[date_val[1]],
date_val[0], year)
elif self.format == 4:
# day month_name year
if date_val[0] == 0:
if date_val[1] == 0:
value = year
else:
value = "%s %s" % (self.long_months[date_val[1]], year)
else:
value = "%d %s %s" % (date_val[0],
self.long_months[date_val[1]], year)
else:
# day month_abbreviation year
if date_val[0] == 0:
if date_val[1] == 0:
value = year
else:
value = "%s %s" % (self.short_months[date_val[1]], year)
else:
value = "%d %s %s" % (date_val[0],
self.short_months[date_val[1]], year)
if date_val[2] < 0:
return self._bce_str % value
else:
return value
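# Illustrative outputs for date_val = (5, 3, 1999, False), i.e. 5 March 1999
# (given the formats tuple above):
#   format 2 -> "maart 5, 1999"
#   format 4 -> "5 maart 1999"
#   format 5 -> "5 mrt 1999"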
def display(self, date):
"""
Return a text string representing the date.
"""
mod = date.get_modifier()
cal = date.get_calendar()
qual = date.get_quality()
start = date.get_start_date()
newyear = date.get_new_year()
qual_str = self._qual_str[qual]
if mod == Date.MOD_TEXTONLY:
return date.get_text()
elif start == Date.EMPTY:
return ""
elif mod == Date.MOD_SPAN:
d1 = self.display_cal[cal](start)
d2 = self.display_cal[cal](date.get_stop_date())
scal = self.format_extras(cal, newyear)
return "%s%s %s %s %s%s" % (qual_str, 'van', d1,
'tot', d2, scal)
elif mod == Date.MOD_RANGE:
d1 = self.display_cal[cal](start)
d2 = self.display_cal[cal](date.get_stop_date())
scal = self.format_extras(cal, newyear)
return "%stussen %s en %s%s" % (qual_str, d1, d2,
scal)
else:
text = self.display_cal[date.get_calendar()](start)
scal = self.format_extras(cal, newyear)
return "%s%s%s%s" % (qual_str, self._mod_str[mod], text,
scal)
#-------------------------------------------------------------------------
#
# Register classes
#
#-------------------------------------------------------------------------
register_datehandler(('nl_NL', 'dutch', 'Dutch', 'nl_BE', 'nl'),
DateParserNL, DateDisplayNL)
|
pmghalvorsen/gramps_branch
|
gramps/gen/datehandler/_date_nl.py
|
Python
|
gpl-2.0
| 10,098
|
import numpy as np
import fdasrsf as fs
from scipy.integrate import cumtrapz
from scipy.linalg import norm, expm
import h5py
fun = h5py.File('/home/dtucker/fdasrsf/debug_data_oc_mlogit.h5')
q = fun['q'][:]
y = fun['y'][:]
alpha = fun['alpha'][:]
nu = fun['nu'][:]
max_itr = 8000 # 4000
tol = 1e-4
deltag = .05
deltaO = .08
display = 1
alpha = alpha/norm(alpha)
q, scale = fs.scale_curve(q) # q/norm(q)
for ii in range(0, nu.shape[2]):
nu[:, :, ii], scale = fs.scale_curve(nu[:, :, ii]) # nu/norm(nu)
# python code
n = q.shape[0]
TT = q.shape[1]
m = nu.shape[2]
time = np.linspace(0, 1, TT)
binsize = 1. / (TT - 1)
gam = np.linspace(0, 1, TT)
O = np.eye(n)
O_old = O.copy()
gam_old = gam.copy()
qtilde = q.copy()
# rotation basis (Skew Symmetric)
# E = np.array([[0, -1.], [1., 0]])
# warping basis (Fourier)
p = 20
f_basis = np.zeros((TT, p))
for i in range(0, int(p/2)):
f_basis[:, 2*i] = 1/np.sqrt(np.pi) * np.sin(2*np.pi*(i+1)*time)
f_basis[:, 2*i + 1] = 1/np.sqrt(np.pi) * np.cos(2*np.pi*(i+1)*time)
itr = 0
max_val = np.zeros(max_itr+1)
while itr <= max_itr:
# inner product value
A = np.zeros(m)
for i in range(0, m):
A[i] = fs.innerprod_q2(qtilde, nu[:, :, i])
# form gradient for rotation
# B = np.zeros((n, n, m))
# for i in range(0, m):
# B[:, :, i] = cf.innerprod_q2(E.dot(qtilde), nu[:, :, i]) * E
# tmp1 = np.sum(np.exp(alpha + A))
# tmp2 = np.sum(np.exp(alpha + A) * B, axis=2)
# hO = np.sum(y * B, axis=2) - (tmp2 / tmp1)
# O_new = O_old.dot(expm(deltaO * hO))
theta = np.arccos(O_old[0, 0])
Ograd = np.array([(-1*np.sin(theta), -1*np.cos(theta)),
(np.cos(theta), -1*np.sin(theta))])
B = np.zeros(m)
for i in range(0, m):
B[i] = fs.innerprod_q2(Ograd.dot(qtilde), nu[:, :, i])
tmp1 = np.sum(np.exp(alpha + A))
tmp2 = np.sum(np.exp(alpha + A) * B)
hO = np.sum(y * B) - (tmp2 / tmp1)
O_new = fs.rot_mat(theta+deltaO*hO)
# form gradient for warping
qtilde_diff = np.gradient(qtilde, binsize)
qtilde_diff = qtilde_diff[1]
c = np.zeros((TT, m))
for i in range(0, m):
tmp3 = np.zeros((TT, p))
for j in range(0, p):
cbar = cumtrapz(f_basis[:, j], time, initial=0)
ctmp = 2*qtilde_diff*cbar + qtilde*f_basis[:, j]
tmp3[:, j] = fs.innerprod_q2(ctmp, nu[:, :, i]) * f_basis[:, j]
c[:, i] = np.sum(tmp3, axis=1)
tmp2 = np.sum(np.exp(alpha + A) * c, axis=1)
hpsi = np.sum(y * c, axis=1) - (tmp2 / tmp1)
vecnorm = norm(hpsi)
costmp = np.cos(deltag * vecnorm) * np.ones(TT)
sintmp = np.sin(deltag * vecnorm) * (hpsi / vecnorm)
psi_new = costmp + sintmp
gam_tmp = cumtrapz(psi_new * psi_new, time, initial=0)
gam_tmp = (gam_tmp - gam_tmp[0]) / (gam_tmp[-1] - gam_tmp[0])
gam_new = np.interp(gam_tmp, time, gam_old)
max_val[itr] = np.sum(y * (alpha + A)) - np.log(tmp1)
if display == 1:
print("Iteration %d : Cost %f" % (itr+1, max_val[itr]))
gam_old = gam_new.copy()
O_old = O_new.copy()
qtilde = fs.group_action_by_gamma(O_old.dot(q), gam_old)
if vecnorm < tol and np.fabs(hO) < tol:
break
itr += 1
|
glemaitre/fdasrsf
|
debug/debug_warp_ocmlogistic.py
|
Python
|
gpl-3.0
| 3,202
|
#!/usr/bin/env python
import os
import sys
import json
import click
import serial
import pkg_resources
import serial.tools.list_ports
import logging.config
from educube.web import server as webserver
import logging
logger = logging.getLogger(__name__)
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
def configure_logging(verbose):
loglevels = {
0: logging.ERROR,
1: logging.WARNING,
2: logging.INFO,
3: logging.DEBUG,
}
logging.basicConfig(level=loglevels[verbose])
def verify_serial_connection(port, baud):
try:
ser = serial.Serial(port, baud, timeout=1)
a = ser.read()
if a:
logger.debug('Serial open: %s' % port)
else:
logger.debug('Serial exists but is not readable (permissions?): %s' % port)
ser.close()
except serial.serialutil.SerialException as e:
raise click.BadParameter("Serial not readable: %s" % e)
##############################
# COMMANDS
##############################
def get_serial():
ports = serial.tools.list_ports.comports()
suggested_educube_port = ports[-1]
return suggested_educube_port.device
def get_baud():
ports = serial.tools.list_ports.comports()
suggested_educube_port = ports[-1]
if suggested_educube_port.description == 'BASE':
return 9600
else:
return 115200
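# Note: both helpers assume at least one serial port is present and simply
# suggest the last one enumerated; e.g. (illustrative)
#   get_serial() -> '/dev/ttyUSB0'
#   get_baud()   -> 115200  (9600 only if the port describes itself as 'BASE')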
@click.group()
@click.option('-v', '--verbose', count=True)
@click.pass_context
def cli(ctx, verbose):
"""Educube Client"""
configure_logging(verbose)
@cli.command()
def version():
"""Prints the EduCube client version"""
print(pkg_resources.require("educube")[0].version)
@cli.command()
@click.option('-s', '--serial', default=get_serial, prompt=True)
@click.option('-b', '--baud', default=get_baud, prompt=True)
@click.option('-e', '--board', default='CDH')
@click.option('--fake', is_flag=True, default=False, help="Fake the serial")
@click.option('--json', is_flag=True, default=False, help="Outputs mostly JSON instead")
@click.pass_context
def start(ctx, serial, baud, board, fake, json):
"""Starts the EduCube web interface"""
logger.debug("""Running with settings:
Serial: %s
Baudrate: %s
EduCube board: %s
""" % (serial, baud, board))
ctx.obj['connection'] = {
"type": "serial",
"port": serial,
"baud": baud,
"board": board,
"fake": fake,
}
if not fake:
verify_serial_connection(serial, baud)
webserver.start_webserver(
connection=ctx.obj.get('connection')
)
def main():
cli(obj={})
if __name__ == '__main__':
main()
|
ezeakeal/educube_client
|
educube/client.py
|
Python
|
gpl-3.0
| 2,664
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, time, datetime, re, threading
from electrum_creditbit.i18n import _
from electrum_creditbit.util import print_error, print_msg
import os.path, json, ast, traceback
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from electrum_creditbit import DEFAULT_SERVERS, DEFAULT_PORTS
from util import *
#protocol_names = ['TCP', 'HTTP', 'SSL', 'HTTPS']
#protocol_letters = 'thsg'
protocol_names = ['TCP', 'SSL']
protocol_letters = 'ts'
class NetworkDialog(QDialog):
def __init__(self, network, config, parent):
QDialog.__init__(self,parent)
self.setModal(1)
self.setWindowTitle(_('Network'))
self.setMinimumSize(375, 20)
self.network = network
self.config = config
self.protocol = None
self.servers = network.get_servers()
host, port, protocol, proxy_config, auto_connect = network.get_parameters()
if not proxy_config:
proxy_config = { "mode":"none", "host":"localhost", "port":"9050"}
if parent:
n = len(network.get_interfaces())
if n:
status = _("Blockchain") + ": " + "%d "%(network.get_local_height()) + _("blocks") + ".\n" + _("Getting block headers from %d nodes.")%n
else:
status = _("Not connected")
if network.is_connected():
status += "\n" + _("Server") + ": %s"%(host)
else:
status += "\n" + _("Disconnected from server")
else:
status = _("Please choose a server.") + "\n" + _("Select 'Cancel' if you are offline.")
vbox = QVBoxLayout()
vbox.setSpacing(30)
hbox = QHBoxLayout()
l = QLabel()
l.setPixmap(QPixmap(":icons/network.png"))
hbox.addStretch(10)
hbox.addWidget(l)
hbox.addWidget(QLabel(status))
hbox.addStretch(50)
msg = _("Electrum sends your wallet addresses to a single server, in order to receive your transaction history.") + "\n\n" \
+ _("In addition, Electrum connects to several nodes in order to download block headers and find out the longest blockchain.") + " " \
+ _("This blockchain is used to verify the transactions sent by the address server.")
hbox.addWidget(HelpButton(msg))
vbox.addLayout(hbox)
# grid layout
grid = QGridLayout()
grid.setSpacing(8)
vbox.addLayout(grid)
# server
self.server_host = QLineEdit()
self.server_host.setFixedWidth(200)
self.server_port = QLineEdit()
self.server_port.setFixedWidth(60)
grid.addWidget(QLabel(_('Server') + ':'), 0, 0)
# use SSL
self.ssl_cb = QCheckBox(_('Use SSL'))
self.ssl_cb.setChecked(protocol == 's')
grid.addWidget(self.ssl_cb, 3, 1)
self.ssl_cb.stateChanged.connect(self.change_protocol)
# auto connect
self.autocycle_cb = QCheckBox(_('Auto-connect'))
self.autocycle_cb.setChecked(auto_connect)
grid.addWidget(self.autocycle_cb, 0, 1)
if not self.config.is_modifiable('auto_cycle'): self.autocycle_cb.setEnabled(False)
msg = _("If auto-connect is enabled, Electrum will always use a server that is on the longest blockchain.") + " " \
+ _("If it is disabled, Electrum will warn you if your server is lagging.")
grid.addWidget(HelpButton(msg), 0, 4)
grid.addWidget(self.server_host, 0, 2, 1, 2)
grid.addWidget(self.server_port, 0, 3)
label = _('Active Servers') if network.is_connected() else _('Default Servers')
self.servers_list_widget = QTreeWidget(parent)
self.servers_list_widget.setHeaderLabels( [ label, _('Limit') ] )
self.servers_list_widget.setMaximumHeight(150)
self.servers_list_widget.setColumnWidth(0, 240)
self.change_server(host, protocol)
self.set_protocol(protocol)
self.servers_list_widget.connect(self.servers_list_widget,
SIGNAL('currentItemChanged(QTreeWidgetItem*,QTreeWidgetItem*)'),
lambda x,y: self.server_changed(x))
grid.addWidget(self.servers_list_widget, 1, 1, 1, 3)
def enable_set_server():
if config.is_modifiable('server'):
enabled = not self.autocycle_cb.isChecked()
self.server_host.setEnabled(enabled)
self.server_port.setEnabled(enabled)
self.servers_list_widget.setEnabled(enabled)
else:
for w in [self.autocycle_cb, self.server_host, self.server_port, self.ssl_cb, self.servers_list_widget]:
w.setEnabled(False)
self.autocycle_cb.clicked.connect(enable_set_server)
enable_set_server()
# proxy setting
self.proxy_mode = QComboBox()
self.proxy_host = QLineEdit()
self.proxy_host.setFixedWidth(200)
self.proxy_port = QLineEdit()
self.proxy_port.setFixedWidth(60)
self.proxy_mode.addItems(['NONE', 'SOCKS4', 'SOCKS5', 'HTTP'])
def check_for_disable(index = False):
if self.config.is_modifiable('proxy'):
if self.proxy_mode.currentText() != 'NONE':
self.proxy_host.setEnabled(True)
self.proxy_port.setEnabled(True)
else:
self.proxy_host.setEnabled(False)
self.proxy_port.setEnabled(False)
else:
for w in [self.proxy_host, self.proxy_port, self.proxy_mode]: w.setEnabled(False)
check_for_disable()
self.proxy_mode.connect(self.proxy_mode, SIGNAL('currentIndexChanged(int)'), check_for_disable)
self.proxy_mode.setCurrentIndex(self.proxy_mode.findText(str(proxy_config.get("mode").upper())))
self.proxy_host.setText(proxy_config.get("host"))
self.proxy_port.setText(proxy_config.get("port"))
grid.addWidget(QLabel(_('Proxy') + ':'), 4, 0)
grid.addWidget(self.proxy_mode, 4, 1)
grid.addWidget(self.proxy_host, 4, 2)
grid.addWidget(self.proxy_port, 4, 3)
# buttons
vbox.addLayout(Buttons(CancelButton(self), OkButton(self)))
self.setLayout(vbox)
def init_servers_list(self):
self.servers_list_widget.clear()
for _host, d in sorted(self.servers.items()):
if d.get(self.protocol):
pruning_level = d.get('pruning','')
self.servers_list_widget.addTopLevelItem(QTreeWidgetItem( [ _host, pruning_level ] ))
def set_protocol(self, protocol):
if protocol != self.protocol:
self.protocol = protocol
self.init_servers_list()
def change_protocol(self, use_ssl):
p = 's' if use_ssl else 't'
host = unicode(self.server_host.text())
pp = self.servers.get(host, DEFAULT_PORTS)
if p not in pp.keys():
p = pp.keys()[0]
port = pp[p]
self.server_host.setText( host )
self.server_port.setText( port )
self.set_protocol(p)
def server_changed(self, x):
if x:
self.change_server(str(x.text(0)), self.protocol)
def change_server(self, host, protocol):
pp = self.servers.get(host, DEFAULT_PORTS)
if protocol and protocol not in protocol_letters:
protocol = None
if protocol:
port = pp.get(protocol)
if port is None:
protocol = None
if not protocol:
if 's' in pp.keys():
protocol = 's'
port = pp.get(protocol)
else:
protocol = pp.keys()[0]
port = pp.get(protocol)
self.server_host.setText( host )
self.server_port.setText( port )
self.ssl_cb.setChecked(protocol=='s')
def do_exec(self):
if not self.exec_():
return
host = str( self.server_host.text() )
port = str( self.server_port.text() )
protocol = 's' if self.ssl_cb.isChecked() else 't'
if self.proxy_mode.currentText() != 'NONE':
proxy = { 'mode':str(self.proxy_mode.currentText()).lower(),
'host':str(self.proxy_host.text()),
'port':str(self.proxy_port.text()) }
else:
proxy = None
auto_connect = self.autocycle_cb.isChecked()
self.network.set_parameters(host, port, protocol, proxy, auto_connect)
return True
|
creditbit/electrum-creditbit
|
gui/qt/network_dialog.py
|
Python
|
gpl-3.0
| 9,292
|
# test.py (c) Mikhail Mezyakov <mihail265@gmail.com>
# Released under the GNU GPL v.3
#
# Module sends "success" message to user on a channel
def horo(channel, user, args):
"""Send "success" message if everything is ok"""
return u'PRIVMSG {channel} :{user}: success'.format(channel=channel,
user=user)
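# Example (illustrative):
#   horo('#test', 'alice', None) == u'PRIVMSG #test :alice: success'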
|
aluminiumgeek/horo-modules
|
test/test.py
|
Python
|
gpl-3.0
| 370
|
# -*- coding: utf-8 -*-
# Copyright (C) 2012-2015 Bastian Kleineidam
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Archive commands for the shar program."""
from .. import util
def create_shar (archive, compression, cmd, verbosity, interactive, filenames):
"""Create a SHAR archive."""
cmdlist = [util.shell_quote(cmd)]
cmdlist.extend([util.shell_quote(x) for x in filenames])
cmdlist.extend(['>', util.shell_quote(archive)])
return (cmdlist, {'shell': True})
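# Illustrative call (file names are hypothetical):
#   create_shar('backup.shar', None, 'shar', 0, False, ['a.txt', 'b.txt'])
# returns roughly (['shar', 'a.txt', 'b.txt', '>', 'backup.shar'],
# {'shell': True}), i.e. the shell pipeline "shar a.txt b.txt > backup.shar".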
|
wummel/patool
|
patoolib/programs/shar.py
|
Python
|
gpl-3.0
| 1,074
|
#!/usr/bin/env python
'''
Predict missing words with n-gram model
'''
import sys, argparse
from itertools import izip
from util import tokenize_words
def opts():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('sample', type=argparse.FileType('r'),
help='Sentences with one missing word')
parser.add_argument('removed', type=argparse.FileType('r'),
help='File with predicted indices of missing words')
return parser
if __name__ == "__main__":
args = opts().parse_args()
for sentence, i_missing in izip(args.sample, args.removed):
words = tokenize_words(sentence)
i_missing = int(i_missing)
print ' '.join(words[:i_missing]) + ' e ' + ' '.join(words[i_missing:])
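# Example (illustrative): for the sample line "the cat sat mat" with
# predicted index 3, the script prints "the cat sat e mat", i.e. it
# splices the literal placeholder token 'e' at the missing position.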
|
timpalpant/KaggleBillionWordImputation
|
scripts/predict_missing_word.space.py
|
Python
|
gpl-3.0
| 770
|
import pygame
import logging
from tools.action import Action
class Speaker(Action):
def __init__(self, id, params):
super(Speaker, self).__init__(id, params)
try:
self.path_to_audio = params["path_to_audio"]
self.repetitions = int(params["repetitions"])
except ValueError as ve: # if repetitions can't be parsed as int
logging.error("Speaker: Wasn't able to initialize the device, please check your configuration: %s" % ve)
self.corrupted = True
return
except KeyError as ke: # if config parameters are missing in file
logging.error("Speaker: Wasn't able to initialize the device, it seems there is a config parameter missing: %s" % ke)
self.corrupted = True
return
logging.debug("Speaker: Audio device initialized")
def play_audio(self):
logging.debug("Speaker: Trying to play audio")
pygame.mixer.init()
try:
pygame.mixer.music.load(self.path_to_audio)
except Exception as e: # audio file doesn't exist or is not playable
logging.error("Speaker: Wasn't able to load audio file: %s" % e)
pygame.mixer.quit()
return
pygame.mixer.music.set_volume(1)
for i in range(0, self.repetitions):
pygame.mixer.music.rewind()
pygame.mixer.music.play()
while pygame.mixer.music.get_busy():
continue # busy-wait until this repetition finishes playing
pygame.mixer.quit()
logging.debug("Speaker: Finished playing audio")
def execute(self):
if not self.corrupted:
self.play_audio()
else:
logging.error("Speaker: Wasn't able to play sound because of an initialization error")
def cleanup(self):
logging.debug("Speaker: No cleanup necessary at the moment")
|
SecPi/SecPi
|
worker/speaker.py
|
Python
|
gpl-3.0
| 1,590
|
import json
import pymarc
from siskin.conversions import (de_listify, imslp_xml_to_marc, osf_to_intermediate)
def test_imslp_xml_to_marc():
example = """<?xml version="1.0"?>
<document docID="imslpvalsskramstadhans">
<localClass localClassName="col">imslp</localClass>
<localClass localClassName="vifa">vifamusik</localClass>
<identifier identifierEncodingSchema="originalID">valsskramstadhans</identifier>
<creator>
<mainForm>Skramstad, Hans</mainForm>
</creator>
<title>Vals for pianoforte</title>
<subject>
<mainForm>Romantic</mainForm>
</subject>
<music_arrangement_of>Piano</music_arrangement_of>
<url urlEncodingSchema="originalDetailView">http://imslp.org/wiki/Vals_(Skramstad,_Hans)</url>
<vifatype>Internetressource</vifatype>
<fetchDate>2018-04-25T00:00:00.01Z</fetchDate>
<vifaxml><![CDATA[<document docID="imslpvalsskramstadhans"><localClass
localClassName="col">imslp</localClass><localClass
localClassName="vifa">vifamusik</localClass><identifier
identifierEncodingSchema="originalID">valsskramstadhans</identifier><creator><mainForm>Skramstad,
Hans</mainForm></creator><title>Vals for
pianoforte</title><subject><mainForm>Romantic</mainForm></subject><music_arrangement_of>Piano</music_arrangement_of><url
urlEncodingSchema="originalDetailView">http://imslp.org/wiki/Vals_(Skramstad,_Hans)</url><vifatype>Internetressource</vifatype></document>]]></vifaxml>
</document>
"""
result = imslp_xml_to_marc(example)
assert result is not None
assert isinstance(result, pymarc.Record)
assert result["001"].value() == "finc-15-dmFsc3NrcmFtc3RhZGhhbnM"
assert result["100"]["a"] == "Skramstad, Hans"
assert result["245"]["a"] == "Vals for pianoforte"
assert result["856"]["u"] == "http://imslp.org/wiki/Vals_(Skramstad,_Hans)"
def test_de_listify():
cases = (
(None, None),
("", ""),
([], None),
({1, 2, 3}, 1),
([1, 2, 3], 1),
)
for v, expected in cases:
assert de_listify(v) == expected
def test_osf_to_intermediate():
cases = (
(None, None),
({}, None),
(json.loads("""
{
"id": "egcsk",
"type": "preprints",
"attributes": {
"date_created": "2021-07-19T07:32:33.252615",
"date_modified": "2021-07-19T07:42:12.725457",
"date_published": "2021-07-19T07:41:43.501204",
"original_publication_date": "2021-02-28T17:00:00",
"doi": null,
"title": "Konsep Allah Dalam Teologi Proses",
"description": "Penulisan karya ilmiah ini dikhususkan untuk membahas mengenai Allah yang dirumuskan dalam teologi proses, yang dicetuskan oleh Alfred Whitehead. Dalam bagian bagian ini penulis menyajikan konsep Allah dalam teologi proses dan bagaimana tanggapan terhadap konsep tersebut secara Alkitabiah Metode penelitian, penulis menggunakan pendekatan metode penelitian kualitatif analisis deskriptif, dengan pendekatan literatur dan tergolong dalam penelitian perpustakaan. Konsep Allah menurut teologi proses adalah Allah yang berproses, tidak berpribadi dan tidak memiliki kedaulatan absolut. Namun pandangan tentang Allah dalam teologi proses adalah suatu kumpulan pengalaman pribadi dan dijadikan sebagai suatu konsep dalam pemikiran manusia. Tanggapan tersebut menunjukan perbandingan dari pola pikir teologi proses mengenai Allah yang menyimpang dan mengarahkan seseorang dalam memahami konsep Allah yang benar sesuai dengan pernyataan Allah m",
"is_published": true,
"is_preprint_orphan": false,
"license_record": {
"copyright_holders": [
""
],
"year": "2021"
},
"tags": [
"Gambar",
"Respon",
"Teologi Proses",
"Tuhan"
],
"preprint_doi_created": "2021-07-19T07:42:12.695116",
"date_withdrawn": null,
"current_user_permissions": [],
"public": true,
"reviews_state": "accepted",
"date_last_transitioned": "2021-07-19T07:41:43.501204",
"has_coi": false,
"conflict_of_interest_statement": null,
"has_data_links": "no",
"why_no_data": null,
"data_links": [],
"has_prereg_links": "no",
"why_no_prereg": null,
"prereg_links": [],
"prereg_link_info": "",
"subjects": [
[
{
"id": "584240da54be81056cecaab4",
"text": "Arts and Humanities"
},
{
"id": "584240da54be81056cecaa9c",
"text": "Religion"
},
{
"id": "584240da54be81056cecaaf5",
"text": "Christianity"
}
]
]
},
"relationships": {
"contributors": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/contributors/",
"meta": {}
}
}
},
"bibliographic_contributors": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/bibliographic_contributors/",
"meta": {}
}
}
},
"citation": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/citation/",
"meta": {}
}
},
"data": {
"id": "egcsk",
"type": "preprints"
}
},
"identifiers": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/identifiers/",
"meta": {}
}
}
},
"node": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/uka4p/",
"meta": {}
},
"self": {
"href": "https://api.osf.io/v2/preprints/egcsk/relationships/node/",
"meta": {}
}
},
"data": {
"id": "uka4p",
"type": "nodes"
}
},
"license": {
"links": {
"related": {
"href": "https://api.osf.io/v2/licenses/563c1cf88c5e4a3877f9e96a/",
"meta": {}
}
},
"data": {
"id": "563c1cf88c5e4a3877f9e96a",
"type": "licenses"
}
},
"provider": {
"links": {
"related": {
"href": "https://api.osf.io/v2/providers/preprints/osf/",
"meta": {}
}
},
"data": {
"id": "osf",
"type": "preprint-providers"
}
},
"files": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/files/",
"meta": {}
}
}
},
"primary_file": {
"links": {
"related": {
"href": "https://api.osf.io/v2/files/60f52a94f1369301d5793a17/",
"meta": {}
}
},
"data": {
"id": "60f52a94f1369301d5793a17",
"type": "files"
}
},
"review_actions": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/review_actions/",
"meta": {}
}
}
},
"requests": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/requests/",
"meta": {}
}
}
}
},
"links": {
"self": "https://api.osf.io/v2/preprints/egcsk/",
"html": "https://osf.io/egcsk/",
"preprint_doi": "https://doi.org/10.31219/osf.io/egcsk"
}
}"""), {
'abstract':
'Penulisan karya ilmiah ini dikhususkan untuk membahas mengenai '
'Allah yang dirumuskan dalam teologi proses, yang dicetuskan oleh '
'Alfred Whitehead. Dalam bagian bagian ini penulis menyajikan '
'konsep Allah dalam teologi proses dan bagaimana tanggapan '
'terhadap konsep tersebut secara Alkitabiah Metode penelitian, '
'penulis menggunakan pendekatan metode penelitian kualitatif '
'analisis deskriptif, dengan pendekatan literatur dan tergolong '
'dalam penelitian perpustakaan. Konsep Allah menurut teologi '
'proses adalah Allah yang berproses, tidak berpribadi dan tidak '
'memiliki kedaulatan absolut. Namun pandangan tentang Allah dalam '
'teologi proses adalah suatu kumpulan pengalaman pribadi dan '
'dijadikan sebagai suatu konsep dalam pemikiran manusia. '
'Tanggapan tersebut menunjukan perbandingan dari pola pikir '
'teologi proses mengenai Allah yang menyimpang dan mengarahkan '
'seseorang dalam memahami konsep Allah yang benar sesuai dengan '
'pernyataan Allah m',
'authors': [{
'rft.aufirst': 'Ceria',
'rft.aulast': 'Ceria'
}],
'doi':
'10.31219/osf.io/egcsk',
'finc.format':
'Article',
'finc.id':
'ai-191-egcsk',
'finc.mega_collection': ['sid-191-col-osf', 'Osf'],
'finc.source_id':
'191',
'languages': ['eng'],
'rft.atitle':
'Konsep Allah Dalam Teologi Proses',
'rft.date':
'2021-07-19',
'rft.genre':
'article',
'rft.jtitle':
'osf',
'rft.pub': ['OSF Preprints'],
'subjects': ['Gambar', 'Respon', 'Teologi Proses', 'Tuhan'],
'url': ['https://doi.org/10.31219/osf.io/egcsk'],
'x.date':
'2021-07-19T07:42:12.695116Z',
}),
)
for v, expected in cases:
assert osf_to_intermediate(v) == expected
|
miku/siskin
|
siskin/test_conversions.py
|
Python
|
gpl-3.0
| 11,642
|
# news key word catch
import os
import random
import time
import tushare as ts
import math
import pandas
import threading
from MYSORT import *
from programdiary import *
import Stock_config_kit as Skit
import ForgeModel
COLLECTORSHOWNUM=5
fgt={'a':0.01,'a_2':0.01,'lam':0.01}
DIARYNAME='DIARY_Ver.0.1_ty2_0.01_0.01'
def readsinatime(timestr):
if timestr:
try:
Year=int(time.strftime('%Y',time.gmtime()) )
[F,L]=timestr.split(' ')
[mon,day]=F.split('-')
[hour,minus]=L.split(':')
mon=int(mon)
day=int(day)
hour=int(hour)
minus=int(minus)
# in time.mktime the #6, #7's value do not matter (#0--#8)
except:
print('timestr could not be unpacked: %s' % timestr)
[Year,mon,day,hour,minus]=[2016,0,0,0,0]
else:
[Year,mon,day,hour,minus]=[2016,0,0,0,0]
return time.mktime( (Year,mon,day,hour,minus,0,0,0,0) )
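# Usage sketch (the code above implies the Sina timestamp format 'MM-DD HH:MM'):
#   readsinatime('06-01 12:30')  # epoch seconds for June 1st, 12:30, current year
#   readsinatime('')             # fallback epoch value used as a "no timestamp" sentinel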
class loopobj(threading.Thread):
loopflag=0 #1 means stop
def __init__(self):
threading.Thread.__init__(self)
def setlooppara(self,span,funchandle,*funcpara):
self.span=span
self.funch=funchandle
self.funcpara=funcpara
def stoploop(self):
self.loopflag=1
    def timeloop(self):
        print(self.name + ': loop start')
        while self.loopflag == 0:
            time.sleep(self.span)
            if self.funcpara[0] is None:
                self.funch()
            else:
                # unpack the stored parameters directly; the old eval()-based
                # call built an argument string without commas between them
                self.funch(*self.funcpara)
        print(self.name + ' timeloop end')
def run(self):
self.timeloop()
class Keyword:
wordset=None
Counter=0
distribution=None
weight=None
show_dis_flag=False
def __init__(self,wordset,weight):
if(isinstance(wordset,list)):
self.wordset=wordset
            self.distribution=[0 for n in range(len(wordset))]
self.weight=weight
self._sortkeyword()
    def modify_keyword(self,mode,M_KW):
        if isinstance(mode,int) and mode in (1,2):
            if(mode==1):
                # delete keywords and their matching distribution
                # and weight entries from KW
                for KW in M_KW:
                    I=self.wordset.index(KW)
                    del self.wordset[I]
                    del self.distribution[I]
                    del self.weight[I]
            elif(mode==2):
                for KW in M_KW:
                    self.wordset.append(KW)
                    self.distribution.append(0)
                    self.weight.append(1)
                self._sortkeyword()
        else:
            print('Unknown Mode Number. mode=1 for subtracting keywords, mode=2 for adding keywords')
def modify_distri(self,prey):
ind=0
for ele in prey:
self.distribution[ind]+=ele
ind+=1
def show_distri(self):
# print like 'word': times 'word2': times
i=0
output=''
while i<len(self.distribution):
output+='%s: %s '%(self.wordset[i],self.distribution[i])
i+=1
print(output)
    def hunt(self,preystr):
        # count occurrences of every keyword in the prey string and
        # accumulate them into the distribution
        dis=[0 for n in range(len(self.wordset))]
        c_flag=False
        for i,key in enumerate(self.wordset):
            findcounter=0
            findhead=-1
            while True:
                findhead=preystr.find(key,findhead+1)
                if findhead == -1:
                    break
                findcounter+=1
                c_flag=True
            dis[i]+=findcounter
        self.modify_distri(dis)
        if c_flag and self.show_dis_flag:
            self.show_distri()
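    # Usage sketch (hypothetical keywords; mysort comes from MYSORT):
    #   kw = Keyword(['600000', 'bank'], [1, 1])
    #   kw.hunt('bank merger: bank 600000 rallies')
    #   kw.show_distri()  # prints per-keyword hit counts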
    def _sortkeyword(self):
        try:
            Capital_L_one=[]
            for element in self.wordset:
                try:
                    # sort keywords by the code point of their first character
                    Capital_L_one.append( ord( element[0] ) )
                except:
                    print(getattr(self,'name',''),' :',element )
            [self.wordset,self.distribution,self.weight]=mysort(Capital_L_one,self.wordset,self.distribution,self.weight)
        except:
            print(getattr(self,'name',''),' ',self.wordset)
class stock_info(Keyword,loopobj,ForgeModel.Forge):
name=''
code=''
browsetime=''
Name_W=1
Code_W=1
Area_W=1
Ind_W=2
Con_W=2
def __init__(self,name='',code='',area=[],industry=[],concept=[],a=fgt['a'],a_2=fgt['a_2'],lam=fgt['lam']):
        # TODO: later, add a return value to create more calculations
self.name=name
self.code=code
self.area=area
self.industry=industry
self.concept=concept
#self.business=business
ForgeModel.Forge.__init__(self,a,a_2,lam)
Keyword.__init__(self,[code]+[name]+area+industry+concept,self.ini_weight() )
loopobj.__init__(self)
def ini_weight(self):
W=[self.Code_W,self.Name_W]
if self.area:
W.append(self.Area_W)
if isinstance( self.industry,list) and self.industry!=[]:
for i in self.industry:
W.append(self.Ind_W)
if isinstance( self.concept,list) and self.concept!=[]:
for i in self.concept:
W.append(self.Con_W)
return W
    def Trum(self,Newsobj):
        if Newsobj.latesttime!=self.browsetime:
            brt=readsinatime(self.browsetime)
            newst=readsinatime(Newsobj.latesttime)
            # readsinatime() returns a float, so compare against the value it
            # yields for an empty/unparsable timestamp, not against a tuple
            default_time=readsinatime('')
            if brt==default_time or newst==default_time:
                deltat=0
            else:
                deltat=newst-brt
            self.stimulate_forge_type2(self.distribution,deltat)
            self.browsetime=Newsobj.latesttime
            self.hunt(Newsobj.latestnew)
class News(loopobj):
name=None
newslength=1
show_c=True
Newsmemory= pandas.DataFrame()
latesttime=''
latestnew=''
def __init__(self,name):
self.name=name
loopobj.__init__(self)
def modify_newspara(self,nl,show_c):
self.newslength=nl
self.show_c=show_c
def Newsget(self):
#PDnews=ts.get_latest_news(top=self.newslength,show_content=self.show_c)
PDnews=ts.get_latest_news(top=self.newslength,show_content=False)
try:
if PDnews.ix[0,'time']!=self.latesttime:
                self.Newsmemory=pandas.concat([self.Newsmemory,PDnews],axis=0) # concatenate row-wise
#self.latesttime=PDnews.ix[0,'time']
if self.show_c:
try:
Content=None
C_C=0
while not Content and C_C<5:
Content=ts.latest_content(PDnews.ix[0,'url'])
C_C+=1
except:
print('latest_content api fail to load url:%s'%PDnews.ix[0,'url'])
Content=''
#self.latestnew=PDnews.ix[0,'classify']+PDnews.ix[0,'title']+Content
self.latestnew=PDnews.ix[0,'title']+Content
else:
#self.latestnew=PDnews.ix[0,'classify']+PDnews.ix[0,'title'] #maybe content later
self.latestnew=PDnews.ix[0,'title']
print(PDnews[['classify','title','time']])
if PDnews.ix[0,'time']:
print('old latesttime %s'%self.latesttime)
self.latesttime=PDnews.ix[0,'time']
print('new latesttime %s'%self.latesttime)
except:
print('Get Latest News Error')
class Collector(loopobj):
name=None
dissum=None
showNum=COLLECTORSHOWNUM
def __init__(self,name,diary,configfile):
self.name=name
self.diaryfile=diary
self.configfile=configfile
loopobj.__init__(self)
    def info_collect(self,stocklist):
        # weighted sum of each stock's keyword-hit distribution
        self.dissum=[0 for i in range(len(stocklist))]
        for i, stock in enumerate( stocklist):
            _dissum=0
            for j,ele_dis in enumerate( stock.distribution):
                _dissum+=ele_dis*stock.weight[j]
            self.dissum[i]=_dissum
def info_process(self,stocklist):
self._indexlist=[]
self.counterlist=[]
self.orderlist=[]
Ind=0
OldInd=None
indexlist=[n for n in range(len(stocklist)) ]
[indexlist]=mysort(self.dissum,indexlist)
self.dissum.reverse()
indexlist.reverse()
for i in range(self.showNum):
temp=self.dissum[Ind]
self.orderlist.append(temp)
counter=1
if Ind+1<len(self.dissum):
for j in range(Ind+1,len(self.dissum)):
if self.dissum[j]==temp:
counter+=1
else:
OldInd=Ind
Ind=Ind+counter
break
if OldInd!=None :
self.counterlist.append(counter)
self._indexlist.append(indexlist[OldInd:OldInd+counter])
def report(self,stocklist):
reportlist=[]
for i in range( len(self.counterlist) ):
freq=self.counterlist[i]
order=self.orderlist[i]
if order!=0 and freq != len(stocklist):
SNameList=[]
for j in range( len( self._indexlist[i] )):
ind=self._indexlist[i][j]
SNameList.append(stocklist[ind].name)
restr='Order: %.5f , Freq: %d, Stock: %s'%(order,freq,','.join(SNameList))
reportlist.append(restr)
else:
                reportlist.append('Order is Zero or Frequency is the length of stocklist')
self.diaryfile.get_message(reportlist)
self.diaryfile.update_txtdiary()
def change_conf(self,stocklist):
SNameList=[]
for i in range( len(self.counterlist) ):
if self.orderlist[i]!=0 and self.counterlist[i] != len(stocklist):
for j in range( len( self._indexlist[i] )):
ind=self._indexlist[i][j]
SNameList.append(stocklist[ind].code)
self.configfile.KW_modify(code=SNameList)
self.configfile.KW_save_config()
def collector(self,stocklist):
print('##############Inking the diary#############')
self.info_collect(stocklist)
self.info_process(stocklist)
self.report(stocklist)
self.change_conf(stocklist)
print('#############Report Finish#############')
def ini_classfication():
Industry=ts.get_industry_classified()
Concept=ts.get_concept_classified()
Area=ts.get_area_classified()
_Codelist=Area[['code']]
Codelist=[]
for i in range(len(_Codelist) ):
Codelist.append(_Codelist.ix[i,0])
return [Codelist,Area,Concept,Industry]
def stock_classfication(code,Area,Concept,Industry):
area=Area.query('code=='+"'"+code+"'")
_area=area[['area']]
_name=area[['name']]
try:
_name=str(_name.iloc[0,0])
except:
        _name='未知' # "unknown" placeholder name
# area=area.get_value(0,0)
try:
_area=[_area.iloc[0,0]]
except:
_area=[]
#or Area[Area.code.isin([code])]
concept=Concept.query('code=='+"'"+code+"'")
_concept=concept[['c_name']]
try:
__concept=[]
for i in range( len(_concept) ):
__concept.append( _concept.iloc[i,0] )
except:
__concept=[]
industry=Industry.query('code=='+"'"+code+"'")
try:
_industry=industry[['c_name']]
_industry=_industry.iloc[0,0]
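        # strip the Chinese suffix '行业' ("industry") from the sector name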
_industry=_industry.replace('行业','')
if len(_industry)==4:
_industry=[_industry[0:2],_industry[2:4]]
else:
_industry=[_industry]
except:
_industry=[]
return [_name,_area,__concept,_industry]
def prelearn_weight_s(stockobj,STR):
stockobj.hunt(STR)
def prelearn_weight(stockobjlist,strlist):
for STR in strlist:
for stock in stockobjlist:
try:
prelearn_weight_s(stock,STR)
except:
print("Stock %s prelearn failed"%stock.name)
#12 min to download 1000 news with content.
def SINA_prelearn(stockobjlist,newslength,with_c=False):
download_flag=False
while not download_flag:
PDnews=ts.get_latest_news(top=newslength,show_content=with_c)
try:
len(PDnews)
download_flag=True
print("prelearn news download finished.")
except:
print("the newslength %d didn't work. We minus it with 100 and try again."%newslength)
newslength=newslength-100
Newsstr=[]
if(with_c):
for i in range( len( PDnews) ): #len(PDnews.index)
Newsstr.append(PDnews.ix[i,'classify']+PDnews.ix[i,'title']+PDnews.ix[i,'content'])
else:
for i in range(len( PDnews)):
Newsstr.append(PDnews.ix[i,'classify']+PDnews.ix[i,'title'])
prelearn_weight(stockobjlist,Newsstr)
def test():
# a=stock_info(name='a',code='000000',area=['概率'],industry=['方法','还好'],concept=['沪江','了就'])
# a.distribution=[0,0,0,0,0,0,0]
# b=stock_info(name='b',code='000000',area=[],industry=['方法','还好'],concept=['沪江','了就'])
# b.distribution=[0,0,0,0,0,2]
# c=stock_info(name='c',code='000000',area=['概率'],industry=[],concept=['沪江','了就'])
# c.distribution=[0,0,0,0,1]
# d=stock_info(name='d',code='000000',area=['概率'],industry=['方法','还好'],concept=[])
# d.distribution=[0,0,0,0,0]
# e=stock_info(name='e',code='000000',area=['概率'],industry=['方法'],concept=['沪江','了就'])
# e.distribution=[0,0,0,0,0,1]
# f=stock_info(name='f',code='000000',area=['概率'],industry=['方法'],concept=['沪江','了就'])
# f.distribution=[0,0,0,0,0,2]
path='%s%s'%(os.path.dirname(__file__),'/diary/')
#path='%s%s'%(os.path.dirname(os.path.abspath('__file__')),'/diary/')
#path='%s%s'%(os.getcwd(),'/diary/')
diary=diaryfile(rootpath=path,name=DIARYNAME,suffix='txt')
# testColl=Collector('SINA_COLLECTOR',diary)
# testColl.collector([a,b,c,d,e,f])
# diary.get_message('test')
# diary.update_txtdiary()
# diary.txtfile.close()
Conf=Skit.configfile('StockP_config.json')
# Testobj=stock_info(name='首钢',code='000959',industry=['普钢'])
# Testobj.setlooppara(5,Testobj.News)
# Testobj.start()
#--------------------News test-----------------------------
Newsobj=News('SINA_FORCAST_NEWS')
Newsobj.setlooppara(1,Newsobj.Newsget,None)
Newsobj.start()
# a=0
# while not a:
# a=bool(input())
# if a==1 or a==' ':
# Testobj.stoploop()
# print('stop loop The world!!!!!')
#----------------------Sort test-------------------------#
# Testobj=stock_info(name='首钢',code='000959',industry=['普钢','美少女','名给','哲学'])
# Testobj.show_distri()
# Testobj.hunt('普钢里面有美少女不过也有个明给')
# Testobj.show_distri()
#----------------------Initial test--------------#
# [Codelist,Area,Concept,Industry]=ini_classfication()
# [name,ar,co,ind]=stock_classfication('000959',Area,Concept,Industry)
# Stockobj=stock_info(name=name,code='000959',area=[ar],industry=[ind],concept=[co])
# Stockobj.show_distri()
#---------------------hunt test----------------------#
Stockobj_chain=[]
ini_class_flag = True
while ini_class_flag:
try:
[Codelist,Area,Concept,Industry]=ini_classfication()
ini_class_flag=False
except:
print("Fail to download the stock classification data, We try it again...")
ini_class_flag = True
testcoun=0
#print(Codelist)
for code in Codelist:
[name,ar,co,ind]=stock_classfication(code,Area,Concept,Industry)
Stockobj=stock_info(name=name,code=code,area=ar,industry=ind,concept=co)
if Stockobj.name!='未知':
Stockobj.setlooppara(1,Stockobj.Trum,Newsobj)
Stockobj.start()
# Stockobj.show_distri()
Stockobj_chain.append(Stockobj)
SINA_prelearn(Stockobj_chain,2000,0)
# except:
# print('stock initial error')
# for stock in Stockobj_chain:
Coll=Collector('SINA_COLLECTOR',diary,Conf)
Coll.setlooppara(60*10,Coll.collector,Stockobj_chain)
Coll.start()
if __name__ == '__main__': test()
|
AuroraLHT/Akagi
|
KW.py
|
Python
|
gpl-3.0
| 17,607
|
#!/usr/bin/env python
import pygame
from tools import singleton
@singleton
class Audio(object):
    def __init__(self, initial_musics=None, initial_sounds=None):
        if pygame.mixer.get_init() is None:
            pygame.mixer.init()
        self.__mute = False
        # avoid shared mutable default arguments
        self.__sounds = initial_sounds if initial_sounds is not None else {}
        self.__musics = initial_musics if initial_musics is not None else {}
def register_sound(self, sound_id, sound_object):
self.__sounds[sound_id] = sound_object
def register_music(self, music_id, music_object):
self.__musics[music_id] = music_object
    def unregister_sound(self, sound_id):
        if sound_id not in self.__sounds:
            return False
        del(self.__sounds[sound_id])
        return True
    def unregister_music(self, music_id):
        if music_id not in self.__musics:
            return False
        del(self.__musics[music_id])
        return True
@property
def sounds(self):
return self.__sounds.keys()
@property
def musics(self):
return self.__musics.keys()
@property
def is_muted(self):
return self.__mute
def mute(self):
if self.is_muted:
return
pygame.mixer.music.stop()
self.__mute = True
    def unmute(self):
        if not self.is_muted:
            return
        self.__mute = False
        pygame.mixer.music.play(-1)
def set_mute(self, new_state=True):
if new_state:
self.mute()
else:
self.unmute()
    def set_bgm_music(self, music_id):
        if music_id not in self.musics:
            return False
        pygame.mixer.music.load(self.__musics[music_id])
        if not self.is_muted:
            pygame.mixer.music.play(-1)
        return True
def play_sound(self, sound_id):
if self.is_muted:
return True
if sound_id not in self.sounds:
return False
self.__sounds[sound_id].play()
return True
# Create default instance
AUDIO = Audio()
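# Usage sketch (hypothetical ids and asset paths):
#   AUDIO.register_music('bgm', 'assets/theme.ogg')
#   AUDIO.register_sound('beep', pygame.mixer.Sound('assets/beep.wav'))
#   AUDIO.set_bgm_music('bgm')
#   AUDIO.play_sound('beep')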
|
int-0/aftris
|
beatbox.py
|
Python
|
gpl-3.0
| 1,953
|
#! /usr/bin/env python
import rospy
import roslib
roslib.load_manifest('clothing_type_classification')
import actionlib
import clothing_type_classification.msg
import std_msgs
from sensor_msgs.msg import Image
from clothing_type_classification.msg import ClothesArray, Clothes
# Specify the target centroid points and area of each Clothes object here: [x, y, z, area]
result_clothes = [[0.5, 0.0, 0.7, 50]]
class ClothesDetectionDummy(object):
def __init__(self, name):
self._action_name = name
self._as = actionlib.SimpleActionServer(self._action_name,
clothing_type_classification.msg.FindClothesAction,
self.execute_cb, False)
self._feedback = clothing_type_classification.msg.FindClothesFeedback()
self._result = clothing_type_classification.msg.FindClothesResult()
self._as.start()
print "Current Clothes: "
index = 0
for i in result_clothes:
print "clothes[" + str(index) + "] = " + str(i)
index += 1
print "-------------Complete Initialization------------------"
def execute_cb(self, goal):
global result_clothes
rospy.loginfo("-------Start Execution-----")
ca = ClothesArray()
ca.header.frame_id = "base_link"
ca.header.stamp = rospy.Time.now()
for i in result_clothes:
ca.array.append(self.create_clothes(i))
print "array => " + str(ca)
self._result.result = ca
print "result : " + str(type(self._result.result))
print str(self._result.result)
#self._result.result = ClothesArray()
self._as.set_succeeded(self._result)
def create_clothes(self, centroid_and_area):
tmp = Clothes()
tmp.type = "Unknown"
tmp.image = Image()
tmp.centroid.x = centroid_and_area[0]
tmp.centroid.y = centroid_and_area[1]
tmp.centroid.z = centroid_and_area[2]
tmp.area = centroid_and_area[3]
return tmp
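    # e.g. create_clothes([0.5, 0.0, 0.7, 50]) yields a Clothes msg with
    # centroid (0.5, 0.0, 0.7) and area 50 (the entry defined in result_clothes)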
if __name__ == '__main__':
rospy.init_node('clothes_detection_node')
ClothesDetectionDummy(rospy.get_name())
rospy.spin()
|
kandithws/clothes_detection_egbis
|
clothes_detection/test_nodes/clothes_detection_dummy_node.py
|
Python
|
gpl-3.0
| 2,197
|
#### FORMS
from flask import current_app
from flask.ext.wtf import Form
from flask.ext.security.forms import RegisterForm, LoginForm, RegisterFormMixin
from wtforms import (SelectField, StringField, SubmitField, TextAreaField,
        HiddenField, FileField, RadioField, IntegerField, ValidationError,
        PasswordField)
from wtforms.fields.html5 import URLField
from wtforms.validators import Length, DataRequired, AnyOf, Regexp, NumberRange, Optional, Email, URL
from flask.ext.wtf.file import FileAllowed, FileField
from werkzeug.local import LocalProxy
from zxcvbn import password_strength
_datastore = LocalProxy(lambda: current_app.extensions['security'].datastore)
def good_enough_password(form, field):
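    # zxcvbn scores passwords from 0 (weakest) to 4 (strongest); only
    # the top score passes this validator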
if password_strength(field.data)['score'] < 4:
msg = 'Get a better password'
raise ValidationError(msg)
def unique_user_username(form, field):
if _datastore.find_user(username=field.data) is not None:
msg = '{0} is already associated with an account.'.format(field.data)
raise ValidationError(msg)
def unique_user_email(form, field):
if _datastore.get_user(field.data) is not None:
        msg = '{} is already associated with an account'.format(field.data)
raise ValidationError(msg)
class ExtendedRegisterForm(RegisterForm):
username=StringField('Username', [DataRequired(),
Regexp(r'^\w+$', message="Only alphanumeric characters"),
Length(min=4, max=20),
unique_user_username])
class RegisterForm(Form, RegisterFormMixin):
email=StringField('Email', [DataRequired(), Email(), unique_user_email])
username=StringField('Username', [DataRequired(),
Regexp(r'^\w+$', message="Only alphanumeric characters"),
Length(min=4, max=20),
unique_user_username])
password=PasswordField('Password', [DataRequired(), good_enough_password])
class ChangePasswordForm(Form):
password=PasswordField('New password', [DataRequired(),
good_enough_password])
submit=SubmitField('Update')
class ExtendedLoginForm(LoginForm):
email=StringField('Login', [DataRequired()])
class OpenIssueForm(Form):
severity=SelectField('Severity', choices=[('Critical', 'Critical'),
('Medium', 'Medium'),
('Low', 'Low'),
('Future', 'Future')])
type=SelectField('Type', choices=[('Plumbing', 'Plumbing'),
('Electrical', 'Electrical'),
('Heating/Air Conditioning', 'Heating/Air Conditioning'),
('Cleaning', 'Cleaning'),
('Other', 'Other')])
photos=FileField('Photo', validators=[FileAllowed(['jpg', 'jpeg', 'png'], 'Images only!')])
description=TextAreaField('Description', [DataRequired()])
submit=SubmitField('Open')
class CloseIssueForm(Form):
reason=TextAreaField('Reason', [DataRequired()])
submit=SubmitField('Close')
class AddLandlordForm(Form):
location=SelectField('Location', coerce=int)
submit=SubmitField('Add')
class EndLandlordForm(Form):
end=HiddenField(default='True', validators=[AnyOf('True')])
submit=SubmitField('End')
class ConfirmTenantForm(Form):
confirm=SubmitField('Confirm', default='True')
disallow=SubmitField('Disallow', default='False')
class AddTenantForm(Form):
user=StringField('User', [DataRequired()])
apt=SelectField('Property', coerce=int)
submit=SubmitField('Invite')
class CommentForm(Form):
comment=TextAreaField('Comment', [DataRequired()])
submit=SubmitField('Add Comment')
class AddPropertyForm(Form):
unit=IntegerField('Unit:', [Optional(), NumberRange(min=1)])
address=StringField('Address:', [DataRequired()])
city=StringField('City:', [DataRequired()])
state=StringField('State:', [DataRequired()])
description=TextAreaField('Description:', [DataRequired()])
submit=SubmitField('Add Property')
class AddProviderForm(Form):
name=StringField('Name:', [DataRequired()])
area=SelectField('Area:', choices=[('Plumbing', 'Plumbing'),
('Electrical', 'Electrical'),
('Heating/Air Conditioning', 'Heating/Air Conditioning'),
('Cleaning', 'Cleaning'),
('Other', 'Other')])
email=StringField('Email:', [Email(), DataRequired()])
phone=StringField('Phone #:', [Optional(), Length(min=10)])
website=StringField('Website:', [Optional(), URL()])
submit=SubmitField('Add Provider')
class SelectProviderForm(Form):
provider=SelectField('Provider:', choices=[])
submit=SubmitField('Select Provider')
class ConnectProviderForm(Form):
action=SubmitField('Connect')
class ModifyPropertyForm(Form):
description=TextAreaField('Description:', [DataRequired()])
submit=SubmitField('Modify Property')
class AddPhoneNumber(Form):
phone=StringField('Phone #:', [DataRequired(), Length(min=10)])
country=SelectField('Country', choices=[('1', 'US'), ('02', 'UK')])
submit=SubmitField('Update number')
class ChangeNotifyForm(Form):
method=SelectField('Method', choices=[('Email', 'Email'),
('None', 'None')])
submit=SubmitField('Confirm')
class ResendNotifyForm(Form):
resend=SubmitField('Resend email', default='True')
class ImportYelpURLForm(Form):
url=URLField('Yelp URL')
submit=SubmitField('Import')
class SelectYelpProviderForm(Form):
id_=HiddenField()
submit=SubmitField('Save')
class ConfirmYelpChoiceForm(Form):
provider=HiddenField()
confirm=SubmitField('Confirm')
__all__=['AddLandlordForm', 'AddPhoneNumber', 'AddPropertyForm',
'AddProviderForm', 'AddTenantForm', 'ChangeNotifyForm',
'CloseIssueForm', 'CommentForm', 'ConfirmTenantForm',
'ConnectProviderForm', 'EndLandlordForm', 'ExtendedLoginForm',
'ExtendedRegisterForm', 'ModifyPropertyForm', 'OpenIssueForm',
'ResendNotifyForm', 'SelectProviderForm']
|
teffalump/rentport
|
common/forms.py
|
Python
|
gpl-3.0
| 6,442
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from datetime import datetime
from stacosys.model.comment import Comment
TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
def find_comment_by_id(id):
return Comment.get_by_id(id)
def notify_comment(comment: Comment):
comment.notified = datetime.now().strftime(TIME_FORMAT)
comment.save()
def publish_comment(comment: Comment):
comment.published = datetime.now().strftime(TIME_FORMAT)
comment.save()
def delete_comment(comment: Comment):
comment.delete_instance()
def find_not_notified_comments():
return Comment.select().where(Comment.notified.is_null())
def find_not_published_comments():
return Comment.select().where(Comment.published.is_null())
def find_published_comments_by_url(url):
return Comment.select(Comment).where((Comment.url == url) & (Comment.published.is_null(False))).order_by(
+Comment.published)
def count_published_comments(url):
return Comment.select(Comment).where(
(Comment.url == url) & (Comment.published.is_null(False))).count() if url else Comment.select(Comment).where(
Comment.published.is_null(False)).count()
def create_comment(url, author_name, author_site, author_gravatar, message):
created = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
comment = Comment(
url=url,
author_name=author_name,
author_site=author_site,
author_gravatar=author_gravatar,
content=message,
created=created,
notified=None,
published=None,
)
comment.save()
return comment
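# Usage sketch (hypothetical values), assuming the peewee database is initialized:
#   c = create_comment('/post/1', 'Alice', '', '', 'Nice post!')
#   publish_comment(c)
#   count_published_comments('/post/1')  # -> 1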
|
kianby/stacosys
|
stacosys/db/dao.py
|
Python
|
gpl-3.0
| 1,576
|
'''
A Mini-implementation of the Storlet middleware filter.
@author: josep sampe
'''
from swift.common.utils import get_logger
from swift.common.utils import register_swift_info
from swift.common.swob import Request
from swift.common.utils import config_true_value
from storlets.swift_middleware.handlers.base import SwiftFileManager
from swift.common.swob import wsgify
class StorletFilter(object):
def __init__(self, app, conf):
self.app = app
self.conf = conf
self.exec_server = self.conf.get('execution_server')
self.logger = get_logger(self.conf, log_route='storlet_filter')
self.filter_data = self.conf['filter_data']
self.parameters = self.filter_data['params']
self.gateway_class = self.conf['storlets_gateway_module']
self.sreq_class = self.gateway_class.request_class
self.storlet_container = conf.get('storlet_container')
self.storlet_dependency = conf.get('storlet_dependency')
self.log_container = conf.get('storlet_logcontainer')
self.client_conf_file = '/etc/swift/storlet-proxy-server.conf'
self.register_info()
def register_info(self):
register_swift_info('storlet_filter')
def _setup_gateway(self):
"""
Setup gateway instance
"""
self.gateway = self.gateway_class(self.conf, self.logger, self.scope)
    def _augment_storlet_request(self, req):
        """
        Add the storlet parameters to the request so they are available in
        case the request is forwarded to the data node (GET case)
        :param req: swob.Request to augment with storlet headers
        """
req.headers['X-Storlet-Language'] = self.filter_data['language']
req.headers['X-Storlet-Main'] = self.filter_data['main']
req.headers['X-Storlet-Dependency'] = self.filter_data['dependencies']
req.headers['X-Storlet-Content-Length'] = self.filter_data['size']
req.headers['X-Storlet-Generate-Log'] = False
req.headers['X-Storlet-X-Timestamp'] = 0
def _get_storlet_invocation_options(self, req):
options = dict()
filtered_key = ['X-Storlet-Range', 'X-Storlet-Generate-Log']
for key in req.headers:
prefix = 'X-Storlet-'
if key.startswith(prefix) and key not in filtered_key:
new_key = 'storlet_' + \
key[len(prefix):].lower().replace('-', '_')
options[new_key] = req.headers.get(key)
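                # e.g. the 'X-Storlet-Language' header set by
                # _augment_storlet_request becomes options['storlet_language']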
generate_log = req.headers.get('X-Storlet-Generate-Log')
options['generate_log'] = config_true_value(generate_log)
options['scope'] = self.scope
options['file_manager'] = \
SwiftFileManager(self.account, self.storlet_container,
self.storlet_dependency, self.log_container,
self.client_conf_file, self.logger)
return options
def _build_storlet_request(self, req_resp, params, data_iter):
storlet_id = self.storlet_name
new_env = dict(req_resp.environ)
req = Request.blank(new_env['PATH_INFO'], new_env)
req.headers['X-Run-Storlet'] = self.storlet_name
self._augment_storlet_request(req)
options = self._get_storlet_invocation_options(req)
if hasattr(data_iter, '_fp'):
sreq = self.sreq_class(storlet_id, params, dict(),
data_fd=data_iter._fp.fileno(),
options=options)
else:
sreq = self.sreq_class(storlet_id, params, dict(),
data_iter, options=options)
return sreq
def _call_gateway(self, req_resp, params, crystal_iter):
sreq = self._build_storlet_request(req_resp, params, crystal_iter)
sresp = self.gateway.invocation_flow(sreq)
return sresp.data_iter
@wsgify
def __call__(self, req):
if req.method in ('GET', 'PUT'):
storlet = self.filter_data.pop('name')
params = self.parameters
self.storlet_name = storlet
etag = None
try:
if self.exec_server == 'proxy':
_, self.account, _, _ = req.split_path(4, 4, rest_with_last=True)
elif self.exec_server == 'object':
_, _, self.account, _, _ = req.split_path(5, 5, rest_with_last=True)
except:
# No object Request
return req.get_response(self.app)
self.scope = self.account[5:18]
            self.logger.info('Going to execute ' + storlet +
                             ' storlet with parameters "' + str(params) + '"')
self._setup_gateway()
if 'Etag' in req.headers.keys():
etag = req.headers.pop('Etag')
if req.method == 'GET':
response = req.get_response(self.app)
data_iter = response.app_iter
response.app_iter = self._call_gateway(response, params, data_iter)
if 'Content-Length' in response.headers:
response.headers.pop('Content-Length')
if 'Transfer-Encoding' in response.headers:
response.headers.pop('Transfer-Encoding')
elif req.method == 'PUT':
reader = req.environ['wsgi.input'].read
data_iter = iter(lambda: reader(65536), '')
req.environ['wsgi.input'] = self._call_gateway(req, params, data_iter)
if 'CONTENT_LENGTH' in req.environ:
req.environ.pop('CONTENT_LENGTH')
req.headers['Transfer-Encoding'] = 'chunked'
response = req.get_response(self.app)
if etag:
response.headers['etag'] = etag
else:
response.headers['etag'] = ''
return response
return req.get_response(self.app)
def filter_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
def storlet_filter(app):
return StorletFilter(app, conf)
return storlet_filter
|
Crystal-SDS/filter-middleware
|
crystal_filter_middleware/filters/storlet.py
|
Python
|
gpl-3.0
| 6,161
|
#~ # -*- coding: utf-8 -*-
#~ from os.path import join
from distutils.core import setup
from yamlweb import __version__, __progname as name
# readme is needed at register/upload time, not install time
try:
with open('readme.rst') as f:
long_description = f.read()
except IOError:
long_description = ''
setup(
name = name,
version = __version__,
description = 'Converts YAML to HTML and CSS.',
author = 'Mike Miller',
author_email = 'mixmastamyk@github.com',
url = 'https://github.com/mixmastamyk/%s' % name,
download_url = ('https://github.com/mixmastamyk/%s/archive/master.zip'
% name),
license = 'GPLv3+',
requires = ['PyYAML(>=3.10,<4.0)', ], #+ requires, # for pypi page
install_requires = ['PyYAML>=3.10,<4.0a0', ], #+ requires, # real reqs
packages = [name],
scripts = ['yaml2html', 'yaml2css'],
#~ package_data = {name: ['', '']},
#~ extras_require = {
#~ 'name': ['pkg1', 'pkg2'],
#~ },
long_description = long_description,
classifiers = [
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3'
' or later (GPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
)
|
mixmastamyk/yamlweb
|
setup.py
|
Python
|
gpl-3.0
| 1,537
|
# -*- coding: utf-8 -*-
# Gedit Better Defaults plugin
# Copyright (C) 2017 Fabio Zendhi Nagao
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gedit
import gtk
import re
ui_str = """
<ui>
<menubar name="MenuBar">
<menu name="EditMenu" action="Edit">
<placeholder name="EditOps_4">
<menuitem action="DuplicateLine" name="Duplicate line"/>
</placeholder>
</menu>
</menubar>
</ui>
"""
class BetterDefaultsWindowHelper:
def __init__(self, plugin, window):
self._window = window
self._plugin = plugin
self.install_ui()
for view in self._window.get_views():
self.activate_view(view)
for doc in self._window.get_documents():
self.activate_doc(doc)
self._tab_added_id = self._window.connect("tab_added", self.on_tab_added)
# self._key_press_id = self._window.connect("key-press-event", self.on_key_press_event)
def deactivate(self):
# self._window.disconnect(self._key_press_id)
self._window.disconnect(self._tab_added_id)
for doc in self._window.get_documents():
self.deactivate_doc(doc)
for view in self._window.get_views():
self.deactivate_view(view)
self.uninstall_ui()
self._window = None
self._plugin = None
def update_ui(self):
pass
# # TODO: Use key press and button press events instead of update_ui
# doc = self._window.get_active_document()
# if doc:
# bounds = doc.get_selection_bounds()
# if bounds:
# content = doc.get_text(*bounds).decode("utf-8")
# highlightable = re.compile(r"[\S\{\}\[\]\(\)]+", flags=re.UNICODE)
# if highlightable.search(content):
# doc.set_search_text(content, gedit.SEARCH_CASE_SENSITIVE)
# else:
# doc.set_search_text("", gedit.SEARCH_CASE_SENSITIVE)
# else:
# doc.set_search_text("", gedit.SEARCH_CASE_SENSITIVE)
def install_ui(self):
manager = self._window.get_ui_manager()
self._action_group = gtk.ActionGroup("BetterDefaultsPluginActions")
self._action_group.add_actions([
( "DuplicateLine", None, _("Duplicate line"), "<Ctrl><Shift>d", _("Duplicate Line"), self.duplicate_line )
])
manager.insert_action_group(self._action_group, -1)
self._ui_id = manager.add_ui_from_string(ui_str)
def uninstall_ui(self):
manager = self._window.get_ui_manager()
manager.remove_ui(self._ui_id)
manager.remove_action_group(self._action_group)
manager.ensure_update()
def activate_view(self, view):
view.set_smart_home_end(True)
view.set_data("vscrolling_helper", (0.0, 0.0))
size_allocate_id = view.connect("size-allocate", self.on_size_allocate)
view.set_data("on_size_allocate_id", size_allocate_id)
va = view.get_vadjustment()
value_change_id = va.connect("value_changed", self.on_value_changed)
view.set_data("on_value_changed_id", value_change_id)
def deactivate_view(self, view):
va = view.get_vadjustment()
va.disconnect( view.get_data("on_value_changed_id") )
view.disconnect( view.get_data("on_size_allocate_id") )
view.set_smart_home_end(False)
def activate_doc(self, doc):
save_id = doc.connect("save", self.on_document_save)
doc.set_data("on_save_id", save_id)
def deactivate_doc(self, doc):
doc.disconnect( view.get_data("on_save_id") )
def on_tab_added(self, w, t):
self.activate_view(t.get_view())
self.activate_doc(t.get_document())
def on_document_save(self, doc):
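        # strips trailing blank lines from the end of the document on save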
piter = doc.get_end_iter()
if piter.starts_line():
while piter.backward_char():
if not piter.ends_line():
piter.forward_to_line_end()
break
doc.delete(piter, doc.get_end_iter())
def on_size_allocate(self, view, allocation):
va = view.get_vadjustment()
vsz = va.get_upper() + ( va.get_page_size() / 2 )
if va.get_upper() > va.get_page_size():
va.set_upper(vsz)
if va.get_value() < view.get_data("vscrolling_helper")[1]:
va.set_value(view.get_data("vscrolling_helper")[1])
view.set_data("vscrolling_helper", (vsz, va.get_value()))
def on_value_changed(self, adjustment):
view = self._window.get_active_view()
va = view.get_vadjustment()
if( va.get_upper() == view.get_data("vscrolling_helper")[0] ):
view.set_data( "vscrolling_helper", ( view.get_data("vscrolling_helper")[0], va.get_value() ) )
def duplicate_line(self, action):
doc = self._window.get_active_document()
doc.begin_user_action()
liter = doc.get_iter_at_mark(doc.get_insert())
        liter.set_line_offset(0)
riter = doc.get_iter_at_mark(doc.get_insert())
f = riter.forward_line()
line = doc.get_slice(liter, riter, True)
if f:
doc.insert(riter, line)
else:
doc.insert(riter, '\n' + line)
doc.end_user_action()
def enclose_selected(self, l, r):
doc = self._window.get_active_document()
(a, b) = doc.get_selection_bounds()
doc.insert(b, r)
(a, b) = doc.get_selection_bounds()
doc.insert(a, l)
def on_key_press_event(self, window, event):
doc = self._window.get_active_document()
bounds = doc.get_selection_bounds()
if bounds:
c = event.keyval
if c == 123:
self.enclose_selected('{', '}')
elif c == 91:
self.enclose_selected('[', ']')
elif c == 40:
self.enclose_selected('(', ')')
elif c == 60:
self.enclose_selected('<', '>')
elif c == 65111:
self.enclose_selected('"', '"')
elif c == 65105:
self.enclose_selected("'", "'")
if c in [123, 91, 40, 60, 65111, 65105]:
return True
class BetterDefaultsPlugin(gedit.Plugin):
WINDOW_DATA_KEY = "BetterDefaultsPluginWindowData"
def __init__(self):
gedit.Plugin.__init__(self)
def activate(self, window):
helper = BetterDefaultsWindowHelper(self, window)
window.set_data(self.WINDOW_DATA_KEY, helper)
def deactivate(self, window):
window.get_data(self.WINDOW_DATA_KEY).deactivate()
window.set_data(self.WINDOW_DATA_KEY, None)
def update_ui(self, window):
window.get_data(self.WINDOW_DATA_KEY).update_ui()
|
nagaozen/my-os-customizations
|
home/nagaozen/.gnome2/gedit/plugins/better-defaults/__init__.py
|
Python
|
gpl-3.0
| 6,370
|
from ..i18n import trstring_factory
COMP_ID = 'layer'
_ = trstring_factory(COMP_ID)
|
nextgis/nextgisweb
|
nextgisweb/layer/util.py
|
Python
|
gpl-3.0
| 85
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from time import sleep
from random import randint
from argparse import ArgumentParser
""" Exemplo de retorno de carro e passagem de argumentos. """
parser = ArgumentParser(description='Exemplo de retorno de carro e passagem de argumentos')
parser.add_argument('-s',
action='store',
type=float,
dest='segundos',
default=0.0,
                    help='Print interval in seconds, default: 0.0 seconds')
parser.add_argument('-n',
action='store',
type=int,
dest='numero',
default=100,
                    help='Number of iterations, default: 100 iterations')
args = parser.parse_args()
# the parameters are captured as attributes of the args object, as named by the dest
# parameter of parser.add_argument()
Q = args.numero
# system('clear')
for i in range(1, Q + 1):
sleep(args.segundos)
    # carriage return reprints the count in the same place, like a progress bar.
    # the print must start with a carriage return (\r) and use flush=True.
    # the format pads the number with leading zeros to the width of Q.
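    # e.g. '{:0>{}}'.format(7, len(str(100))) -> '007'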
print('\r{:0>{}}'.format(i, len(str(Q))), end="", flush=True)
if i == randint(1, Q):
        # random draw
        print(' -> Number drawn')
else:
print('\nEND OF LINE.')
|
paulocsilvajr/python-code
|
src/iterar.py
|
Python
|
gpl-3.0
| 1,531
|
__problem_title__ = "Counting rectangles"
__problem_url___ = "https://projecteuler.net/problem=85"
__problem_description__ = "By counting carefully it can be seen that a rectangular grid " \
"measuring 3 by 2 contains eighteen rectangles: Although there exists " \
"no rectangular grid that contains exactly two million rectangles, " \
"find the area of the grid with the nearest solution."
import timeit
class Solution():
@staticmethod
def solution1():
pass
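    # A minimal sketch (not part of the original file): an m x n grid
    # contains T(m)*T(n) rectangles, where T(k) = k*(k+1)/2 is the k-th
    # triangular number; search for the area m*n whose count is closest
    # to two million.
    @staticmethod
    def solution_sketch(target=2000000):
        best_area, best_diff = 0, target
        for m in range(1, 100):
            for n in range(m, 2000):
                count = m * (m + 1) * n * (n + 1) // 4  # T(m) * T(n)
                diff = abs(count - target)
                if diff < best_diff:
                    best_area, best_diff = m * n, diff
                if count > target:
                    break  # for fixed m the count only grows with n
        return best_area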
@staticmethod
def time_solutions():
setup = 'from __main__ import Solution'
print('Solution 1:', timeit.timeit('Solution.solution1()', setup=setup, number=1))
if __name__ == '__main__':
s = Solution()
print(s.solution1())
s.time_solutions()
|
jrichte43/ProjectEuler
|
Problem-0085/solutions.py
|
Python
|
gpl-3.0
| 839
|
import time
import csv
def report(ob):
#Create log file
log_file_report = ob.file_destination + "/" + "Parameters_Results.log"
    log_report = open(log_file_report, 'a')
#Print parameters
#Batch or single file
log_report.write("\nRun type: %s" % ob.runtype)
if ob.runtype in ["file","pictures"]:
log_report.write("\nInput file path: %s" % ob.inDEST)
else:
log_report.write("\nInput file path: %s" % ob.batchpool)
log_report.write("\nOutput dir: %s" % ob.fileD)
log_report.write("\nAdapt accAvg? %s" % ob.adapt)
if ob.adapt:
log_report.write("\nExpected hitrate: %s" % ob.frameHIT)
log_report.write("\nMinimum accAvg: %s" % ob.floorvalue)
log_report.write("\nThreshold %s" % ob.threshT)
log_report.write("\nMinimum contour area: %s" % ob.minSIZE)
log_report.write("\nBurnin: %s" % ob.burnin)
log_report.write("\nScan frames: %s" % ob.scan)
if ob.frameSET:
log_report.write("\nManual framerate: %s" % ob.frame_rate)
log_report.write("\nSet ROI: %s" % ob.ROI_include)
log_report.write("\nArea counter?: %s" % ob.set_areacounter)
log_report.write("\nOutput type?: %s\n\n" % ob.makeVID)
#Ending time
end=time.time()
#total_time()
total_min=(end-ob.start)/60
#processed frames per second
pfps=float(ob.frame_count)/(total_min*60)
##Write to log file
log_report.write("Total run time (min): %.2f \n " % total_min)
log_report.write("Average frames per second: %.2f \n " % pfps)
#End of program, report some statistic to screen and log
#log
log_report.write("\n Thank you for using MotionMeerkat! \n")
log_report.write("Candidate motion events: %.0f \n " % ob.total_count )
log_report.write("Frames skipped due to Threshold: %.0f \n " % ob.nocountr)
log_report.write("Frames skipped due to minSIZE: %.0f \n " % ob.toosmall)
log_report.write("Total frames in files: %.0f \n " % ob.frame_count)
rate=float(ob.total_count)/ob.frame_count*100
log_report.write("Hitrate: %.2f %% \n" % rate)
log_report.write("Exiting")
#print to screen
print("\n\nThank you for using MotionMeerkat! \n")
print("Total run time (min): %.2f \n " % total_min)
print("Average frames processed per second: %.2f \n " % pfps)
print("Candidate motion events: %.0f \n " % ob.total_count )
print("Frames skipped due to AccAvg: %.0f \n " % ob.nodiff)
print("Frames skipped due to Threshold: %.0f \n " % ob.nocountr)
print("Frames skipped due to minSIZE: %.0f \n " % ob.toosmall)
print("Total frames in files: %.0f \n " % ob.frame_count)
rate=float(ob.total_count)/ob.frame_count*100
print("Hitrate: %.2f %% \n" % rate)
#reset frame count if in batch loop
ob.frame_count=0
ob.total_count=0
ob.toosmall=0
ob.nocountr=0
#Write csv of time stamps and frame counts
#file name
time_stamp_report = ob.file_destination + "/" + "Frames.csv"
with open(time_stamp_report, 'wb') as f:
writer = csv.writer(f)
writer.writerows(ob.stamp)
if ob.set_areacounter:
area_report = ob.file_destination + "/" + "AreaCounter.csv"
with open(area_report, 'wb') as f:
writer = csv.writer(f)
writer.writerows(ob.areaC)
|
bw4sz/MotionMeerkat_Bisque
|
MotionMeerkat/report.py
|
Python
|
gpl-3.0
| 3,667
|
import os
from os.path import join
from math import pi
from math import sqrt
from math import radians
import cv2
import numpy as np
from numpy import dot
from scipy.optimize import minimize
from scipy.optimize import differential_evolution
import matplotlib.pyplot as plt
from prototype.utils.euler import euler2rot
from prototype.utils.filesystem import walkdir
from prototype.models.gimbal import GimbalModel
from prototype.vision.common import focal_length
from prototype.vision.common import camera_intrinsics
from prototype.vision.camera.camera_model import PinholeCameraModel
from prototype.vision.camera.distortion_model import project_pinhole_equi
from prototype.calibration.chessboard import Chessboard
from prototype.calibration.camera import CameraIntrinsics
from prototype.viz.common import axis_equal_3dplot
from prototype.viz.plot_gimbal import PlotGimbal
from prototype.viz.plot_chessboard import PlotChessboard
class PreprocessData:
""" Preprocess calibration data
Attributes
----------
image_dir : string
Image base directory
images : np.array
Calibration images
images_ud : np.array
Undistorted calibration images
chessboard : Chessboard
Chessboard
intrinsics : CameraIntrinsics
Camera intrinsics
corners2d : np.array
Image corners
corners3d : np.array
Image point location
corners2d_ud : np.array
Undistorted image corners
corners3d_ud : np.array
Undistorted image point location
"""
def __init__(self, data_type, **kwargs):
self.data_type = data_type
if self.data_type == "IMAGES":
self.images_dir = kwargs["images_dir"]
self.images = []
self.images_ud = []
self.chessboard = kwargs["chessboard"]
self.intrinsics = kwargs["intrinsics"]
elif self.data_type == "PREPROCESSED":
self.data_path = kwargs["data_path"]
# Result
self.target_points = []
self.corners2d = []
self.corners3d = []
self.corners2d_ud = []
self.corners3d_ud = []
    def ideal2pixel(self, points, K):
        """ Ideal points to pixel coordinates
        Parameters
        ----------
        points : np.array
            Points in ideal coordinates
        K : np.array
            Camera intrinsics matrix
        Returns
        -------
        pixels : np.array
            Points in pixel coordinates
        """
# Get camera intrinsics
fx = K[0, 0]
fy = K[1, 1]
cx = K[0, 2]
cy = K[1, 2]
# Convert ideal points to pixel coordinates
pixels = []
nb_points = len(points)
for p in points.reshape((nb_points, 2)):
px = (p[0] * fx) + cx
py = (p[1] * fy) + cy
pixels.append([px, py])
return np.array(pixels)
def get_viz(self, i):
""" Return a visualization of the original and undistorted image with
detected chessboard corners and a 3D coordinate axis drawn on the
images. The original and undistorted image with the visualizations
will be stacked horizontally.
Parameters
----------
i : int
i-th Image frame
Returns
-------
image_viz : np.array
Image visualization
"""
# Visualize original image
image = self.images[i]
corners2d = self.corners2d[i]
K = self.intrinsics.K()
image = self.chessboard.draw_viz(image, corners2d, K)
# Visualize undistorted image
image_ud = self.images_ud[i]
corners2d_ud = self.corners2d_ud[i]
K_new = self.intrinsics.K_new
image_ud = self.chessboard.draw_viz(image_ud, corners2d_ud, K_new)
# Create visualization
image_viz = np.hstack((image, image_ud))
return image_viz
def preprocess(self):
""" Preprocess images """
image_files = walkdir(self.images_dir)
nb_images = len(image_files)
# Loop through calibration images
for i in range(nb_images):
# Load images and find chessboard corners
image = cv2.imread(image_files[i])
corners = self.chessboard.find_corners(image)
self.images.append(image)
# Calculate camera to chessboard transform
K = self.intrinsics.K()
P_c = self.chessboard.calc_corner_positions(corners, K)
nb_corners = corners.shape[0]
self.corners2d.append(corners.reshape((nb_corners, 2)))
self.corners3d.append(P_c)
# Undistort corners in camera 0
corners_ud = self.intrinsics.undistort_points(corners)
image_ud, K_new = self.intrinsics.undistort_image(image)
pixels_ud = self.ideal2pixel(corners_ud, K_new)
self.images_ud.append(image_ud)
# Calculate camera to chessboard transform
K_new = self.intrinsics.K_new
P_c = self.chessboard.calc_corner_positions(pixels_ud, K_new)
self.corners2d_ud.append(pixels_ud)
self.corners3d_ud.append(P_c)
self.corners2d = np.array(self.corners2d)
self.corners3d = np.array(self.corners3d)
self.corners2d_ud = np.array(self.corners2d_ud)
self.corners3d_ud = np.array(self.corners3d_ud)
def parse_gridpoints_line(self, line, data):
# Parse line
elements = line.strip().split(" ")
elements = [float(x) for x in elements]
x, y, z = elements[0:3]
u, v = elements[3:5]
# Form point 3d and 2d
point3d = [x, y, z]
point2d = [u, v]
# Add to storage
data["target_points"].append(point3d)
data["corners3d"].append(point3d)
data["corners2d"].append(point2d)
def parse_transform(self, line, data):
# Parse transform
elements = line.strip().split(" ")
elements = [float(x) for x in elements]
data["T_c_t"] += elements
def parse_gimbal_angles(self, line, data):
# Parse gimbal angles
elements = line.strip().split(" ")
data["gimbal_angles"] += [float(x) for x in elements]
def transform_corners(self, data):
data["T_c_t"] = np.array(data["T_c_t"]).reshape((4, 4))
data["corners3d"] = np.array(data["corners3d"])
data["corners2d"] = np.array(data["corners2d"])
# Transform the 3d points
# -- Convert 3d points to homogeneous coordinates
nb_corners = data["corners3d"].shape[0]
ones = np.ones((nb_corners, 1))
corners_homo = np.block([data["corners3d"], ones])
corners_homo = corners_homo.T
# -- Transform 3d points
X = np.dot(data["T_c_t"], corners_homo)
X = X.T
data["corners3d"] = X[:, 0:3]
def load_preprocessed_file(self, filepath):
# Setup
datafile = open(filepath, "r")
mode = None
# Data
data = {
"target_points": [],
"corners3d": [],
"corners2d": [],
"gimbal_angles": [],
"T_c_t": [] # Transform, target to camera
}
# Parse file
for line in datafile:
line = line.strip()
if line == "gridpoints:":
mode = "gridpoints"
elif line == "tmatrix:":
mode = "tmatrix"
elif line == "gimbalangles:":
mode = "gimbalangles"
elif line == "end:":
mode = None
else:
if mode == "gridpoints":
self.parse_gridpoints_line(line, data)
elif mode == "tmatrix":
self.parse_transform(line, data)
elif mode == "gimbalangles":
self.parse_gimbal_angles(line, data)
# Finish up
self.transform_corners(data)
data["target_points"] = np.array(data["target_points"])
data["corners2d_ud"] = data["corners2d"]
data["corners3d_ud"] = data["corners3d"]
datafile.close()
return data
def load_preprocessed(self):
files = walkdir(self.data_path)
files.sort(key=lambda f: int(os.path.splitext(os.path.basename(f))[0]))
if len(files) == 0:
err_msg = "No data files found in [%s]!" % (self.data_path)
raise RuntimeError(err_msg)
for f in files:
data = self.load_preprocessed_file(f)
self.target_points.append(data["target_points"])
self.corners2d.append(data["corners2d"])
self.corners3d.append(data["corners3d"])
self.target_points = np.array(self.target_points)
self.corners2d = np.array(self.corners2d)
self.corners3d = np.array(self.corners3d)
self.corners2d_ud = self.corners2d
self.corners3d_ud = self.corners3d
def load(self):
if self.data_type == "IMAGES":
self.preprocess()
elif self.data_type == "PREPROCESSED":
self.load_preprocessed()
class DataLoader:
""" Gimbal extrinsics calibration data loader
Attributes
----------
data_path : str
Data path
cam0_dir : str
Camera 0 image dir
cam1_dir : str
Camera 1 image dir
imu_filename : str
IMU data path
chessboard : Chessboard
Chessboard
imu_data : np.array
IMU data
"""
def __init__(self, **kwargs):
self.data_path = kwargs.get("data_path")
self.preprocessed = kwargs.get("preprocessed", False)
self.inspect_data = kwargs.get("inspect_data", False)
self.joint_file = kwargs["joint_file"]
if self.preprocessed is False:
self.image_dirs = kwargs["image_dirs"]
self.intrinsic_files = kwargs["intrinsic_files"]
self.chessboard = Chessboard(**kwargs)
else:
self.data_dirs = kwargs["data_dirs"]
self.intrinsic_files = kwargs["intrinsic_files"]
    def load_joint_data(self):
        """ Load gimbal joint angle data
        Returns
        -------
        joint_data : np.array
            Joint angle data loaded from self.joint_file
        """
joint_file = open(join(self.data_path, self.joint_file), "r")
joint_data = np.loadtxt(joint_file, delimiter=",")
joint_file.close()
return joint_data
def draw_corners(self, image, corners, color=(0, 255, 0)):
""" Draw corners
Parameters
----------
image : np.array
Image
corners : np.array
Corners
"""
image = np.copy(image)
for i in range(len(corners)):
corner = tuple(corners[i][0].astype(int).tolist())
image = cv2.circle(image, corner, 2, color, -1)
return image
def check_nb_images(self, data):
""" Check number of images in data """
nb_cameras = len(self.image_dirs)
nb_images = len(data[0].images)
for i in range(1, nb_cameras):
if len(data[i].images) != nb_images:
err = "Number of images mismatch! [{0}] - [{1}]".format(
self.image_dirs[0],
self.image_dirs[i]
)
raise RuntimeError(err)
return True
def preprocess_images(self):
""" Preprocess images """
# Load camera data
nb_cameras = len(self.image_dirs)
data = []
for i in range(nb_cameras):
image_dir = join(self.data_path, self.image_dirs[i])
intrinsics_file = join(self.data_path, self.intrinsic_files[i])
intrinsics = CameraIntrinsics(intrinsics_file)
data_entry = PreprocessData("IMAGES",
images_dir=image_dir,
chessboard=self.chessboard,
intrinsics=intrinsics)
data_entry.load()
data.append(data_entry)
# Inspect data
self.check_nb_images(data)
if self.inspect_data is False:
return data
nb_images = len(data[0].images)
for i in range(nb_images):
viz = data[0].get_viz(i)
for n in range(1, nb_cameras):
viz = np.vstack((viz, data[n].get_viz(i)))
cv2.imshow("Image", viz)
cv2.waitKey(0)
return data
def filter_common_observations(self, i, data):
cam0_idx = 0
cam1_idx = 0
P_s = []
P_d = []
Q_s = []
Q_d = []
# Find common target points and store the
# respective points in 3d and 2d
for pt_a in data[0].target_points[i]:
for pt_b in data[1].target_points[i]:
if np.array_equal(pt_a, pt_b):
# Corners 3d observed in both the static and dynamic cam
P_s.append(data[0].corners3d_ud[i][cam0_idx])
P_d.append(data[1].corners3d_ud[i][cam1_idx])
# Corners 2d observed in both the static and dynamic cam
Q_s.append(data[0].corners2d_ud[i][cam0_idx])
Q_d.append(data[1].corners2d_ud[i][cam1_idx])
break
else:
cam1_idx += 1
cam0_idx += 1
cam1_idx = 0
P_s = np.array(P_s)
P_d = np.array(P_d)
Q_s = np.array(Q_s)
Q_d = np.array(Q_d)
return [P_s, P_d, Q_s, Q_d]
def load_preprocessed(self):
# Load data from each camera
data = []
for i in range(len(self.data_dirs)):
intrinsics_path = join(self.data_path, self.intrinsic_files[i])
intrinsics = CameraIntrinsics(intrinsics_path)
data_path = join(self.data_path, self.data_dirs[i])
data_entry = PreprocessData("PREPROCESSED",
data_path=data_path,
intrinsics=intrinsics)
data_entry.load()
data.append(data_entry)
# Find common measurements between cameras
Z = []
nb_measurements = len(data[0].target_points)
# -- Iterate through measurement sets
for i in range(nb_measurements):
Z_i = self.filter_common_observations(i, data)
Z.append(Z_i)
# Camera intrinsics
intrinsics_path = join(self.data_path, self.intrinsic_files[0])
# K_s = CameraIntrinsics(intrinsics_path).K()
C_s_intrinsics = CameraIntrinsics(intrinsics_path)
K_s = C_s_intrinsics.calc_Knew()
D_s = C_s_intrinsics.distortion_coeffs
intrinsics_path = join(self.data_path, self.intrinsic_files[1])
# K_d = CameraIntrinsics(intrinsics_path).K()
C_d_intrinsics = CameraIntrinsics(intrinsics_path)
K_d = C_d_intrinsics.calc_Knew()
D_d = C_d_intrinsics.distortion_coeffs
return Z, K_s, K_d, D_s, D_d
def load(self):
""" Load calibration data """
# Load joint data
joint_data = self.load_joint_data()
# Load data
if self.preprocessed is False:
data = self.preprocess_images()
K = len(data[0].corners2d_ud)
# Setup measurement sets
Z = []
for i in range(K):
# Corners 3d observed in both the static and dynamic cam
P_s = data[0].corners3d_ud[i]
P_d = data[1].corners3d_ud[i]
# Corners 2d observed in both the static and dynamic cam
Q_s = data[0].corners2d_ud[i]
Q_d = data[1].corners2d_ud[i]
Z_i = [P_s, P_d, Q_s, Q_d]
Z.append(Z_i)
K_s = data[0].intrinsics.K_new
K_d = data[1].intrinsics.K_new
D_s = data[0].intrinsics.distortion_coeffs
D_d = data[1].intrinsics.distortion_coeffs
return Z, K_s, K_d, D_s, D_d, joint_data
else:
Z, K_s, K_d, D_s, D_d = self.load_preprocessed()
return Z, K_s, K_d, D_s, D_d, joint_data
class GimbalCalibrator:
""" Gimbal Extrinsics Calibrator
Attributes
----------
gimbal_model : GimbalModel
Gimbal model
data : GECDataLoader
Calibration data
"""
def __init__(self, **kwargs):
self.gimbal_model = kwargs.get("gimbal_model", GimbalModel())
if kwargs.get("sim_mode", False):
# Load sim data
self.Z = kwargs["Z"]
self.K_s = kwargs["K_s"]
self.K_d = kwargs["K_d"]
self.D_s = kwargs["D_s"]
self.D_d = kwargs["D_d"]
self.joint_data = kwargs["joint_data"]
self.K = len(self.Z)
else:
# Load data
self.loader = DataLoader(**kwargs)
# -- Measurement set and joint data
data = self.loader.load()
self.Z, self.K_s, self.K_d, self.D_s, self.D_d, self.joint_data = data
# -- Number of measurement set
self.K = len(self.Z)
def setup_problem(self):
""" Setup the calibration optimization problem
Returns
-------
x : np.array
Vector of optimization parameters to be optimized
"""
print("Setting up optimization problem ...")
# Parameters to be optimized
# x_size = 6 + 5 + 3 + self.K * 2
x_size = 6 + 5 + 3
x = np.zeros(x_size)
# -- tau_s
x[0] = self.gimbal_model.tau_s[0]
x[1] = self.gimbal_model.tau_s[1]
x[2] = self.gimbal_model.tau_s[2]
x[3] = self.gimbal_model.tau_s[3]
x[4] = self.gimbal_model.tau_s[4]
x[5] = self.gimbal_model.tau_s[5]
# -- tau_d
x[6] = self.gimbal_model.tau_d[0]
x[7] = self.gimbal_model.tau_d[1]
x[8] = self.gimbal_model.tau_d[2]
x[9] = self.gimbal_model.tau_d[4]
x[10] = self.gimbal_model.tau_d[5]
# -- alpha, a, d
x[11] = self.gimbal_model.link[1]
x[12] = self.gimbal_model.link[2]
x[13] = self.gimbal_model.link[3]
# -- Joint angles
# x[14:] = self.joint_data[:, 0:2].ravel()
return x, self.Z, self.K_s, self.K_d, self.D_s, self.D_d
def reprojection_error(self, x, *args):
"""Reprojection Error
Parameters
----------
x : np.array
Parameters to be optimized
        args : tuple of (Z, K_s, K_d, D_s, D_d)
            Z: list of measurement sets
            K_s: np.array static camera intrinsics matrix K
            K_d: np.array dynamic camera intrinsics matrix K
            D_s: np.array static camera distortion coefficients
            D_d: np.array dynamic camera distortion coefficients
        Returns
        -------
        cost : float
            Sum of squared reprojection errors
"""
# Map the optimization params back into the transforms
# -- tau_s
tau_s = x[0:6]
# -- tau_d
tau_d_tx = x[6]
tau_d_ty = x[7]
tau_d_tz = x[8]
tau_d_pitch = x[9]
tau_d_yaw = x[10]
# -- alpha, a, d
alpha, a, d = x[11:14]
# -- Joint angles
# roll_angles = []
# pitch_angles = []
# for i in range(self.K):
# roll_angles.append(x[14 + (2 * i)])
# pitch_angles.append(x[14 + (2 * i) + 1])
roll_angles = self.joint_data[:, 0]
pitch_angles = self.joint_data[:, 1]
# Set gimbal model
self.gimbal_model.tau_s = tau_s
self.gimbal_model.tau_d = [tau_d_tx, tau_d_ty, tau_d_tz,
None, tau_d_pitch, tau_d_yaw]
self.gimbal_model.link = [None, alpha, a, d]
# Loop through all measurement sets
Z, K_s, K_d, D_s, D_d = args
residuals = []
for k in range(int(self.K)):
# Get the k-th measurements
P_s, P_d, Q_s, Q_d = Z[k]
# Get joint angles
roll = roll_angles[k]
pitch = pitch_angles[k]
self.gimbal_model.set_attitude([roll, pitch])
# Get static to dynamic camera transform
T_sd = self.gimbal_model.calc_transforms()[2]
# Calculate reprojection error in the static camera
nb_P_d_corners = len(P_d)
err_s = np.zeros(nb_P_d_corners * 2)
for i in range(nb_P_d_corners):
# -- Transform 3D world point from dynamic to static camera
P_d_homo = np.append(P_d[i], 1.0)
P_s_cal = dot(T_sd, P_d_homo)[0:3]
# -- Project 3D world point to image plane
Q_s_cal = project_pinhole_equi(P_s_cal, K_s, D_s)
# -- Calculate reprojection error
err_s[(i * 2):(i * 2 + 2)] = Q_s[i] - Q_s_cal
# Calculate reprojection error in the dynamic camera
nb_P_s_corners = len(P_s)
err_d = np.zeros(nb_P_s_corners * 2)
for i in range(nb_P_s_corners):
# -- Transform 3D world point from dynamic to static camera
P_s_homo = np.append(P_s[i], 1.0)
P_d_cal = dot(np.linalg.inv(T_sd), P_s_homo)[0:3]
# -- Project 3D world point to image plane
Q_d_cal = project_pinhole_equi(P_d_cal, K_d, D_d)
# -- Calculate reprojection error
err_d[(i * 2):(i * 2 + 2)] = Q_d[i] - Q_d_cal
            # Stack residuals
residuals += err_s.tolist() + err_d.tolist()
# Calculate Sum of Squared Differences (SSD)
cost = np.sum(np.array(residuals)**2)
return cost
def optimize(self):
""" Optimize Gimbal Extrinsics """
# Setup
x, Z, K_s, K_d, D_s, D_d = self.setup_problem()
args = (Z, K_s, K_d, D_s, D_d)
# Optimize
print("Optimizing!")
print("This can take a while...")
# result = least_squares(fun=self.reprojection_error,
# x0=x,
# args=args,
# method="Nelder-Mead",
# options={'disp': True})
tau_s_tx = self.gimbal_model.tau_s[0]
tau_s_ty = self.gimbal_model.tau_s[1]
tau_s_tz = self.gimbal_model.tau_s[2]
tau_s_roll = self.gimbal_model.tau_s[3]
tau_s_pitch = self.gimbal_model.tau_s[4]
tau_s_yaw = self.gimbal_model.tau_s[5]
# -- tau_d
tau_d_tx = self.gimbal_model.tau_d[0]
tau_d_ty = self.gimbal_model.tau_d[1]
tau_d_tz = self.gimbal_model.tau_d[2]
tau_d_pitch = self.gimbal_model.tau_d[4]
tau_d_yaw = self.gimbal_model.tau_d[5]
# -- alpha, a, d
alpha = self.gimbal_model.link[1]
a = self.gimbal_model.link[2]
d = self.gimbal_model.link[3]
bounds = [
(tau_s_tx - 0.2, tau_s_tx + 0.2),
(tau_s_ty - 0.2, tau_s_ty + 0.2),
(tau_s_tz - 0.2, tau_s_tz + 0.2),
(tau_s_roll - 0.2, tau_s_roll + 0.2),
(tau_s_pitch - 0.2, tau_s_pitch + 0.2),
(tau_s_yaw - 0.2, tau_s_yaw + 0.2),
(tau_d_tx - 0.2, tau_d_tx + 0.2),
(tau_d_ty - 0.2, tau_d_ty + 0.2),
(tau_d_tz - 0.2, tau_d_tz + 0.2),
(tau_d_pitch - 0.2, tau_d_pitch + 0.2),
(tau_d_yaw - 0.2, tau_d_yaw + 0.2),
(alpha - 0.1, alpha + 0.1),
(a - 0.1, a + 0.1),
(d - 0.1, d + 0.1)
]
result = differential_evolution(func=self.reprojection_error,
bounds=bounds,
maxiter=1000,
args=args,
disp=True)
# Parse results
tau_s = result.x[0:6]
tau_d_tx = result.x[6]
tau_d_ty = result.x[7]
tau_d_tz = result.x[8]
        tau_d_roll = 0.0  # roll is fixed and was not optimized
tau_d_pitch = result.x[9]
tau_d_yaw = result.x[10]
tau_d = [tau_d_tx, tau_d_ty, tau_d_tz,
tau_d_roll, tau_d_pitch, tau_d_yaw]
alpha, a, d = result.x[11:14]
self.gimbal_model.tau_s = tau_s
self.gimbal_model.tau_d = tau_d
self.gimbal_model.link = [0.0, alpha, a, d]
print("Results:")
print("---------------------------------")
print("tau_s: ", self.gimbal_model.tau_s)
print("tau_d: ", self.gimbal_model.tau_d)
print("w1: ", self.gimbal_model.link)
# Plot gimbal
self.gimbal_model.set_attitude([0.0, 0.0])
plot_gimbal = PlotGimbal(gimbal=self.gimbal_model)
plot_gimbal.plot()
plt.show()
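# ----------------------------------------------------------------------
# Reference sketch (not part of the original file): a minimal pinhole +
# equidistant (fisheye) projection illustrating what `project_pinhole_equi`
# above is assumed to compute. The real implementation lives elsewhere in
# the package; the OpenCV fisheye convention used here is an assumption.
def _project_pinhole_equi_sketch(P, K, D):
    """Project a 3D point P (camera frame, z > 0) with intrinsics K and
    equidistant distortion coefficients D = [k1, k2, k3, k4]."""
    x, y, z = P
    a, b = x / z, y / z                    # normalized image coordinates
    r = np.sqrt(a**2 + b**2)
    theta = np.arctan(r)
    # Equidistant distortion polynomial (OpenCV fisheye convention)
    theta_d = theta * (1.0 + D[0] * theta**2 + D[1] * theta**4
                       + D[2] * theta**6 + D[3] * theta**8)
    scale = theta_d / r if r > 1e-8 else 1.0
    u = K[0, 0] * a * scale + K[0, 2]      # apply focal length, principal point
    v = K[1, 1] * b * scale + K[1, 2]
    return np.array([u, v])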
class GimbalDataGenerator:
def __init__(self, intrinsics_file):
self.intrinsics = CameraIntrinsics(intrinsics_file)
# Chessboard
self.chessboard = Chessboard(t_G=np.array([0.3, 0.1, 0.1]),
nb_rows=11,
nb_cols=11,
square_size=0.02)
self.plot_chessboard = PlotChessboard(chessboard=self.chessboard)
# Gimbal
self.gimbal = GimbalModel()
self.gimbal.set_attitude([0.0, 0.0])
self.plot_gimbal = PlotGimbal(gimbal=self.gimbal)
# Cameras
self.static_camera = self.setup_static_camera()
self.gimbal_camera = self.setup_gimbal_camera()
def setup_static_camera(self):
image_width = 640
image_height = 480
fov = 120
fx, fy = focal_length(image_width, image_height, fov)
cx, cy = (image_width / 2.0, image_height / 2.0)
K = camera_intrinsics(fx, fy, cx, cy)
cam_model = PinholeCameraModel(image_width, image_height, K)
return cam_model
def setup_gimbal_camera(self):
image_width = 640
image_height = 480
fov = 120
fx, fy = focal_length(image_width, image_height, fov)
cx, cy = (image_width / 2.0, image_height / 2.0)
K = camera_intrinsics(fx, fy, cx, cy)
cam_model = PinholeCameraModel(image_width, image_height, K)
return cam_model
def calc_static_camera_view(self):
# Transforming chessboard grid points in global to camera frame
R = np.eye(3)
t = np.zeros(3)
R_CG = euler2rot([-pi / 2.0, 0.0, -pi / 2.0], 123)
X = dot(R_CG, self.chessboard.grid_points3d.T)
x = self.static_camera.project(X, R, t).T[:, 0:2]
return x
def calc_gimbal_camera_view(self):
# Create transform from global to static camera frame
t_g_sg = np.array([0.0, 0.0, 0.0])
R_sg = euler2rot([-pi / 2.0, 0.0, -pi / 2.0], 321)
T_gs = np.array([[R_sg[0, 0], R_sg[0, 1], R_sg[0, 2], t_g_sg[0]],
[R_sg[1, 0], R_sg[1, 1], R_sg[1, 2], t_g_sg[1]],
[R_sg[2, 0], R_sg[2, 1], R_sg[2, 2], t_g_sg[2]],
[0.0, 0.0, 0.0, 1.0]])
# Calculate transform from global to dynamic camera frame
links = self.gimbal.calc_transforms()
T_sd = links[-1]
T_gd = dot(T_gs, T_sd)
# Project chessboard grid points in global to dynamic camera frame
# -- Convert 3D points to homogeneous coordinates
X = self.chessboard.grid_points3d.T
X = np.block([[X], [np.ones(X.shape[1])]])
        # -- Project to dynamic camera image frame
X = dot(np.linalg.inv(T_gd), X)[0:3, :]
x = dot(self.gimbal_camera.K, X)
# -- Normalize points
x[0, :] = x[0, :] / x[2, :]
x[1, :] = x[1, :] / x[2, :]
x = x[0:2, :].T
return x, X.T
def plot_static_camera_view(self, ax):
x = self.calc_static_camera_view()
ax.scatter(x[:, 0], x[:, 1], marker="o", color="red")
def plot_gimbal_camera_view(self, ax):
x, X = self.calc_gimbal_camera_view()
ax.scatter(x[:, 0], x[:, 1], marker="o", color="red")
def plot_camera_views(self):
# Plot static camera view
ax = plt.subplot(211)
ax.axis('square')
self.plot_static_camera_view(ax)
ax.set_title("Static Camera View", y=1.08)
ax.set_xlim((0, self.static_camera.image_width))
ax.set_ylim((0, self.static_camera.image_height))
ax.invert_yaxis()
ax.xaxis.tick_top()
# Plot gimbal camera view
ax = plt.subplot(212)
ax.axis('square')
self.plot_gimbal_camera_view(ax)
ax.set_title("Gimbal Camera View", y=1.08)
ax.set_xlim((0, self.gimbal_camera.image_width))
ax.set_ylim((0, self.gimbal_camera.image_height))
ax.invert_yaxis()
ax.xaxis.tick_top()
# Overall plot settings
plt.tight_layout()
def plot(self):
# Plot camera views
self.plot_camera_views()
# Plot gimbal and chessboard
fig = plt.figure()
ax = fig.gca(projection='3d')
self.plot_gimbal.plot(ax)
self.plot_chessboard.plot(ax)
axis_equal_3dplot(ax)
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
plt.show()
def calc_roll_pitch_combo(self, nb_images):
nb_combo = int(sqrt(nb_images))
roll_lim = [radians(-10), radians(10)]
pitch_lim = [radians(-10), radians(10)]
roll_vals = np.linspace(roll_lim[0], roll_lim[1], num=nb_combo)
pitch_vals = np.linspace(pitch_lim[0], pitch_lim[1], num=nb_combo)
return roll_vals, pitch_vals
def generate(self):
# Setup
nb_images = 4
R_CG = euler2rot([-pi / 2.0, 0.0, -pi / 2.0], 123)
# Generate static camera data
self.intrinsics.K_new = self.intrinsics.K()
static_cam_data = PreprocessData("IMAGES",
images_dir=None,
intrinsics=self.intrinsics,
chessboard=self.chessboard)
x = self.calc_static_camera_view()
X = dot(R_CG, self.chessboard.grid_points3d.T).T
for i in range(nb_images):
static_cam_data.corners2d_ud.append(x)
static_cam_data.corners3d_ud.append(X)
static_cam_data.corners2d_ud = np.array(static_cam_data.corners2d_ud)
static_cam_data.corners3d_ud = np.array(static_cam_data.corners3d_ud)
# Generate gimbal data
roll_vals, pitch_vals = self.calc_roll_pitch_combo(nb_images)
gimbal_cam_data = PreprocessData("IMAGES",
images_dir=None,
intrinsics=self.intrinsics,
chessboard=self.chessboard)
joint_data = []
for roll in roll_vals:
for pitch in pitch_vals:
self.gimbal.set_attitude([roll, pitch])
x, X = self.calc_gimbal_camera_view()
gimbal_cam_data.corners2d_ud.append(x)
gimbal_cam_data.corners3d_ud.append(X)
joint_data.append([roll, pitch])
gimbal_cam_data.corners2d_ud = np.array(gimbal_cam_data.corners2d_ud)
gimbal_cam_data.corners3d_ud = np.array(gimbal_cam_data.corners3d_ud)
joint_data = np.array(joint_data)
# Setup measurement sets
Z = []
for i in range(nb_images):
# Corners 3d observed in both the static and dynamic cam
P_s = static_cam_data.corners3d_ud[i]
P_d = gimbal_cam_data.corners3d_ud[i]
# Corners 2d observed in both the static and dynamic cam
Q_s = static_cam_data.corners2d_ud[i]
Q_d = gimbal_cam_data.corners2d_ud[i]
Z_i = [P_s, P_d, Q_s, Q_d]
Z.append(Z_i)
K_s = static_cam_data.intrinsics.K_new
K_d = gimbal_cam_data.intrinsics.K_new
# Distortion - assume no distortion
D_s = np.zeros((4,))
D_d = np.zeros((4,))
return Z, K_s, K_d, D_s, D_d, joint_data
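# Minimal usage sketch (assumptions: the intrinsics file path is
# illustrative, and the calibrator class owning setup_problem()/optimize()
# above consumes the generated data; its constructor is not shown here):
#
#     generator = GimbalDataGenerator("test/intrinsics.yaml")
#     Z, K_s, K_d, D_s, D_d, joint_data = generator.generate()
#     generator.plot()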
|
chutsu/robotics
|
prototype/calibration/gimbal.py
|
Python
|
gpl-3.0
| 32,140
|
from sys import stdin as sin
# Note: memoization was planned here but never wired up, so fn computes n!
# by direct iteration on every call.
def fn(n):
    f = 1
    for i in range(1, n + 1):
        f *= i
    return f
t=int(input())
for i in range(t):
n=int(sin.readline().rstrip())
print(fn(n))
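# Equivalent sketch (not part of the original submission): Python integers
# are arbitrary precision, so math.factorial solves FCTRL2 directly.
# from math import factorial
# for _ in range(int(input())):
#     print(factorial(int(sin.readline())))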
|
parthapritam2717/CodeChef
|
FCTRL2.py
|
Python
|
gpl-3.0
| 283
|
from typing import List, Optional, Sequence, Union
from decksite.data import achievements, deck, preaggregation, query
from decksite.data.models.person import Person
from decksite.database import db
from shared import dtutil, guarantee, logger
from shared.container import Container
from shared.database import sqlescape
from shared.decorators import retry_after_calling
from shared.pd_exception import AlreadyExistsException, DoesNotExistException
def load_person_by_id(person_id: int, season_id: Optional[int] = None) -> Person:
return load_person(f'p.id = {person_id}', season_id=season_id)
def load_person_by_mtgo_username(username: str, season_id: Optional[int] = None) -> Person:
return load_person('p.mtgo_username = {username}'.format(username=sqlescape(username, force_string=True)), season_id=season_id)
def load_person_by_discord_id(discord_id: int, season_id: Optional[int] = None) -> Person:
return load_person(f'p.discord_id = {discord_id}', season_id=season_id)
# pylint: disable=invalid-name
def load_person_by_discord_id_or_username(person: str, season_id: int = 0) -> Person:
# It would probably be better if this method did not exist but for now it's required by the API.
# The problem is that Magic Online usernames can be integers so we cannot be completely unambiguous here.
# We can make a really good guess, though.
# See https://discordapp.com/developers/docs/reference#snowflakes
# Unix timestamp (ms) for 2015-01-01T00:00:00.0000 = 1420070400000
# Unix timestamp (ms) for 2015-01-01T00:00:00.0001 = 1420070400001
# Unix timestamp (ms) for 2015-02-01T00:00:00.0000 = 1422748800000
# Unix timestamp (ms) for 2100-01-01T00:00:00.0000 = 4102444800000
# Discord timestamp (ms) for 2015-01-01T00:00:00.0000 = 0
# Discord timestamp (ms) for 2015-01-01T00:00:00.0001 = 1
# Discord timestamp (ms) for 2015-02-01T00:00:00.0000 = 2678400000
# Min Discord snowflake for 2015-01-01T00:00:00.0000 = 0 ( 00000000000000000000000 in binary)
# Min Discord snowflake for 2015-01-01T00:00:00.0001 = 4194304 ( 10000000000000000000000 in binary)
# Min Discord snowflake for 2015-02-01T00:00:00.0000 = 11234023833600000 ( 100111111010010100100100000000000000000000000000000000 in binary)
# Min Discord snowflake for 2100-01-01T00:00:00.0000 = 5625346837708800000 (100111000010001001111110010010100000000000000000000000000000000 in binary)
    # Discord snowflakes created between 2015-01-01T00:00:00.001Z and 2100-01-01T00:00:00.000Z will therefore fall in the range 4194304-5625346837708800000.
# We use 2015-02-01T00:00:00.000Z (11234023833600000) as the start of the range instead because it greatly reduces the range and we have seen no evidence of Discord snowflakes from before December 28th 2015.
# This function will fail or (very unlikely) return incorrect results if we ever have a player with a Magic Online username that falls numerically between MIN_DISCORD_ID and MAX_DISCORD_ID.
MIN_DISCORD_ID = 11234023833600000
MAX_DISCORD_ID = 5625346837708800000
if person.isdigit() and int(person) >= MIN_DISCORD_ID and int(person) <= MAX_DISCORD_ID:
return load_person_by_discord_id(int(person), season_id=season_id)
return load_person_by_mtgo_username(person, season_id=season_id)
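# Worked check of the bounds above (sketch): a snowflake is the millisecond
# offset from the Discord epoch (2015-01-01) shifted left by 22 bits, so
# the 2015-02-01 minimum is
#   2678400000 << 22 == 11234023833600000 == MIN_DISCORD_ID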
# pylint: disable=invalid-name
def maybe_load_person_by_discord_id(discord_id: Optional[int]) -> Optional[Person]:
if discord_id is None:
return None
return guarantee.at_most_one(load_people(f'p.discord_id = {discord_id}'))
# pylint: disable=invalid-name
def maybe_load_person_by_tappedout_name(username: str) -> Optional[Person]:
return guarantee.at_most_one(load_people('p.tappedout_username = {username}'.format(username=sqlescape(username))))
# pylint: disable=invalid-name
def maybe_load_person_by_mtggoldfish_name(username: str) -> Optional[Person]:
return guarantee.at_most_one(load_people('p.mtggoldfish_username = {username}'.format(username=sqlescape(username))))
def load_person(where: str, season_id: Optional[int] = None) -> Person:
people = load_people(where, season_id=season_id)
if len(people) == 0: # We didn't find an entry for that person with decks, what about without?
person = load_person_statless(where, season_id)
else:
person = guarantee.exactly_one(people)
set_achievements([person], season_id)
return person
# Sometimes (person detail page) we want to load what we know about a person even though they had no decks in the specified season.
def load_person_statless(where: str = 'TRUE', season_id: Optional[int] = None) -> Person:
person_query = query.person_query()
sql = f"""
SELECT
p.id,
{person_query} AS name,
p.mtgo_username,
p.tappedout_username,
p.mtggoldfish_username,
p.discord_id,
p.elo,
p.locale
FROM
person AS p
WHERE
{where}
"""
people = [Person(r) for r in db().select(sql)]
for p in people:
p.season_id = season_id
return guarantee.exactly_one(people)
def load_people_count(where: str = 'TRUE', season_id: Optional[Union[str, int]] = None) -> int:
season_join = query.season_join() if season_id else ''
season_query = query.season_query(season_id, 'season.id')
sql = f"""
SELECT
COUNT(DISTINCT p.id)
FROM
person AS p
LEFT JOIN
deck AS d ON d.person_id = p.id
LEFT JOIN
deck_cache AS dc ON d.id = dc.deck_id
{season_join}
WHERE
({where}) AND ({season_query})
"""
return db().value(sql) or 0
# Note: This only loads people who have decks in the specified season.
def load_people(where: str = 'TRUE',
order_by: str = 'num_decks DESC, p.name',
limit: str = '',
season_id: Optional[Union[str, int]] = None) -> Sequence[Person]:
person_query = query.person_query()
season_join = query.season_join() if season_id else ''
season_query = query.season_query(season_id, 'season.id')
sql = f"""
SELECT
p.id,
{person_query} AS name,
p.mtgo_username,
p.tappedout_username,
p.mtggoldfish_username,
p.discord_id,
p.elo,
p.locale,
SUM(1) AS num_decks,
SUM(dc.wins) AS wins,
SUM(dc.losses) AS losses,
SUM(dc.draws) AS draws,
SUM(wins - losses) AS record,
SUM(CASE WHEN dc.wins >= 5 AND dc.losses = 0 AND d.source_id IN (SELECT id FROM source WHERE name = 'League') THEN 1 ELSE 0 END) AS perfect_runs,
SUM(CASE WHEN d.finish = 1 THEN 1 ELSE 0 END) AS tournament_wins,
SUM(CASE WHEN d.finish <= 8 THEN 1 ELSE 0 END) AS tournament_top8s,
IFNULL(ROUND((SUM(dc.wins) / NULLIF(SUM(dc.wins + dc.losses), 0)) * 100, 1), '') AS win_percent,
SUM(DISTINCT CASE WHEN d.competition_id IS NOT NULL THEN 1 ELSE 0 END) AS num_competitions
FROM
person AS p
LEFT JOIN
deck AS d ON d.person_id = p.id
LEFT JOIN
deck_cache AS dc ON d.id = dc.deck_id
{season_join}
WHERE
({where}) AND ({season_query})
GROUP BY
p.id
ORDER BY
{order_by}
{limit}
"""
people = [Person(r) for r in db().select(sql)]
for p in people:
p.season_id = season_id
return people
def seasons_active(person_id: int) -> List[int]:
sql = f"""
SELECT
DISTINCT season.id
FROM
deck AS d
{query.season_join()}
WHERE
d.person_id = {person_id}
ORDER BY
season.id
"""
return db().values(sql)
def preaggregate() -> None:
achievements.preaggregate_achievements()
preaggregate_head_to_head()
def preaggregate_head_to_head() -> None:
table = '_head_to_head_stats'
sql = """
CREATE TABLE IF NOT EXISTS _new{table} (
person_id INT NOT NULL,
opponent_id INT NOT NULL,
season_id INT NOT NULL,
num_matches INT NOT NULL,
wins INT NOT NULL,
losses INT NOT NULL,
draws INT NOT NULL,
PRIMARY KEY (season_id, person_id, opponent_id),
FOREIGN KEY (season_id) REFERENCES season (id) ON UPDATE CASCADE ON DELETE CASCADE,
FOREIGN KEY (person_id) REFERENCES person (id) ON UPDATE CASCADE ON DELETE CASCADE,
FOREIGN KEY (opponent_id) REFERENCES person (id) ON UPDATE CASCADE ON DELETE CASCADE
) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci AS
SELECT
p.id AS person_id,
opp.id AS opponent_id,
season.id AS season_id,
COUNT(p.id) AS num_matches,
SUM(CASE WHEN dm.games > odm.games THEN 1 ELSE 0 END) AS wins,
SUM(CASE WHEN dm.games < odm.games THEN 1 ELSE 0 END) AS losses,
SUM(CASE WHEN dm.games = odm.games THEN 1 ELSE 0 END) AS draws
FROM
person AS p
INNER JOIN
deck AS d ON p.id = d.person_id
INNER JOIN
deck_match AS dm ON dm.deck_id = d.id
INNER JOIN
deck_match AS odm ON dm.match_id = odm.match_id AND dm.deck_id <> IFNULL(odm.deck_id, 0)
INNER JOIN
deck AS od ON odm.deck_id = od.id
INNER JOIN
person AS opp ON od.person_id = opp.id
{season_join}
GROUP BY
p.id,
opp.id,
season.id
""".format(table=table, season_join=query.season_join())
preaggregation.preaggregate(table, sql)
@retry_after_calling(achievements.preaggregate_achievements)
def set_achievements(people: List[Person], season_id: Optional[int] = None) -> None:
people_by_id = {person.id: person for person in people}
sql = achievements.load_query(people_by_id, season_id)
results = [Container(r) for r in db().select(sql)]
for result in results:
people_by_id[result['id']].num_achievements = len([k for k, v in result.items() if k != 'id' and v > 0])
people_by_id[result['id']].achievements = result
people_by_id[result['id']].achievements.pop('id')
@retry_after_calling(preaggregate_head_to_head)
def load_head_to_head_count(person_id: int, where: str = 'TRUE', season_id: Optional[int] = None) -> int:
season_query = query.season_query(season_id)
sql = f'SELECT COUNT(*) FROM _head_to_head_stats AS hths INNER JOIN person AS opp ON hths.opponent_id = opp.id WHERE ({where}) AND (hths.person_id = {person_id}) AND ({season_query})'
return db().value(sql)
@retry_after_calling(preaggregate_head_to_head)
def load_head_to_head(person_id: int, where: str = 'TRUE', order_by: str = 'num_matches DESC, record DESC, win_percent DESC, wins DESC, opp_mtgo_username', limit: str = '', season_id: Optional[int] = None) -> Sequence[Container]:
season_query = query.season_query(season_id)
sql = f"""
SELECT
hths.person_id AS id,
LOWER(opp.mtgo_username) AS opp_mtgo_username,
SUM(num_matches) AS num_matches,
SUM(wins) - SUM(losses) AS record,
SUM(wins) AS wins,
SUM(losses) AS losses,
SUM(draws) AS draws,
IFNULL(ROUND((SUM(wins) / NULLIF(SUM(wins + losses), 0)) * 100, 1), '') AS win_percent
FROM
_head_to_head_stats AS hths
INNER JOIN
person AS opp ON hths.opponent_id = opp.id
WHERE
({where}) AND (hths.person_id = {person_id}) AND ({season_query})
GROUP BY
hths.person_id,
hths.opponent_id
ORDER BY
{order_by}
{limit}
"""
return [Container(r) for r in db().select(sql)]
def associate(d: deck.Deck, discord_id: int) -> int:
person_id = db().value('SELECT person_id FROM deck WHERE id = %s', [d.id], fail_on_missing=True)
sql = 'UPDATE person SET discord_id = %s WHERE id = %s'
return db().execute(sql, [discord_id, person_id])
def is_allowed_to_retire(deck_id: Optional[int], discord_id: Optional[int]) -> bool:
if not deck_id:
return False
if not discord_id:
return True
person = maybe_load_person_by_discord_id(discord_id)
if person is None:
return True
return any(int(deck_id) == deck.id for deck in person.decks)
def get_or_insert_person_id(mtgo_username: Optional[str], tappedout_username: Optional[str], mtggoldfish_username: Optional[str]) -> int:
sql = 'SELECT id FROM person WHERE LOWER(mtgo_username) = LOWER(%s) OR LOWER(tappedout_username) = LOWER(%s) OR LOWER(mtggoldfish_username) = LOWER(%s)'
person_id = db().value(sql, [mtgo_username, tappedout_username, mtggoldfish_username])
if person_id:
return person_id
sql = 'INSERT INTO person (mtgo_username, tappedout_username, mtggoldfish_username) VALUES (%s, %s, %s)'
return db().insert(sql, [mtgo_username, tappedout_username, mtggoldfish_username])
def load_aliases() -> List[Container]:
sql = """
SELECT
pa.person_id,
pa.alias,
p.mtgo_username
FROM
person_alias AS pa
INNER JOIN
person AS p ON p.id = pa.person_id
"""
return [Container(r) for r in db().select(sql)]
def add_alias(person_id: int, alias: str) -> None:
db().begin('add_alias')
try:
p = load_person_by_mtgo_username(alias)
db().execute('UPDATE deck SET person_id = %s WHERE person_id = %s', [person_id, p.id])
db().execute('DELETE FROM person WHERE id = %s', [p.id])
except DoesNotExistException:
pass
db().execute('INSERT INTO person_alias (person_id, alias) VALUES (%s, %s)', [person_id, alias])
db().commit('add_alias')
def load_notes(person_id: Optional[int] = None) -> List[Container]:
where = f'subject_id = {person_id}' if person_id else 'TRUE'
sql = """
SELECT
pn.created_date,
pn.creator_id,
{creator_query} AS creator,
pn.subject_id,
{subject_query} AS subject,
note
FROM
person_note AS pn
INNER JOIN
person AS c ON pn.creator_id = c.id
INNER JOIN
person AS s ON pn.subject_id = s.id
WHERE
{where}
ORDER BY
s.id,
pn.created_date DESC
""".format(creator_query=query.person_query('c'), subject_query=query.person_query('s'), where=where)
notes = [Container(r) for r in db().select(sql)]
for n in notes:
n.created_date = dtutil.ts2dt(n.created_date)
n.display_date = dtutil.display_date(n.created_date)
return notes
def add_note(creator_id: int, subject_id: int, note: str) -> None:
sql = 'INSERT INTO person_note (created_date, creator_id, subject_id, note) VALUES (UNIX_TIMESTAMP(NOW()), %s, %s, %s)'
db().execute(sql, [creator_id, subject_id, note])
def link_discord(mtgo_username: str, discord_id: int) -> Person:
person_id = deck.get_or_insert_person_id(mtgo_username, None, None)
p = load_person_by_id(person_id)
if p.discord_id is not None:
raise AlreadyExistsException('Player with mtgo username {mtgo_username} already has discord id {old_discord_id}, cannot add {new_discord_id}'.format(mtgo_username=mtgo_username, old_discord_id=p.discord_id, new_discord_id=discord_id))
sql = 'UPDATE person SET discord_id = %s WHERE id = %s'
db().execute(sql, [discord_id, p.id])
return p
def unlink_discord(person_id: int) -> int:
sql = 'UPDATE person SET discord_id = NULL WHERE id = %s'
return db().execute(sql, [person_id])
def remove_discord_link(discord_id: int) -> int:
sql = 'UPDATE person SET discord_id = NULL WHERE discord_id = %s'
return db().execute(sql, [discord_id])
def is_banned(mtgo_username: str) -> bool:
return db().value('SELECT banned FROM person WHERE mtgo_username = %s', [mtgo_username]) == 1
def squash(p1id: int, p2id: int, col1: str, col2: str) -> None:
logger.warning('Squashing {p1id} and {p2id} on {col1} and {col2}'.format(p1id=p1id, p2id=p2id, col1=col1, col2=col2))
db().begin('squash')
new_value = db().value('SELECT {col2} FROM person WHERE id = %s'.format(col2=col2), [p2id])
db().execute('UPDATE deck SET person_id = %s WHERE person_id = %s', [p1id, p2id])
db().execute('DELETE FROM person WHERE id = %s', [p2id])
db().execute('UPDATE person SET {col2} = %s WHERE id = %s'.format(col2=col2), [new_value, p1id])
db().commit('squash')
def set_locale(person_id: int, locale: str) -> None:
db().execute('UPDATE person SET locale = %s WHERE id = %s', [locale, person_id])
|
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
|
decksite/data/person.py
|
Python
|
gpl-3.0
| 17,080
|
import cv2
import random
from moviepy.editor import VideoFileClip
#from modules.ssd.main import ImageNetwork
"""
def pipeline_yolo(img):
img_undist, img_lane_augmented, lane_info = lane_process(img)
output = vehicle_detection_yolo(img, img_lane_augmented, lane_info)
return output
def process_image(img):
output = vehicle_only_yolo(img)
return output
def process_video():
video_output = 'examples/project_YOLO.mp4'
clip1 = VideoFileClip("examples/project_video.mp4").subclip(30,32)
clip = clip1.fl_image(pipeline_yolo)
clip.write_videofile(video_output, audio=False)
"""
def save_image(image):
"""Save an image to the test directory"""
    image = image[:, :, ::-1]  # moviepy frames are RGB; cv2.imwrite expects BGR
    frame = random.randint(0, 100)  # random id: collisions overwrite earlier frames
    filename = 'test/frames/frame-{0}.png'.format(frame)
    cv2.imwrite(filename, image)
return image
def process_video(video_input, video_output):
"""Process a video using the SSD network"""
#with ImageNetwork() as ssd:
clip = VideoFileClip(video_input).subclip(26,31)
#clip = clip.fl_image(ssd.ssd_process_frame)
clip = clip.fl_image(save_image)
clip.write_videofile(video_output, audio=False)
if __name__ == "__main__":
# SSD Pipeline
video_input = 'test/project_video.mp4'
video_output = 'test/labelled_ssd.mp4'
process_video(video_input, video_output)
|
maxkferg/smart-city-model
|
birdseye.py
|
Python
|
gpl-3.0
| 1,354
|
# -*- coding: utf-8 -*-
#************************************************************************
#
# TeX-9 library: Python module
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright Elias Toivanen, 2011-2014
#
#************************************************************************
import re
import vim
import sys
# Utility functions
def echoerr(errorstr):
sys.stderr.write("TeX-9: {0}\n".format(str(errorstr)))
def echomsg(msgstr):
sys.stdout.write("TeX-9: {0}\n".format(str(msgstr)))
def get_latex_environment(vim_window):
"""Get information about the current LaTeX environment.
Returns a dictionary with keys
'environment': the name of the current LaTeX environment
'range': 2-tuple of the beginning and ending line numbers
"""
pat = re.compile(r'^\s*\\(begin|end){([^}]+)}')
b = list(vim_window.buffer)
row = vim_window.cursor[0] - 1
environment = ""
begin = end = 0
current_line = b[row]
head = b[row - 1::-1] # From line above to the start
tail = b[row + 1:] # From next line to the end
c = pat.match(current_line)
if c:
environment = c.group(2)
if c.group(1) == 'end':
end = row + 1
elif c.group(1) == 'begin':
begin = row + 1
if not begin:
envs = {}
for i, line in enumerate(head):
m = pat.match(line)
if m:
e = m.group(2)
envs[m.groups()] = i
if ('begin', e) in envs and ('end', e) in envs and envs[('end', e)] < envs[('begin', e)]:
# Eliminate nested environments
del envs[('begin', e)]
del envs[('end', e)]
elif ('end', e) not in envs:
begin = row - i
environment = e
break
if not end:
envs = {}
for i, line in enumerate(tail):
m = pat.match(line)
if m:
envs[m.groups()] = i
e = m.group(2)
if ('begin', e) in envs and ('end', e) in envs:
#and envs[('end', e)] > envs[('begin', e)]:
# Eliminate nested environments
del envs[('begin', e)]
del envs[('end', e)]
elif m.groups() == ('end', environment):
end = row + i + 2
break
return {'environment': environment, 'range': (begin, end)}
def is_latex_math_environment(vim_window,
environments = re.compile(r"matrix|cases|math|equation|align|array")):
"""Returns True if the cursor is currently on a maths environment."""
e = get_latex_environment(vim_window)
return bool(environments.search(e['environment']))
def find_compiler(vimbuffer, nlines=10):
"""Finds the compiler from the header."""
lines = "\n".join(vimbuffer[:nlines])
if lines:
c = re.search("^%\s*Compiler:\s*(\S+)", lines, re.M)
if c:
return c.group(1).strip()
else:
return ""
else:
#Cannot determine the compiler
return ""
class TeXNineError(Exception):
pass
|
vim-scripts/TeX-9
|
ftplugin/tex_nine/tex_nine_utils.py
|
Python
|
gpl-3.0
| 3,875
|
#To-DO:NEXT: Write the show_keys email command [admin level]
#To-Do:NEXT: Write the other templates
#To-Do:NEXT: write the connect command [user level]
#To-Do:DONE: Subject bug fixed in send_email (send msg.as_string(), not just the body part).
#To-DO:NEXT: Make the html emails look prettier somehow.
#listening script
from core import *
import codebase
import poplib
import email
import email.header
import sched, time
import smtplib
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
orders = []
log = []
e = str(raw_input('Enter the email id :'))
p = str(raw_input('Enter the password : '))
def send_email(user, body):
fromaddr = e
to = user
body = unicode(body)
msg = MIMEMultipart('alternative')
msg['From'] = str(fromaddr)
msg['To'] = str(to)
    msg['Subject'] = 'EMS'  # header is kept now that msg.as_string() is sent below
chunk = MIMEText(body,'html')
msg.attach(chunk)
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(e, p)
    server.sendmail(e, user, msg.as_string())  # send the whole message, not just the body part, so headers survive
server.quit()
def check_for_orders(emailid, password):
#we only check for max 10 orders every refresh
max_orders = 10
#log into pop
pop_conn = poplib.POP3_SSL('pop.gmail.com')
pop_conn.user(emailid)
pop_conn.pass_(password)
#counting number of messages
msgcount = pop_conn.stat()[0]
#main loop checking the subjects and adding them to orders list
for i in range(msgcount, max(0, msgcount - max_orders), -1):
response, msg_as_list, size = pop_conn.retr(i)
msg = email.message_from_string('\r\n'.join(msg_as_list))
if "subject" in msg:
decheader = email.header.decode_header(msg["subject"])
subject = decheader[0][0]
charset = decheader[0][1]
if charset:
subject = subject.decode(charset)
orders.append(subject)
orders.reverse() #for sequence
pop_conn.quit()
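# Example order subjects parsed by mainloop below (sketch derived from the
# fixed-width slicing; keys are 4 characters, fields are '|'-separated):
#   'CREA 5 MASTERKEY'
#   'acti K123 phone|email|pkey'
#   'add K123 name|pricepu|quan|units|pkey'
#   'EXIT MASTERKEY'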
def mainloop(emailid, password):
check_for_orders(emailid, password)
if orders == []:
log.append('Searching for orders...')
else:
for items in orders:
#admin command to create key -> crea [x] [MASTERKEY]
if str(items)[0:4] == 'CREA':
codebase.keys_create(int(str(items)[5:6]), str(items)[7:len(items)])
log.append(items)
print '\tKey(s) created.'
orders.remove(items)
#admin command to stop the program -> EXIT [MASTERKEY]
if str(items)[0:4] == 'EXIT':
mkey = str(items)[5:len(str(items))]
                if codebase.inv.key_mcheck(mkey) == 1:  # 'is 1' relied on CPython int caching
exit()
#user command to activate a key -> acti [key] [ph|em|pk]
if str(items)[0:4] == 'acti':
key = str(items)[5:9]
otherstuff = str(items)[10:len(str(items))]
ph = otherstuff.split('|')[0]
em = otherstuff.split('|')[1]
pk = otherstuff.split('|')[2]
for keys in codebase.inv.keys:
if key == keys:
for pkeys in codebase.inv.keys:
try:
if codebase.inv.mappedkeys[key] == pk:
print 'Pkey already exists.' #nevergonnahappen
except KeyError:
codebase.inv.map_keys(key, ph, em)
codebase.inv.map_pkey(key, pk)
log.append(items)
orders.remove(items)
#user command to add item -> add [key] [name|pricepu|quan|units|pkey]
if str(items)[0:3] == 'add':
log.append(items)
key = str(items)[4:8]
otherstuff = str(items)[9:len(str(items))]
name = otherstuff.split('|')[0]
pricepu = otherstuff.split('|')[1]
quan = otherstuff.split('|')[2]
units = otherstuff.split('|')[3]
pkey = otherstuff.split('|')[4]
for keys in codebase.inv.keys:
if key == keys:
for pkeys in codebase.inv.pkeys:
try:
if codebase.inv.decpt(codebase.inv.pkeys[key], codebase.inv.enckey, 'NA', 'NA') == pkey:
codebase.inv.add_item(codebase.item(name, pricepu, quan, units, key, 'NA'))
except KeyError:
print '\tOops.'
orders.remove(items)
if str(items)[0:4] == 'show':
key = str(items)[5:9]
try:
useremail = codebase.inv.mappedkeys[key].split('|')[1]
thetext = str(codebase.inv.create_html_Market(key))
body = MIMEText(thetext, 'html')
send_email(useremail, body)
print 'IREmail sent to : ' + str(useremail)
except KeyError:
print '\tOops'
orders.remove(items)
#user command to update item -> updt [key] [label|pricepu|quan|units|pkey]
if str(items)[0:4] == 'updt':
log.append(items)
key = str(items)[5:9]
otherstuff = str(items)[10:len(str(items))]
name = otherstuff.split('|')[0]
pricepu = otherstuff.split('|')[1]
quan = otherstuff.split('|')[2]
units = otherstuff.split('|')[3]
pkey = otherstuff.split('|')[4]
for keys in codebase.inv.keys:
if key == keys:
for pkeys in codebase.inv.pkeys:
try:
if codebase.inv.decpt(codebase.inv.pkeys[key], codebase.inv.enckey, 'NA', 'NA') == pkey:
codebase.inv.update(name, pricepu, quan, key)
print 'item updated'
except KeyError:
print '\tOops.'
orders.remove(items)
if str(items)[0:4] == 'BACK':
log.append(items)
key = str(items)[5:len(str(items))]
                if codebase.inv.key_mcheck(key) == 1:
codebase.inv.back_up(key)
print 'Backed up'
orders.remove(items)
if str(items)[0:3] == 'BAN':
log.append(items)
bankey = str(items)[4:8]
masterkey = str(items)[9:len(str(items))]
                if codebase.inv.key_mcheck(masterkey) == 1:
codebase.inv.remove_key(bankey, masterkey)
print 'Removed: ' + str(bankey)
orders.remove(items)
    # Re-schedule this check; the original passed mainloop(...)'s return value
    # and an undefined `sc` instead of the callable and its argument tuple
    s.enter(120, 1, mainloop, (emailid, password))
choice = int(raw_input('1 - Start new \n2 - Backup old\nEnter choice : '))
if choice == 1:
initialMkey = str(raw_input('Set the MASTERKEY: '))
codebase.int_m(initialMkey)
print 'Masterkey Created. EMS service online. Receiving Orders'
log.append('Masterkey Created. EMS service started. Receiving Orders:')
elif choice == 2:
old_key = str(raw_input('Enter mkey of the backup : '))
codebase.inv.restore(old_key)
else:
print 'Unrecognized command'
s = sched.scheduler(time.time, time.sleep)
s.enter(120, 1, mainloop, (e, p))
s.run()
|
Suranjandas7/EMS
|
admin.py
|
Python
|
gpl-3.0
| 6,167
|
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2009 Doug Hellmann All rights reserved.
#
"""
"""
__version__ = "$Id$"
#end_pymotw_header
import tarfile
import time
t = tarfile.open('example.tar', 'r')
for filename in [ 'README.txt', 'notthere.txt' ]:
try:
info = t.getmember(filename)
except KeyError:
print 'ERROR: Did not find %s in tar archive' % filename
else:
print '%s is %d bytes' % (info.name, info.size)
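# Sketch (assumption: example.tar is not shipped with this snippet); a
# suitable archive can be created beforehand with:
# t = tarfile.open('example.tar', 'w')
# t.add('README.txt')
# t.close()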
|
qilicun/python
|
python2/PyMOTW-1.132/PyMOTW/tarfile/tarfile_getmember.py
|
Python
|
gpl-3.0
| 463
|
from semeval import helper as helper
from semeval.lstms.LSTMModel import LSTMModel
import numpy
from keras.models import Sequential
from keras.layers import Dense, Activation, Bidirectional, LSTM, Dropout
from keras.optimizers import RMSprop
from keras.callbacks import EarlyStopping
class EarlyStoppingLSTM(LSTMModel):
    '''Model that can train an LSTM and apply the trained model to unseen
data. Inherits from LSTMModel.
Instance Arguments:
self._word2vec_model - gensim.models.Word2Vec required as an argument to __init__
self._max_length = 0
self._model = None
public methods:
train - trains a Bi-directional LSTM with dropout and early stopping on
the texts and sentiment values given.
test - Using the trained model saved at self._model will return a list of
sentiment values given the texts in the argument of the method.
'''
def __init__(self, word2vec_model):
super().__init__(word2vec_model)
def fit(self, train_texts, sentiment_values):
'''Given a list of Strings and a list of floats (sentiments) or numpy
array of floats. It will return a trained LSTM model and `save` the model to
self._model for future use using self.test(texts).
        The model converts the list of strings into a list of numpy matrices
which has the following dimensions:
length of the longest train text broken down into tokens
by
the vector size of the word2vec model given in the constructor
        e.g. 21, 300 if the word2vec model vector size is 300 and the length of
        the longest train text in tokens is 21.
        For more details on the layers, read the source or after training
visualise using visualise_model function.
'''
super().fit()
max_length = self._set_max_length(train_texts)
vector_length = self._word2vec_model.vector_size
train_vectors = self._text2vector(train_texts)
model = Sequential()
model.add(Dropout(0.5, input_shape=(max_length, vector_length)))
# Output of this layer is of max_length by max_length * 2 dimension
# instead of max_length, vector_length
model.add(Bidirectional(LSTM(max_length, activation='softsign',
return_sequences=True)))
model.add(Dropout(0.5))
model.add(Bidirectional(LSTM(max_length, activation='softsign')))
model.add(Dropout(0.5))
model.add(Dense(1))
model.add(Activation('linear'))
        # clipvalue is an optimizer argument, so build the optimizer explicitly
        # rather than passing clipvalue to compile()
        model.compile(loss='mse',
                      optimizer=RMSprop(clipvalue=5),
                      metrics=['cosine_proximity'])
early_stopping = EarlyStopping(monitor='val_loss', patience=10)
        model.fit(train_vectors, sentiment_values, validation_split=0.1,
                  callbacks=[early_stopping], nb_epoch=100)
return self._set_model(model)
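# Minimal usage sketch (assumptions: a trained gensim word2vec model and
# parallel lists of texts and sentiment scores; `test` comes from the
# parent LSTMModel, as documented in the class docstring above):
#
#     model = EarlyStoppingLSTM(word2vec_model)
#     model.fit(train_texts, train_sentiments)
#     predictions = model.test(test_texts)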
|
apmoore1/semeval
|
lstms/EarlyStoppingLSTM.py
|
Python
|
gpl-3.0
| 2,914
|
from __future__ import absolute_import
from celery import shared_task
import praw
from .commonTasks import *
from .models import Redditor, RedditorStatus, Status
@shared_task
def test(param):
return 'The test task executed with argument "%s" ' % param
@shared_task
def update_user(redditor):
update_user_status(redditor, 10)
get_submissions(redditor)
update_user_status(redditor, 20)
get_comments(redditor)
update_user_status(redditor, 30)
@shared_task
def write_user(user):
create_user(user)
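# Usage sketch (not in the original file): shared tasks are queued through
# the Celery broker with .delay(), e.g. from a Django view:
#
#     update_user.delay(redditor)
#     write_user.delay(user)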
|
a-harper/RedditorProfiler
|
tasks.py
|
Python
|
gpl-3.0
| 532
|
#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
import argparse
import numpy as np
import pandas as pd
import sys
import os
import matplotlib as mpl
#mpl.use('Agg')
from matplotlib import ticker
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import matplotlib.dates as md
from matplotlib.collections import LineCollection
import pylab
mpl.rc('text.latex', preamble=r'\usepackage{color}')  # raw string: '\u' is an escape in non-raw literals
from scipy.signal import argrelextrema
from scipy import interpolate
from scipy.optimize import curve_fit
from sklearn.kernel_ridge import KernelRidge
from sklearn.model_selection import GridSearchCV
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF, WhiteKernel, ExpSineSquared
from scipy import stats
from scipy.spatial.distance import pdist,cdist
import datetime
import time
import glob
import numpy.ma as ma
import importlib
import time_tools_attractor as ti
import io_tools_attractor as io
import data_tools_attractor as dt
import stat_tools_attractor as st
fmt1 = "%.1f"
fmt2 = "%.2f"
fmt3 = "%.3f"
np.set_printoptions(precision=4)
################# DEFAULT ARGS #########################
inBaseDir = '/scratch/lforesti/data/' # '/store/msrad/radar/precip_attractor/data/' #'/scratch/lforesti/data/'
outBaseDir = '/users/lforesti/results/'
tmpBaseDir = '/scratch/lforesti/tmp/'
pltType = 'spread'  # 'evolution' or 'spread'
timeSampMin = 5
spreadMeasure = 'scatter'  # 'std' or 'scatter'
########GET ARGUMENTS FROM CMD LINE####
parser = argparse.ArgumentParser(description='Plot radar rainfall field statistics.')
parser.add_argument('-start', default='201601310000', type=str,help='Starting date YYYYMMDDHHmmSS.')
parser.add_argument('-end', default='201601310000', type=str,help='Ending date YYYYMMDDHHmmSS.')
parser.add_argument('-product', default='AQC', type=str,help='Which radar rainfall product to use (AQC, CPC, etc).')
parser.add_argument('-wols', default=0, type=int,help='Whether to use weighted ordinary least squares in the fitting of the power spectrum.')
parser.add_argument('-minR', default=0.08, type=float,help='Minimum rainfall rate for computation of WAR and various statistics.')
parser.add_argument('-minWAR', default=1, type=float,help='Minimum WAR threshold for plotting.')
parser.add_argument('-minCorrBeta', default=0.5, type=float,help='Minimum correlation coeff. for beta for plotting.')
parser.add_argument('-accum', default=5, type=int,help='Accumulation time of the product [minutes].')
parser.add_argument('-temp', default=5, type=int,help='Temporal sampling of the products [minutes].')
parser.add_argument('-format', default='netcdf', type=str,help='Format of the file containing the statistics [csv,netcdf].')
parser.add_argument('-plt', default='spread', type=str,help='Plot type [spread, evolution].')
parser.add_argument('-refresh', default=0, type=int,help='Whether to refresh the binary .npy archive or not.')
args = parser.parse_args()
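# Example invocation (sketch; the script name is illustrative):
#   python plot_radar_stats.py -start 201601010000 -end 201601310000 \
#       -product AQC -minR 0.08 -format netcdf -plt spread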
refreshArchive = bool(args.refresh)
print('Refresh archive:', refreshArchive)
product = args.product
pltType = args.plt
timeAccumMin = args.accum
timeSampMin = args.temp
timeAccumMinStr = '%05i' % timeAccumMin
timeSampMinStr = '%05i' % timeSampMin
if (int(args.start) > int(args.end)):
print('Time end should be after time start')
sys.exit(1)
if (int(args.start) < 198001010000) or (int(args.start) > 203001010000) or \
        (int(args.end) < 198001010000) or (int(args.end) > 203001010000):
print('Invalid -start or -end time arguments.')
sys.exit(1)
else:
timeStartStr = args.start
timeEndStr = args.end
timeStart = ti.timestring2datetime(timeStartStr)
timeEnd = ti.timestring2datetime(timeEndStr)
if spreadMeasure != 'std' and spreadMeasure != 'scatter':
print('The measure of spread should be either std or scatter')
sys.exit(1)
if spreadMeasure == 'std':
    txtYlabel = 'Normalized st. deviation'
if spreadMeasure == 'scatter':
    txtYlabel = 'Normalized half scatter'
############### OPEN FILES WITH STATS
## Open single binary python file with stats to speed up (if it exists)
tmpArchiveFileName = tmpBaseDir + timeStartStr + '-' + timeEndStr + '_temporaryAttractor.npy'
tmpArchiveFileNameVariables = tmpBaseDir + timeStartStr + '-' + timeEndStr + '_temporaryAttractor_varNames.npy'
if (os.path.isfile(tmpArchiveFileName) == True) and (refreshArchive == False):
arrayStats = np.load(tmpArchiveFileName)
arrayStats = arrayStats.tolist()
variableNames = np.load(tmpArchiveFileNameVariables)
print('Loaded:', tmpArchiveFileName)
else:
## Open whole list of CSV or netCDF files
if args.format == 'csv':
arrayStats, variableNames = io.csv_list2array(timeStart, timeEnd, inBaseDir, analysisType='STATS', \
product = product, timeAccumMin = timeSampMin, minR=args.minR, wols=args.wols)
elif args.format == 'netcdf':
arrayStats, variableNames = io.netcdf_list2array(timeStart, timeEnd, inBaseDir, analysisType='STATS', \
product = product, timeAccumMin = timeAccumMin, minR=args.minR, wols=args.wols, variableBreak = 0)
else:
print('Please provide a valid file format.')
sys.exit(1)
# Check if there are data
if (len(arrayStats) == 0) & (args.format == 'csv'):
print("No data found in CSV files.")
sys.exit(1)
if (len(arrayStats) == 0) & (args.format == 'netcdf'):
print("No data found in NETCDF files.")
sys.exit(1)
    ## Save data into a single binary Python file to speed up further analysis with the same dataset
arrayData = []
    if refreshArchive == True:  # the archive is only (re)written when explicitly requested via -refresh
np.save(tmpArchiveFileName, arrayStats)
np.save(tmpArchiveFileNameVariables, variableNames)
print('Saved:',tmpArchiveFileName)
################ Fill both datetime and data arrays with NaNs where there is no data
# Generate list of datetime objects
timeIntList = dt.get_column_list(arrayStats, 0)
timeStamps_datetime = ti.timestring_array2datetime_array(timeIntList)
nrSamples = len(timeStamps_datetime)
print('Number of analysed radar fields in archive: ', nrSamples)
nrSamplesTotal = int((timeStamps_datetime[nrSamples-1] - timeStamps_datetime[0]).total_seconds()/(timeSampMin*60))
print('Number of missing fields: ', nrSamplesTotal-nrSamples)
# Fill attractor array with NaNs to consider every missing time stamp
arrayStats, timeStamps_datetime = dt.fill_attractor_array_nan(arrayStats, timeStamps_datetime)
print(len(arrayStats), len(timeStamps_datetime), 'samples after filling holes with NaNs.')
print('Variables from file: ', variableNames)
######## Prepare numpy arrays
timeStamps_absolute = ti.datetime2absolutetime(np.array(timeStamps_datetime))
# Convert list of lists to numpy arrays
arrayStats = np.array(arrayStats)
timeStamps_datetime = np.array(timeStamps_datetime)
timeStamps_absolute = np.array(timeStamps_absolute)
#################################################################################
####################### PARAMETERS TO ANALYZE GROWTH OF ERRORS
varNames = ['war', 'r_cmean', 'r_mean', 'eccentricity', 'beta1', 'beta2']
logIMFWAR = True
logTime = True # Keep it True (or false to check exponential growth of errors)
logSpread = True # Keep it True
maxLeadTimeHours = 96
ylims = [10**-1.7,10**0.5]
# Selection criteria for valid trajectories
warThreshold = args.minWAR
betaCorrThreshold = args.minCorrBeta
independenceTimeHours = 1
minNrTraj = 20 # Minimum number of trajectories
nrIQR = 5 # Multiplier of the IQR to define a sample as outlier
verbosity = 1
# Whether to plot the function fits to the growth of errors one by one
plotFits = False
print('Variables for plotting: ', varNames)
####################################################################################
####################### PREPARE DATA ###############################################
maxLeadTimeMin = 60*maxLeadTimeHours
# Generate labels for plotting
varLabels = []
for var in range(0, len(varNames)):
if varNames[var] == 'war':
if logIMFWAR:
varLabels.append('WAR [dB]')
else:
varLabels.append('WAR')
if varNames[var] == 'r_mean':
if logIMFWAR:
varLabels.append('IMF [dB]')
else:
varLabels.append('IMF')
if varNames[var] == 'r_cmean':
if logIMFWAR:
varLabels.append('MM [dB]')
else:
varLabels.append('MM')
if varNames[var] == 'eccentricity':
if logIMFWAR:
varLabels.append('1-eccentricity [dB]')
else:
varLabels.append('Eccentricity')
if varNames[var] == 'beta1':
varLabels.append(r'$\beta_1$')
if varNames[var] == 'beta2':
varLabels.append(r'$\beta_2$')
# Get indices of variables
indicesVars = dt.get_variable_indices(varNames, variableNames)
# Put indices into dictionary
dictIdx = dict(zip(varNames, indicesVars))
dictLabels = dict(zip(varNames, varLabels))
print(dictIdx)
# WAR threshold
boolWAR = (arrayStats[:,dictIdx['war']] >= warThreshold)
# Beta correlation threshold
boolBetaCorr = (np.abs(arrayStats[:,dictIdx['beta1']+1]) >= np.abs(betaCorrThreshold)) & (np.abs(arrayStats[:,dictIdx['beta2']+1]) >= np.abs(betaCorrThreshold))
# Combination of thresholds
boolTot = np.logical_and(boolWAR == True, boolBetaCorr == True)
############### Select subset of variables and change sign of beta
arrayStats_attractor = []
for var in range(0, len(varNames)):
varName = varNames[var]
if (varName == 'beta1') | (varName == 'beta2'):
arrayStats_attractor.append(-arrayStats[:,dictIdx[varName]])
elif (varName == 'war') | (varName == 'r_mean') | (varName == 'r_cmean') | (varName == 'eccentricity'):
if logIMFWAR == True:
if varName == 'eccentricity':
arrayStats_attractor.append(dt.to_dB(1-arrayStats[:,dictIdx[varName]]))
else:
arrayStats_attractor.append(dt.to_dB(arrayStats[:,dictIdx[varName]]))
else:
arrayStats_attractor.append(arrayStats[:,dictIdx[varName]])
else:
arrayStats_attractor.append(arrayStats[:,dictIdx[varName]])
# Convert lists to numpy arrays
arrayStats_attractor = np.array(arrayStats_attractor).T
# Replace "bad" samples with NaNs
arrayStats_attractor[boolWAR==False,:] = np.nan  # note: boolTot (which also includes the beta correlation check) is computed above but not applied here
# Calculate global statistics on the data
arrayStats_Mean = np.nanmean(arrayStats_attractor, axis=0)
arrayStats_Std = np.nanstd(arrayStats_attractor, axis=0)
arrayStats_Scatter = st.nanscatter(arrayStats_attractor, axis=0)
## Compute data increments (changes from one time instant to the other)
arrayStats_increments = np.diff(arrayStats_attractor, axis=0)
# Set first increment equal to the second
arrayStats_increments = np.vstack((arrayStats_increments[0,:], arrayStats_increments))
## Compute global statistics on the data increments
arrayStats_increments_Mean = np.nanmean(arrayStats_increments, axis=0)
arrayStats_increments_Std = np.nanstd(arrayStats_increments, axis=0)
Q25 = np.nanpercentile(arrayStats_increments,25, axis=0)
Q75 = np.nanpercentile(arrayStats_increments,75, axis=0)
arrayStats_increments_IQR = Q75 - Q25
# Print info on statistics of data and increments
if verbosity >= 1:
print('Means : ', arrayStats_Mean)
print('St.devs: ', arrayStats_Std)
print('Scatter: ', arrayStats_Scatter)
print('Means increments : ', arrayStats_increments_Mean)
print('St.devs increments: ', arrayStats_increments_Std)
print('IQR increments : ', arrayStats_increments_IQR)
##########PLOT INCREMENTS
# Plot time series of increments
plotIncrements = True
if plotIncrements:
nrRowsSubplots = 2
nrColsSubplots = 3
p=1
fig = plt.figure(figsize=(22,10))
for var in range(0, len(varNames)):
ax = plt.subplot(nrRowsSubplots, nrColsSubplots, p)
plt.plot(arrayStats_increments[:,var])
ax.axhline(y=Q25[var] - nrIQR*arrayStats_increments_IQR[var],color='r')
ax.axhline(y=Q75[var] + nrIQR*arrayStats_increments_IQR[var],color='r')
plt.title('Time series increments for ' + varNames[var])
p += 1
plt.show()
# Plot histogram of increments
p=1
fig = plt.figure(figsize=(22,10))
for var in range(0, len(varNames)):
plt.subplot(nrRowsSubplots, nrColsSubplots, p)
histRange = [Q25[var] - nrIQR*arrayStats_increments_IQR[var], Q75[var] + nrIQR*arrayStats_increments_IQR[var]]
bins = np.hstack((np.nanmin(arrayStats_increments[:,var]), np.linspace(histRange[0],histRange[1], 50), np.nanmax(arrayStats_increments[:,var])))
n, bins, patches = plt.hist(arrayStats_increments[:,var], 50, range=histRange, facecolor='green', alpha=0.75)
plt.title('Histogram of increments for ' + varNames[var])
p += 1
plt.show()
# Calculate global statistics on the data by removing the bad increments
arrayStats_attractor_nanincrements = arrayStats_attractor.copy()
for var in range(0, len(varNames)):
histRange = [Q25[var] - nrIQR*arrayStats_increments_IQR[var], Q75[var] + nrIQR*arrayStats_increments_IQR[var]]
boolGoodIncrementsVar = (arrayStats_increments[:,var] >= histRange[0]) & (arrayStats_increments[:,var] <= histRange[1])
arrayStats_attractor_nanincrements[~boolGoodIncrementsVar,var] = np.nan
arrayStats_Mean = np.nanmean(arrayStats_attractor_nanincrements, axis=0)
arrayStats_Std = np.nanstd(arrayStats_attractor_nanincrements, axis=0)
arrayStats_Scatter = st.nanscatter(arrayStats_attractor_nanincrements, axis=0)
# Print info on statistics of data (without bad increments)
if verbosity >= 1:
print('Means : ', arrayStats_Mean)
print('St.devs: ', arrayStats_Std)
print('Scatter: ', arrayStats_Scatter)
###########INITIAL CONDITIONS
##### Set the initial conditions of analogues intelligently (using percentiles)
arrayStats_minPerc = np.nanpercentile(arrayStats_attractor, 20, axis=0)
arrayStats_maxPerc = np.nanpercentile(arrayStats_attractor, 90, axis=0)
if verbosity >= 1:
print('MinPerc: ', arrayStats_minPerc)
print('MaxPerc: ', arrayStats_maxPerc)
initialCondIntervals = (arrayStats_maxPerc - arrayStats_minPerc)/100.0
nrIntervals = 5
initialCondRange = []
for var in range(0, len(varNames)):
initialCondRange_variable = np.linspace(arrayStats_minPerc[var], arrayStats_maxPerc[var], nrIntervals).tolist()
initialCondRange.append(initialCondRange_variable)
print('Initial conditions: ', np.array(initialCondRange))
print('Initial intervals: ', np.array(initialCondIntervals))
####################################################################################################
############### COMPUTE GROWTH OF ERRORS AND PLOT RESULTS
nrLeadTimes = int(maxLeadTimeMin/timeSampMin)
nrDimensions = arrayStats_attractor.shape[1]
# Generate lead times
leadTimesMin = []
for lt in range(0,nrLeadTimes):
leadTimesMin.append(lt*timeSampMin)
leadTimesMin = np.array(leadTimesMin)
colormap = plt.cm.gist_rainbow # plt.cm.gray
nrRowsSubplots = 2
nrColsSubplots = 3
p = 0
if nrRowsSubplots == nrColsSubplots:
fgSize = (13, 13)
else:
fgSize = (20, 13)
fig = plt.figure(figsize=fgSize)
ax = fig.add_subplot(111)
tic = time.clock()
for variable in range(0, len(varNames)): ## LOOP OVER VARIABLES
analysisSteps = initialCondRange[variable]
nrSteps = len(analysisSteps)
p = p + 1 # subplot number
print('\n')
varMax = 0
varMin = 999
axSP = plt.subplot(nrRowsSubplots, nrColsSubplots, p)
print('Subplot nr: ', p, ', variable: ', varNames[variable])
print('++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++')
plot_lines = []
decorr_time_hours = []
for step in range(0, nrSteps): ## LOOP OVER STEPS FOR INITIAL CONDITIONS
# Define min and max values for initial conditions
minInit = analysisSteps[step]
maxInit = analysisSteps[step] + initialCondIntervals[variable]
        # Convert the dB initial-condition bounds back to linear units for labels
        # (both original branches of the eccentricity special case were identical)
        if (varNames[variable] in ('war', 'r_mean', 'r_cmean', 'eccentricity')) and logIMFWAR == True:
            minInitLab = dt.from_dB(minInit)
            maxInitLab = dt.from_dB(maxInit)
        else:
            minInitLab = minInit
            maxInitLab = maxInit
# Select data and time stamps of initial conditions
initialConditions_data = (arrayStats_attractor[:,variable] >= minInit) & (arrayStats_attractor[:,variable] <= maxInit)
initialConditions_timestamps = timeStamps_absolute[initialConditions_data]
nrInitPoints = np.sum(initialConditions_data == True)
        print('----------------------------------')
print(nrInitPoints, ' starting points in ', varLabels[variable], ' range ', minInit,'-',maxInit)
# Compute time differences between consecutive time stamps of initial conditions
timeDiffs = np.diff(initialConditions_timestamps)
# Create array of time stamps that have a certain temporal independence (e.g. 24 hours)
independenceTimeSecs = 60*60*independenceTimeHours
timeDiffsAccum = 0
initialConditions_timestamps_indep = []
for i in range(0,nrInitPoints-1):
if (timeDiffs[i] >= independenceTimeSecs) | (timeDiffsAccum >= independenceTimeSecs):
initialConditions_timestamps_indep.append(initialConditions_timestamps[i])
timeDiffsAccum = 0
else:
                # Accumulate the time difference so that a sample closer than the
                # independence time to the previously kept sample can still be
                # selected once the accumulated gap exceeds the independence time
                timeDiffsAccum = timeDiffsAccum + timeDiffs[i]
initialConditions_timestamps_indep = np.array(initialConditions_timestamps_indep)
nrInitPoints = len(initialConditions_timestamps_indep)
print(nrInitPoints, ' independent starting points in ', varLabels[variable], ' range ', minInit,'-',maxInit)
################## GET ANALOGUE DATA SEQUENCES FOLLOWING TIME STAMPS
# Loop over such points and get data sequences
trajectories = [] # list of trajectories
for i in range(0,nrInitPoints):
tsAbs = initialConditions_timestamps_indep[i]
idx = np.where(timeStamps_absolute == tsAbs)[0]
if len(idx) != 1:
print(idx)
print(timeStamps_absolute[idx[0]], timeStamps_absolute[idx[1]])
                print('You have duplicate time stamps in your dataset. Aborting.')
sys.exit(1)
indicesSequence = np.arange(idx,idx+nrLeadTimes)
# Select data sequences
# Handle sequences that go beyond the dataset limits
if np.sum(indicesSequence >= len(timeStamps_absolute)) > 0:
indicesSequence = indicesSequence[indicesSequence < len(timeStamps_absolute)]
sequenceTimes = timeStamps_absolute[indicesSequence]
sequenceData = arrayStats_attractor[indicesSequence,:]
increments = arrayStats_increments[indicesSequence,:]
# Analyse increments of each time series and replace with NaNs if jumps are unrealistic
minIncrements = Q25[variable] - nrIQR*arrayStats_increments_IQR[variable]
maxIncrements = Q75[variable] + nrIQR*arrayStats_increments_IQR[variable]
# Criterion to define whether an increment is unrealistically large
#boolLargeIncrements = np.abs(increments) >= arrayStats_Std[variable]
boolLargeIncrements = (increments[:,variable] < minIncrements) | (increments[:,variable] > maxIncrements)
boolLargeIncrements[0] = False # The increment of the first element from the one before the start of the sequence is not considered as wrong
            idxFirstBadIncrement = np.argmax(boolLargeIncrements == True)
maxNrBadIncrements = 5
if np.sum(boolLargeIncrements) > maxNrBadIncrements:
# Replace all data with NaNs
sequenceData[:,variable] = np.nan
            # else:
            #     # Replace data from first bad increment till the end with NaNs
            #     sequenceData[idxFirstBadIncrement:,variable] = np.nan
# Check the continuity of time stamps (no holes)
timeDiffMin = np.array(np.diff(sequenceTimes)/60, dtype=int)
# Nr of invalid samples not having the correct time stamp (it should be zero if correctly filled with NaNs)
nrInvalidTimes = np.sum(timeDiffMin != timeSampMin)
# Check how many valid data (not NaNs) you have in the future sequence
nrValidSamples = np.sum(~np.isnan(sequenceData[:,variable]))
            # Consecutive valid samples from the start; np.argmax returns 0 when
            # there is no NaN at all, so treat that case as fully valid
            if np.any(np.isnan(sequenceData[:,variable])):
                nrConsecValidSamples = np.argmax(np.isnan(sequenceData[:,variable]))
            else:
                nrConsecValidSamples = nrValidSamples
# Criteria to consider a sequence as valid
minNrValidSamples = 36
minNrConsecValidSamples = 12 # one hour from start
# Collect valid trajectories
criterion = (nrValidSamples >= minNrValidSamples) & (nrConsecValidSamples >= minNrConsecValidSamples) \
& (nrInvalidTimes == 0) & (len(sequenceTimes) == nrLeadTimes)
goodTrajectory = False
if criterion == True:
goodTrajectory = True
trajectories.append(sequenceData)
### Print info on increments and valid samples...
#print(increments[:,variable])
if verbosity >= 2:
print('Trajectory nr', i,'starting at', ti.absolutetime2datetime(tsAbs))
print('Nr. invalid increments :', np.sum(boolLargeIncrements, axis=0))
print('Valid increment limits:' , minIncrements, maxIncrements)
                print('First bad increment at index', idxFirstBadIncrement, 'with value', increments[idxFirstBadIncrement, variable])
print('Nr. valid samples in sequence : ', nrValidSamples, '/',nrLeadTimes)
print('Nr. consecutive valid samples in sequence: ', nrConsecValidSamples, '/',nrLeadTimes)
print('Valid trajectory?', goodTrajectory)
print('---------------')
# Append trajectory to the list of trajectories
trajectories = np.array(trajectories)
print(len(trajectories), ' valid trajectories in ', varLabels[variable], ' range ', minInit,'-',maxInit)
if len(trajectories) > minNrTraj:
#print(trajectories.shape[0], ' x ', trajectories.shape[1], ' x ', trajectories.shape[2], '($N_{analogue}$) x ($N_{leadtimes}$) x ($N_{dim}$)')
################## COMPUTE SPREAD OF TRAJECTORIES
spreadArray = []
for lt in range(0,nrLeadTimes):
dataLeadTime = trajectories[:,lt,:]
# Evaluate number of valid data
nrValidPoints = np.sum(~np.isnan(dataLeadTime), axis=0)
boolNrPoints = nrValidPoints < 20
# Compute ensemble spread
if spreadMeasure == 'std':
spreadLeadTime = np.nanstd(dataLeadTime/arrayStats_Std, axis=0)
elif spreadMeasure == 'scatter':
spreadLeadTime = st.nanscatter(dataLeadTime/(arrayStats_Scatter/2.0), axis=0)/2.0
# Replace spread with nan if not enough samples for a given lead time
if np.sum(boolNrPoints) >=1:
spreadLeadTime[boolNrPoints] = np.nan
# Append spread
spreadArray.append(spreadLeadTime)
spreadArray = np.array(spreadArray)
################## DECORRELATION TIME ESTIMATION
#### TESTS WITH DIFFERENT FITTED MODELS
dB_shift_hr = 0.5
if logTime:
predictor = dt.to_dB(leadTimesMin/60 + dB_shift_hr)
else:
predictor = leadTimesMin/60
predictand = dt.to_dB(spreadArray[:,variable])
# Remove NaNs
nans = np.isnan(predictand)
predictor = predictor[~nans]
predictand = predictand[~nans]
if varNames[variable] == 'eccentricity':
predictor = predictor[~np.isinf(predictand)]
predictand = predictand[~np.isinf(predictand)]
# Prediction grid
predictor_grid = np.linspace(np.min(predictor), np.max(predictor), 1000)
#### KERNEL RIDGE REGRESSION
alphaVec = [0.1, 0.01]
sigmaVec = np.arange(5.0, 5.5, 0.5)
if len(alphaVec) > 1 or len(sigmaVec) > 1:
# Grid search of parameters
param_grid = {"alpha": alphaVec, "kernel": [RBF(length_scale) for length_scale in sigmaVec]}
kr = GridSearchCV(KernelRidge(), cv=5, param_grid=param_grid)
else:
# Run with pre-defined parameter set
kr = KernelRidge(alpha=alphaVec[0], kernel='rbf', gamma=sigmaVec[0])
# Fit model
kr.fit(predictor.reshape(-1,1), predictand.reshape(-1,1))
# Get best parameters (kr.best_params_ only exists after a grid search)
if isinstance(kr, GridSearchCV):
    bestAlpha_kr = kr.best_params_['alpha']
    bestSigma_kr = kr.best_params_['kernel'].length_scale
else:
    bestAlpha_kr = alphaVec[0]
    bestSigma_kr = sigmaVec[0]
# Predict over grid
kr_fit = kr.predict(predictor_grid.reshape(-1,1))
# Compute derivatives of prediction
kr_der1 = np.gradient(kr_fit[:,0])
kr_der2 = np.gradient(kr_der1)
# Estimate decorrelation time KR
if bestSigma_kr >= 2:
minDer1 = 0.005 #0.001
else:
minDer1 = 0.0
#minNormSpread = 0.75 # fixed fallback, superseded by the data-driven value below
minNormSpread = 0.75*np.nanmedian(dt.from_dB(predictand)[dt.from_dB(predictor)+dB_shift_hr >= maxLeadTimeHours/2])
print('Minimum spread to reach:', minNormSpread)
minNormSpread_dB = dt.to_dB(minNormSpread)
decorrBool = (kr_der1 <= minDer1) & (kr_der2 < 0) & (kr_fit[:,0] >= minNormSpread_dB)
decorrIndex_kr = np.where(decorrBool == True)[0]
# Find the first local minimum of the derivative (fall back to the last index if none exists)
localMinimaIndices = argrelextrema(kr_der1, np.less)[0]
firstLocalMinimumIndex = localMinimaIndices[0] if len(localMinimaIndices) > 0 else len(kr_der1)-1
if len(decorrIndex_kr) == 0:
kr_decorr_bad = True
decorrIndex_kr = len(kr_der1)-1
else:
kr_decorr_bad = False
decorrIndex_kr = decorrIndex_kr[0]
# Take the first local minimum before the derivative reaches zero as the decorrelation time
criterionLocalMinimum = (decorrIndex_kr > firstLocalMinimumIndex) & (kr_fit[firstLocalMinimumIndex,0] >= minNormSpread_dB) & (bestSigma_kr >= 2)
if criterionLocalMinimum:
print('Taking first local minimum as decorrelation time')
decorrIndex_kr = firstLocalMinimumIndex
# Get decorr time
if logTime:
decorr_time_kr = dt.from_dB(predictor_grid[decorrIndex_kr])-dB_shift_hr
else:
decorr_time_kr = predictor_grid[decorrIndex_kr]
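# In summary: the KR decorrelation time is the first grid point where the fitted
# dB-spread curve has derivative <= minDer1, negative curvature, and a spread
# already above minNormSpread_dB; it is moved back to an earlier local minimum of
# the derivative when the fit is smooth (bestSigma_kr >= 2) and the spread there
# also exceeds the threshold.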
#### Spherical model fit
weighting = np.ones_like(predictand) # uniform weights; alternatively dt.from_dB(predictor)
popt, pcov = curve_fit(st.spherical_model, predictor, predictand, sigma=weighting)
print('Spherical model params:', popt)
spherical_fit = st.spherical_model(predictor_grid, popt[0], popt[1], popt[2])
if logTime:
decorr_time_sph = dt.from_dB(popt[2])-dB_shift_hr
else:
decorr_time_sph = popt[2]
#### Exponential model fit
popt, pcov = curve_fit(st.exponential_model, predictor, predictand, sigma=weighting)
print('Exponential model params:', popt)
exponential_fit = st.exponential_model(predictor_grid, popt[0], popt[1], popt[2])
if logTime:
decorr_time_exp = dt.from_dB(popt[2])-dB_shift_hr
else:
decorr_time_exp = popt[2]
# Estimate decorrelation time simply using a threshold on the KR fit or the raw data
spreadThreshold = 0.95
idxDecorr = np.argmax(dt.from_dB(kr_fit) >= spreadThreshold, axis=0)[0]
if idxDecorr == 0:
spreadThreshold = 0.8
idxDecorr = np.argmax(dt.from_dB(kr_fit) >= spreadThreshold, axis=0)[0]
decorr_time_th = dt.from_dB(predictor_grid[idxDecorr])-dB_shift_hr
if verbosity >= 1:
print('Lifetime KR : ', decorr_time_kr, ' h')
print('Lifetime spherical : ', decorr_time_sph, ' h')
print('Lifetime exponential : ', decorr_time_exp, ' h')
print('Lifetime threshold >=', spreadThreshold, ': ',decorr_time_th, ' h')
#### PLOT THE FITS TO ERROR GROWTH FUNCTIONS
if plotFits:
plt.close()
plt.figure(figsize = (10,10))
ax1 = plt.subplot(111)
ax1.scatter(predictor, predictand, marker='o', s=5, color='k')
#ax1.plot(predictor_grid, mars_fit, 'r', label='Multivariate Adaptive Regression Splines (MARS)')
krLabel = r'Kernel Ridge Regression (KR), $\alpha$=' + str(bestAlpha_kr) + r', $\sigma$=' + str(bestSigma_kr)
p1, = ax1.plot(predictor_grid, kr_fit, 'g', label=krLabel, linewidth=2)
p2, = ax1.plot(predictor_grid, spherical_fit, 'b', label='Spherical variogram model', linewidth=2)
p3, = ax1.plot(predictor_grid, exponential_fit, 'r', label='Exponential variogram model', linewidth=2)
# Plot derivatives and decorrelation time
ax2 = ax1.twinx()
ax2.plot(predictor_grid, kr_der1, 'g--')
#ax2.plot(predictor_grid, kr_der2*20, 'g:')
ax2.axvline(x=predictor_grid[decorrIndex_kr], ymin=0.2, color='g')
ax2.axvline(x=dt.to_dB(decorr_time_sph + dB_shift_hr), ymin=0.2, color='b')
ax2.axvline(x=dt.to_dB(decorr_time_exp + dB_shift_hr), ymin=0.2, color='r')
p4 = ax2.axvline(x=dt.to_dB(decorr_time_th + dB_shift_hr), ymin=0.2, color='k')
ax2.axhline(y=0, color='g')
# Labels legend
p1_label = 'Lifetime KR : ' + fmt1 % decorr_time_kr + ' h'
p2_label = 'Lifetime spherical : ' + fmt1 % decorr_time_sph + ' h'
p3_label = 'Lifetime exponential: ' + fmt1 % decorr_time_exp + ' h'
p4_label = 'Lifetime >= ' + fmt2 % spreadThreshold + ' : ' + fmt1 % decorr_time_th + ' h'
plot_lifetimes = [p1,p2,p3,p4]
labels_lifetimes = [p1_label, p2_label, p3_label, p4_label]
# Plot legend with lifetimes
legend_lifetime = plt.legend(plot_lifetimes, labels_lifetimes, loc='upper left', labelspacing=0.1)
plt.gca().add_artist(legend_lifetime)
# Plot legend of models
ax1.legend(loc='lower right',labelspacing=0.1)
ax1.set_xlabel('Lead time, hours', fontsize=20)
# Format X and Y axis
ax1.set_ylabel(txtYlabel, fontsize=20)
ax2.set_ylabel('Function derivative', fontsize=20)
plt.setp(ax1.get_xticklabels(), fontsize=14)
plt.setp(ax1.get_yticklabels(), fontsize=14)
plt.setp(ax2.get_yticklabels(), fontsize=14)
plt.xlim([np.min(predictor)-1, np.max(predictor)+1])
if maxLeadTimeHours == 24:
xtickLabels = np.array([0.08,0.5,1,2,3,4,5,6,9,12,18,24])
if maxLeadTimeHours == 48:
xtickLabels = np.array([0.08,0.5,1,2,3,4,5,6,9,12,18,24,36,48])
if maxLeadTimeHours == 96:
xtickLabels = np.array([0.08,0.5,1,2,3,4,5,6,9,12,18,24,36,48,72,96])
xticklocations = dt.to_dB(xtickLabels + dB_shift_hr)
xtickLabels = dt.dynamic_formatting_floats(xtickLabels)
ax1.set_xticks(xticklocations)
xticks = ax1.set_xticklabels(xtickLabels, fontsize=14)
ytickLabels = [0.01,0.02,0.03,0.04,0.05,0.1,0.15,0.2,0.3,0.4,0.5,0.6,0.8,1,1.2,1.4]
yticklocations = dt.to_dB(ytickLabels)
ytickLabels = dt.dynamic_formatting_floats(ytickLabels)
ax1.set_yticks(yticklocations)
ax1.set_yticklabels(ytickLabels, fontsize=14)
strTitleLine1 = r'Spread growth for ' + varLabels[variable]
strTitleLine2 = 'Time series starting in range ' + str(fmt2 % minInitLab) + '-' + str(fmt2 % maxInitLab) + ' (N = ' + str(trajectories.shape[0]) + ')'
plt.title(strTitleLine1 + '\n' + strTitleLine2, fontsize=22)
plt.show()
# fileName = outBaseDir + product + '_' + pltType + '_' + timeStartStr + '-' + timeEndStr + '0_' + 'Rgt' + str(args.minR) + '_WOLS' + str(args.wols) + '_00005_warGt' + str("%0.1f" % warThreshold) + '_logIMFWAR' + str(int(logIMFWAR)) + '_' + timeAccumMinStr + '.png'
# print('Saving: ',fileName)
# plt.savefig(fileName, dpi=300)
#sys.exit()
################## PLOTTING ################################################################################
linewidth=2.0
labelFontSize = 16
legendFontSize = 12
axesTicksFontSize = 14
plt.tick_params(axis='both', which='major', labelsize=axesTicksFontSize)
# Plot growth of spread
legTxt = ' Range ' + str(fmt2 % minInitLab) + '-' + str(fmt2 % maxInitLab) + ' (N = ' + str(trajectories.shape[0]) + ')'
if pltType == 'spread':
if logTime and logSpread:
    l, = axSP.loglog(leadTimesMin/60, spreadArray[:,variable], label=legTxt, linewidth=linewidth)
elif logTime:
    l, = axSP.semilogx(leadTimesMin/60, spreadArray[:,variable], label=legTxt, linewidth=linewidth)
elif logSpread:
    l, = axSP.semilogy(leadTimesMin/60, spreadArray[:,variable], label=legTxt, linewidth=linewidth)
else:
    l, = axSP.plot(leadTimesMin/60, spreadArray[:,variable], label=legTxt, linewidth=linewidth)
# Get lines for second legend
plot_lines.append(l)
if kr_decorr_bad:
strLifetime = 'Lifetime = ' + (fmt1 % decorr_time_exp) + ' h'
else:
strLifetime = 'Lifetime = ' + (fmt1 % decorr_time_kr) + ' h'
decorr_time_hours.append(strLifetime)
# Plot evolution of trajectories
stepEvol = 3
if (pltType == 'evolution') & (step == stepEvol):
if logTime and logSpread:
    #axSP.loglog(leadTimesMin/60,trajectories[1:20,:,variable].T, color='blue')
    axSP.plot(leadTimesMin/60,trajectories[1:20,:,variable].T, color='blue')
elif logTime:
    axSP.semilogx(leadTimesMin/60,trajectories[1:20,:,variable].T, color='blue')
else:
    axSP.plot(leadTimesMin/60,trajectories[1:20,:,variable].T, color='blue')
if pltType == 'spread':
# Line colors
colors = [colormap(i) for i in np.linspace(0, 1, len(axSP.lines))]
for i,j in enumerate(axSP.lines):
j.set_color(colors[i])
legendFontSize = 12
# Add additional legend with decorrelation time
legend1 = plt.legend(plot_lines, decorr_time_hours, loc='upper left', fontsize=12, labelspacing=0.1)
plt.gca().add_artist(legend1)
# Add legend
plt.ylim(ylims)
if logTime and logSpread:
    plt.xlim([timeSampMin/60, maxLeadTimeMin/60])
    axSP.legend(loc='lower right', fontsize=legendFontSize)
elif logTime:
    axSP.legend(loc='upper left', fontsize=legendFontSize)
elif logSpread:
    axSP.legend(loc='lower right', fontsize=legendFontSize)
else:
    axSP.legend(loc='lower right', fontsize=legendFontSize)
# Plot line of spread saturation
plt.axhline(1.0, color='k', linestyle='dashed')
# Add labels and title
plt.xlabel('Lead time [hours]', fontsize=labelFontSize)
if (pltType == 'evolution') & (step == stepEvol):
plt.ylabel(varLabels[variable], fontsize=labelFontSize)
strTitle = 'Evolution of ' + varLabels[variable] + ' starting at ' + str(fmt2 % minInitLab) + '-' + str(fmt2 % maxInitLab)
plt.title(strTitle, fontsize=18)
if pltType == 'spread':
plt.ylabel(txtYlabel, fontsize=labelFontSize)
strTitle = 'Spread growth for ' + varLabels[variable]
plt.title(strTitle, fontsize=18)
plt.grid(True, which="both", axis='both')
# axSP.yaxis.set_major_formatter(ticker.FormatStrFormatter("%.1d"))
# axSP.xaxis.set_major_formatter(ticker.FormatStrFormatter("%.1d"))
axSP.xaxis.set_major_formatter(ticker.FuncFormatter(dt.myLogFormat))
axSP.yaxis.set_major_formatter(ticker.FuncFormatter(dt.myLogFormat))
toc = time.clock()
print('Total elapsed time: ', toc-tic, ' seconds.')
# Main title
titleStr = 'Growth of spread in the attractor for \n' + product + ': ' + str(timeStamps_datetime[0]) + ' - ' + str(timeStamps_datetime[-1])
plt.suptitle(titleStr, fontsize=20)
# Save figure
fileName = outBaseDir + product + '_' + pltType + '_' + timeStartStr + '-' + timeEndStr + '0_' + 'Rgt' + str(args.minR) + '_WOLS' + str(args.wols) + '_00005_warGt' + str("%0.1f" % warThreshold) + '_logIMFWAR' + str(int(logIMFWAR)) + '_' + timeAccumMinStr + '.png'
print('Saving: ',fileName)
plt.savefig(fileName, dpi=300)
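# --- Illustrative sketch (an assumption, not part of the original script) ---
# st.spherical_model and st.exponential_model are assumed to be the classic
# three-parameter variogram growth curves (nugget c0, partial sill c1, range r),
# consistent with popt[2] being read out above as the decorrelation time.
def _example_spherical_model(h, c0, c1, r):
    """Spherical variogram: c0 + c1*(1.5*h/r - 0.5*(h/r)**3) below the range, flat beyond it."""
    import numpy as np
    h = np.asarray(h, dtype=float)
    rising = c0 + c1 * (1.5 * h / r - 0.5 * (h / r) ** 3)
    return np.where(h < r, rising, c0 + c1)
def _example_exponential_model(h, c0, c1, r):
    """Exponential variogram: approaches the sill asymptotically, ~95% reached at h = r."""
    import numpy as np
    return c0 + c1 * (1.0 - np.exp(-3.0 * np.asarray(h, dtype=float) / r))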
|
meteoswiss-mdr/precipattractor
|
pyscripts/growth_errors_nans.py
|
Python
|
gpl-3.0
| 39,057
|
from .gameserver import Game
from .example import TicTacToe
|
andydrop/ludicode
|
GameServers/__init__.py
|
Python
|
gpl-3.0
| 60
|
from .net import Net
|
SF-Zhou/TinyDNN
|
tiny_dnn/net/__init__.py
|
Python
|
gpl-3.0
| 22
|
"""
Given Style Rules, create an SLD in XML format and add it to a layer
"""
if __name__=='__main__':
import os, sys
DJANGO_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(DJANGO_ROOT)
os.environ['DJANGO_SETTINGS_MODULE'] = 'geonode.settings'
import logging
import os
from random import choice
import re
from xml.etree.ElementTree import XML, ParseError
try:
from urlparse import urljoin
except:
from urllib.parse import urljoin # python 3.x
from django.utils.translation import ugettext as _
from django.conf import settings
from geonode.contrib.dataverse_connect.layer_metadata import LayerMetadata
from geonode.maps.models import Layer
from geonode.contrib.dataverse_styles.geoserver_rest_util import make_geoserver_json_put_request, make_geoserver_put_sld_request
from geonode.contrib.dataverse_styles.geonode_get_services import get_style_name_for_layer
LOGGER = logging.getLogger(__name__)
class StyleLayerMaker:
"""
Given Style Rules, create SLD XML and add it to a layer
Basic usage:
# Init object with an existing layer name
style_layer_maker = StyleLayerMaker('income_2so')
# Use some SLD info in XML format
sld_xml_content = open('test_rules.xml', 'r').read() # 'test_rules.xml' contains a SLD info in XML format
# Add sld_xml_content to the layer as the default style
success = style_layer_maker.add_sld_xml_to_layer(sld_xml_content)
# If operation failed, check error messages
if not success:
if style_layer_maker.err_found:
print('\n'.join(style_layer_maker.err_msgs))
"""
def __init__(self, layer_name):
self.gs_catalog_obj = Layer.objects.gs_catalog
self.layer_name = layer_name
self.err_found = False
self.err_msgs = []
self.layer_metadata = None # LayerMetadata object
def add_err_msg(self, err_msg):
self.err_found = True
self.err_msgs.append(err_msg)
LOGGER.warning(err_msg)
def create_layer_metadata(self, layer_name):
if layer_name is None:
self.layer_metadata = None
return
#self.layer_metadata = LayerMetadata(**dict(geonode_layer_name=layer_name))
self.layer_metadata = LayerMetadata.create_metadata_using_layer_name(layer_name)
def get_layer_metadata(self):
"""Return a LayerMetadata object, if it exists"""
if self.layer_metadata:
    return self.layer_metadata
return None
def add_sld_to_layer(self, formatted_sld_object):
# update layer via 2 PUT calls to the geoserver
return self.add_sld_xml_to_layer_via_puts(formatted_sld_object,\
self.layer_name)
# use direct python, but doesn't properly clear tile cache
#return self.add_sld_xml_to_layer(formatted_sld_object)
def get_url_to_set_sld_rules(self, style_name):
"""
Create url to set the new SLD to the layer via a put
#http://localhost:8000/gs/rest/styles/social_disorder_nydj_k_i_v.xml
This will be sent with a XML content containing the SLD rules
"""
if not style_name:
return None
# (1) Given the layer, retrieve the SLD containing the style name
#
# (to do)
# (2) Format the url for adding/retrieving styles
#
url_fragment = 'rest/styles/%s.xml' % (style_name)
full_url = urljoin(settings.GEOSERVER_BASE_URL, url_fragment)
return full_url
def get_set_default_style_url(self, layer_name):
"""
Given a layer name, return the REST url to set a default style
"""
if not layer_name:
return None
url_fragment = 'rest/layers/%s:%s' % (settings.DEFAULT_WORKSPACE, layer_name)
full_url = urljoin(settings.GEOSERVER_BASE_URL, url_fragment)
return full_url
def add_sld_xml_to_layer_via_puts(self, formatted_sld_object, layer_name):
if not formatted_sld_object or not layer_name:
return False
print('-' * 40)
print('formatted_sld_object.formatted_sld_xml')
print(formatted_sld_object.formatted_sld_xml)
print('-' * 40)
# (1) Verify the XML
if not self.is_xml_verified(formatted_sld_object.formatted_sld_xml):
self.add_err_msg('The style information contains invalid XML')
return False
# (2) Set the new SLD to the layer via a put
#http://localhost:8000/gs/rest/styles/social_disorder_nydj_k_i_v.xml
# --------------------------------------
# Retrieve the style name for this layer
# --------------------------------------
(success, style_name_or_err_msg) = get_style_name_for_layer(layer_name)
if not success:
self.add_err_msg(style_name_or_err_msg)
return False
geoserver_sld_url = self.get_url_to_set_sld_rules(style_name_or_err_msg)
print('geoserver_sld_url', geoserver_sld_url)
print('-' * 40)
print('formatted_sld_object.formatted_sld_xml', formatted_sld_object.formatted_sld_xml)
print('-' * 40)
(response, content) = make_geoserver_put_sld_request(geoserver_sld_url, formatted_sld_object.formatted_sld_xml)
print('response', response)
print('-' * 40)
print('content', content)
print('-' * 40)
if response is None or response.status != 200:
self.add_err_msg('Failed to set new style as the default')
return False
# (3) Set the new style as the default for the layer
# Send a PUT to the catalog to set the default style
json_str = """{"layer":{"defaultStyle":{"name":"%s"},"styles":{},"enabled":true}}""" % formatted_sld_object.sld_name
geoserver_json_url = self.get_set_default_style_url(self.layer_name)
if geoserver_json_url is None:
self.add_err_msg('Failed to format the url to set new style for layer: %s' % self.layer_name)
return False
(response, content) = make_geoserver_json_put_request(geoserver_json_url, json_str)
if response is None or response.status not in (200, 201):
self.add_err_msg('Failed to set new style as the default')
return False
self.create_layer_metadata(self.layer_name)
print('-' * 40)
print ('layer %s saved with style %s' % (self.layer_name, formatted_sld_object.sld_name))
return True
def add_sld_xml_to_layer(self, formatted_sld_object):
"""
NOT USING, tiles were not getting refreshed properly
Keeping code around in case needed in the future
"""
if not formatted_sld_object:
return False
print('type(formatted_sld_object)', type(formatted_sld_object))
# (1) Verify the XML
if not self.is_xml_verified(formatted_sld_object.formatted_sld_xml):
self.add_err_msg('The style information contains invalid XML')
return False
# (2) Retrieve the layer
layer_obj = self.gs_catalog_obj.get_layer(self.layer_name)
if layer_obj is None:
self.add_err_msg('The layer "%s" does not exist' % self.layer_name)
return False
self.show_layer_style_list(layer_obj)
#self.clear_alternate_style_list(layer_obj)
# (3) Create a style name
#stylename = self.layer_name + self.get_random_suffix()
#while self.is_style_name_in_catalog(stylename):
# stylename = self.layer_name + self.get_random_suffix()
style_name = formatted_sld_object.sld_name
# (4) Add the xml style to the catalog, with the new name
try:
# sync names
self.gs_catalog_obj.create_style(style_name, formatted_sld_object.formatted_sld_xml)
except Exception:
self.add_err_msg('Failed to add style to the catalog: %s' % style_name)
return False
# (5) Pull the style object back from the catalog
new_style_obj = self.gs_catalog_obj.get_style(style_name)
if new_style_obj is None:
self.add_err_msg('Failed to find recently added style in the catalog: %s' % style_name)
return False
# (6) Set the new style as the default for the layer
layer_obj.default_style = new_style_obj
# Save it!
try:
self.gs_catalog_obj.save(layer_obj)
except Exception:
self.add_err_msg('Failed to save new default style %s with layer' % (style_name))
return False
self.create_layer_metadata(self.layer_name)
print ('layer %s saved with style %s' % (self.layer_name, style_name))
return True
def get_random_suffix(self, num_chars=4):
return "_".join([choice('qwertyuiopasdfghjklzxcvbnm0123456789') for i in range(num_chars)])
def get_style_from_name(self, style_name):
"""
Get the style object from the style name
:returns: Style object or None
"""
if not style_name:
return None
return self.gs_catalog_obj.get_style(style_name)
def is_style_name_in_catalog(self, style_name):
"""
Is the style name in the Catalog?
"""
if not style_name:
return False
style_obj = self.get_style_from_name(style_name)
if style_obj is None:
return False
return True
def clear_alternate_style_list(self, layer_obj):
"""
Clear existing alternate styles from layer
(ask Matt how to delete a style)
"""
if not layer_obj.__class__.__name__ == 'Layer':
return False
# clear style list
layer_obj._set_alternate_styles([])
# save cleared list
self.gs_catalog_obj.save(layer_obj)
return True
def add_style_to_alternate_list(self, layer_obj, style_obj):
"""
Add a layer to the alternate list, to preserve it
"""
if not (layer_obj.__class__.__name__ == 'Layer' and style_obj.__class__.__name__ == 'Style'):
return False
# get style list
alternate_layer_style_list = layer_obj._get_alternate_styles()
# does the style already exist in the alternate list?
if style_obj.name in [s.name for s in alternate_layer_style_list]:
    return False
# add new style to list
alternate_layer_style_list.append(style_obj)
# update the layer with the new list
layer_obj._set_alternate_styles(alternate_layer_style_list)
return True
#self.gs_catalog_obj.save(layer_obj)
def show_layer_style_list(self, layer_obj):
print('Show layer styles')
if not layer_obj.__class__.__name__ == 'Layer':
print ('not a layer', type(layer_obj))
return
sl = [layer_obj.default_style.name]
for s in layer_obj._get_alternate_styles():
sl.append(s.name)
for idx, sname in enumerate(sl):
if idx == 0:
print('%s (default)' % sname)
continue
print (sname)
def is_xml_verified(self, sld_xml_str):
if not sld_xml_str:
return False
try:
sldxml = XML(sld_xml_str)
valid_url = re.compile(settings.VALID_SLD_LINKS)
for elem in sldxml.iter(tag='{http://www.opengis.net/sld}OnlineResource'):
if '{http://www.w3.org/1999/xlink}href' in elem.attrib:
link = elem.attrib['{http://www.w3.org/1999/xlink}href']
if valid_url.match(link) is None:
err_msg = "External images in your SLD file are not permitted. Please contact us if you would like your SLD images hosted on %s" % (settings.SITENAME)
self.add_err_msg(err_msg)
return False
except ParseError:
self.add_err_msg('Your SLD file contains invalid XML')
return False
return True
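# Illustrative example (an assumption) of the element inspected by is_xml_verified:
# <sld:ExternalGraphic>
# <sld:OnlineResource xlink:href="http://example.com/icon.png"/>
# </sld:ExternalGraphic>
# The xlink:href must match settings.VALID_SLD_LINKS or the whole SLD is rejected.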
if __name__=='__main__':
slm = StyleLayerMaker('income_2so')
sld_xml_content = open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test_rules', 'test_rules_03.xml'), 'r').read()
slm.add_sld_xml_to_layer(sld_xml_content)
|
cga-harvard/cga-worldmap
|
geonode/contrib/dataverse_styles/style_layer_maker.py
|
Python
|
gpl-3.0
| 12,317
|
# -*- coding: utf-8 -*-
from openerp import api, fields, models, _
from . import exceptions
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
enable_datev_checks = fields.Boolean('Perform Datev Checks', default=True)
@api.multi
def is_datev_validation_active(self):
self.ensure_one()
return self.enable_datev_checks and self.env.user.company_id.enable_datev_checks
@api.multi
def perform_datev_validation(self, silent=False):
is_valid = True
errors = list()
for rec in self:
if rec.is_datev_validation_active():
if silent: # Shorter, more performant version w/o string and exception handling
for line in rec.invoice_line:
if not line.perform_datev_validation(silent=True):
return False
else:
for line_no, line in enumerate(rec.invoice_line, start=1):
try:
line.perform_datev_validation(line_no=line_no)
except exceptions.DatevWarning as dw:
is_valid = False
errors.append(dw.message)
if not (silent or is_valid):
raise exceptions.DatevWarning(u'\n'.join(errors))
return is_valid
class AccountInvoiceLine(models.Model):
_inherit = 'account.invoice.line'
@api.multi
def perform_datev_validation(self, silent=False, line_no=None):
"""
Performs tests on an invoice line for whether the taxes are correctly set or not.
The major use of this method is in the condition of a workflow transition.
:param line_no: int Line number to be displayed in an error message.
:param silent: bool Specifies whether an exception in case of a failed test should be thrown
or if the checks should be performed silently.
:return: True if all checks were performed w/o errors or no datev checks are applicable. False otherwise.
:rtype: bool
"""
self.ensure_one()
if not self.is_datev_validation_applicable():
return True
is_valid = len(self.invoice_line_tax_id) == 1 and self.account_id.datev_steuer == self.invoice_line_tax_id
if not (silent or is_valid):
raise exceptions.DatevWarning(
_(u'Line {line}: The taxes specified in the invoice line ({tax_line}) and the corresponding account ({tax_account}) mismatch!').format(
line=line_no, tax_line=self.invoice_line_tax_id.description, tax_account=self.account_id.datev_steuer.description
)
)
return is_valid
@api.multi
def is_datev_validation_applicable(self):
"""
Tests if an invoice line is applicable to datev checks or not.
:return: True if it is applicable. Otherwise False.
:rtype: bool
"""
self.ensure_one()
return self.account_id.automatic
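# Hedged usage sketch (invoice is an account.invoice recordset; names illustrative):
# invoice.perform_datev_validation(silent=True) # -> bool, never raises
# invoice.perform_datev_validation() # raises exceptions.DatevWarning listing bad lines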
|
HBEE/accounting
|
ecoservice_financeinterface_datev/models/account_invoice.py
|
Python
|
gpl-3.0
| 3,072
|
# -*- coding: utf-8 -*-
from collections import defaultdict
from functools import partial
from itertools import count
import json
import networkx as nx
from networkx.algorithms import weakly_connected_component_subgraphs
from numpy import subtract
from numpy.linalg import norm
from typing import Any, DefaultDict, Dict, List, Optional, Tuple, Union
from django.db import connection
from django.http import JsonResponse
from rest_framework.decorators import api_view
from catmaid.models import UserRole
from catmaid.control.authentication import requires_user_role
from catmaid.control.common import (get_relation_to_id_map, get_request_bool,
get_request_list)
from catmaid.control.link import KNOWN_LINK_PAIRS, UNDIRECTED_LINK_TYPES
from catmaid.control.tree_util import simplify
from catmaid.control.synapseclustering import tree_max_density
def make_new_synapse_count_array() -> List[int]:
return [0, 0, 0, 0, 0]
def basic_graph(project_id, skeleton_ids, relations=None,
source_link:str="presynaptic_to", target_link:str="postsynaptic_to",
allowed_connector_ids=None) -> Dict[str, Tuple]:
if not skeleton_ids:
raise ValueError("No skeleton IDs provided")
cursor = connection.cursor()
if not relations:
relations = get_relation_to_id_map(project_id, (source_link, target_link), cursor)
source_rel_id, target_rel_id = relations[source_link], relations[target_link]
undirected_links = source_link in UNDIRECTED_LINK_TYPES and \
target_link in UNDIRECTED_LINK_TYPES
# Find all links in the passed in set of skeletons. If a relation is
# reciprocal, we need to avoid getting two result rows back for each
# treenode-connector-treenode connection. To keep things simple, we will add
# a "skeleton ID 1" < "skeleton ID 2" test for reciprocal links.
cursor.execute(f"""
SELECT t1.skeleton_id, t2.skeleton_id, LEAST(t1.confidence, t2.confidence)
FROM treenode_connector t1,
treenode_connector t2
WHERE t1.skeleton_id = ANY(%(skeleton_ids)s::bigint[])
AND t1.relation_id = %(source_rel)s
AND t1.connector_id = t2.connector_id
AND t2.skeleton_id = ANY(%(skeleton_ids)s::bigint[])
AND t2.relation_id = %(target_rel)s
AND t1.id <> t2.id
{'AND t1.skeleton_id < t2.skeleton_id' if undirected_links else ''}
{'AND t1.connector_id = ANY(%(allowed_c_ids)s::bigint[])' if allowed_connector_ids else ''}
""", {
'skeleton_ids': list(skeleton_ids),
'source_rel': source_rel_id,
'target_rel': target_rel_id,
'allowed_c_ids': allowed_connector_ids,
})
edges:DefaultDict = defaultdict(partial(defaultdict, make_new_synapse_count_array))
for row in cursor.fetchall():
edges[row[0]][row[1]][row[2] - 1] += 1
return {
'edges': tuple((s, t, count)
for s, edge in edges.items()
for t, count in edge.items())
}
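# Illustrative shape of the returned value (numbers are made up):
# {'edges': ((4350001, 4350002, [0, 0, 1, 3, 0]),)}
# i.e. one edge carrying 1 synapse of confidence 3 and 3 of confidence 4, since
# each link increments the bucket at index confidence-1.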
def confidence_split_graph(project_id, skeleton_ids, confidence_threshold,
relations=None, source_rel:str="presynaptic_to",
target_rel:str="postsynaptic_to", allowed_connector_ids=None) -> Dict[str, Any]:
""" Assumes 0 < confidence_threshold <= 5. """
if not skeleton_ids:
raise ValueError("No skeleton IDs provided")
# We need skeleton IDs as a list
skeleton_ids = list(skeleton_ids)
cursor = connection.cursor()
if not relations:
relations = get_relation_to_id_map(project_id, (source_rel, target_rel), cursor)
source_rel_id, target_rel_id = relations[source_rel], relations[target_rel]
# Fetch (valid) synapses of all skeletons
cursor.execute(f'''
SELECT skeleton_id, treenode_id, connector_id, relation_id, confidence
FROM treenode_connector
WHERE project_id = %(project_id)s
AND skeleton_id = ANY(%(skids)s::bigint[])
AND relation_id IN (%(source_rel_id)s, %(target_rel_id)s)
{'AND connector_id = ANY(%(allowed_c_ids)s::bigint[])' if allowed_connector_ids else ''}
''', {
'project_id': int(project_id),
'skids': skeleton_ids,
'source_rel_id': source_rel_id,
'target_rel_id': target_rel_id,
'allowed_c_ids': allowed_connector_ids,
})
stc:DefaultDict[Any, List] = defaultdict(list)
for row in cursor.fetchall():
stc[row[0]].append(row[1:]) # skeleton_id vs (treenode_id, connector_id, relation_id, confidence)
# Fetch all treenodes of all skeletons
cursor.execute('''
SELECT skeleton_id, id, parent_id, confidence
FROM treenode
WHERE project_id = %(project_id)s
AND skeleton_id = ANY(%(skeleton_ids)s::bigint[])
ORDER BY skeleton_id
''', {
'project_id': project_id,
'skeleton_ids': skeleton_ids,
})
# Dictionary of connector_id vs relation_id vs list of sub-skeleton ID
connectors:DefaultDict = defaultdict(partial(defaultdict, list))
# All nodes of the graph
nodeIDs:List = []
# Read out into memory only one skeleton at a time
current_skid = None
tree:Optional[nx.DiGraph] = None
for row in cursor.fetchall():
if row[0] == current_skid:
# Build the tree, breaking it at the low-confidence edges
if row[2] and row[3] >= confidence_threshold:
# mypy cannot prove this will be a DiGraph by here
tree.add_edge(row[2], row[1]) # type: ignore
continue
if tree:
nodeIDs.extend(split_by_confidence(current_skid, tree, stc[current_skid], connectors))
# Start the next tree
current_skid = row[0]
tree = nx.DiGraph()
if row[2] and row[3] >= confidence_threshold: # same threshold test as inside the loop
tree.add_edge(row[2], row[1])
if tree:
nodeIDs.extend(split_by_confidence(current_skid, tree, stc[current_skid], connectors))
# Create the edges of the graph from the connectors, which was populated as a side effect of 'split_by_confidence'
edges:DefaultDict = defaultdict(partial(defaultdict, make_new_synapse_count_array)) # pre vs post vs count
for c in connectors.values():
for pre in c[source_rel_id]:
for post in c[target_rel_id]:
edges[pre[0]][post[0]][min(pre[1], post[1]) - 1] += 1
return {
'nodes': nodeIDs,
'edges': [(s, t, count)
for s, edge in edges.items()
for t, count in edge.items()]
}
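# Illustrative output (assumed values): a skeleton 123 broken into two
# compartments by low-confidence edges contributes nodes '123_1' and '123_2'
# (see subgraphs() below), and edges then reference those compartment IDs.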
def dual_split_graph(project_id, skeleton_ids, confidence_threshold, bandwidth,
expand, relations=None, source_link="presynaptic_to",
target_link="postsynaptic_to", allowed_connector_ids=None) -> Dict[str, Any]:
""" Assumes bandwidth > 0 and some skeleton_id in expand. """
cursor = connection.cursor()
skeleton_ids = set(skeleton_ids)
expand = set(expand)
if not skeleton_ids:
raise ValueError("No skeleton IDs provided")
if not relations:
relations = get_relation_to_id_map(project_id, (source_link, target_link), cursor)
source_rel_id, target_rel_id = relations[source_link], relations[target_link]
# Fetch synapses of all skeletons
cursor.execute(f'''
SELECT skeleton_id, treenode_id, connector_id, relation_id, confidence
FROM treenode_connector
WHERE project_id = %(project_id)s
AND skeleton_id = ANY(%(skids)s::bigint[])
AND relation_id IN (%(source_rel_id)s, %(target_rel_id)s)
{'AND connector_id = ANY(%(allowed_c_ids)s::bigint[])' if allowed_connector_ids else ''}
''', {
'project_id': int(project_id),
'skids': list(skeleton_ids),
'source_rel_id': source_rel_id,
'target_rel_id': target_rel_id,
'allowed_c_ids': allowed_connector_ids,
})
stc:DefaultDict[Any, List] = defaultdict(list)
for row in cursor.fetchall():
stc[row[0]].append(row[1:]) # skeleton_id vs (treenode_id, connector_id, relation_id, confidence)
# Dictionary of connector_id vs relation_id vs list of sub-skeleton ID
connectors:DefaultDict = defaultdict(partial(defaultdict, list))
# All nodes of the graph (with or without edges. Includes those representing synapse domains)
nodeIDs:List = []
not_to_expand = skeleton_ids - expand
if confidence_threshold > 0 and not_to_expand:
# Now fetch all treenodes of only skeletons in skeleton_ids (the ones not to expand)
cursor.execute('''
SELECT skeleton_id, id, parent_id, confidence
FROM treenode
WHERE project_id = %(project_id)s
AND skeleton_id = ANY(%(skids)s::bigint[])
ORDER BY skeleton_id
''', {
'project_id': project_id,
'skids': list(not_to_expand),
})
# Read out into memory only one skeleton at a time
current_skid = None
tree:Optional[nx.DiGraph] = None
for row in cursor.fetchall():
if row[0] == current_skid:
# Build the tree, breaking it at the low-confidence edges
if row[2] and row[3] >= confidence_threshold:
# mypy cannot prove this will be a nx.DiGraph by here
tree.add_edge(row[2], row[1]) # type: ignore
continue
if tree:
nodeIDs.extend(split_by_confidence(current_skid, tree, stc[current_skid], connectors))
# Start the next tree
current_skid = row[0]
tree = nx.DiGraph()
if row[2] and row[3] >= confidence_threshold:
tree.add_edge(row[2], row[1])
if tree:
nodeIDs.extend(split_by_confidence(current_skid, tree, stc[current_skid], connectors))
else:
# No need to split.
# Populate connectors from the connections among them
for skid in not_to_expand:
nodeIDs.append(skid)
for c in stc[skid]:
connectors[c[1]][c[2]].append((skid, c[3]))
# Now fetch all treenodes of all skeletons to expand
cursor.execute('''
SELECT skeleton_id, id, parent_id, confidence, location_x, location_y, location_z
FROM treenode
WHERE project_id = %(project_id)s
AND skeleton_id = ANY(%(skids)s::bigint[])
ORDER BY skeleton_id
''', {
'project_id': project_id,
'skids': list(expand),
})
# list of edges among synapse domains
intraedges:List = []
# list of branch nodes, merely structural
branch_nodeIDs:List = []
# reset
current_skid = None
tree = None
locations:Optional[Dict] = None
for row in cursor.fetchall():
if row[0] == current_skid:
# Build the tree, breaking it at the low-confidence edges
# mypy cannot prove this will have a value by here
locations[row[1]] = row[4:] # type: ignore
if row[2] and row[3] >= confidence_threshold:
# mypy cannot prove this will have a value by here
tree.add_edge(row[2], row[1]) # type: ignore
continue
if tree:
ns, bs = split_by_both(current_skid, tree, locations, bandwidth, stc[current_skid], connectors, intraedges)
nodeIDs.extend(ns)
branch_nodeIDs.extend(bs)
# Start the next tree
current_skid = row[0]
tree = nx.DiGraph()
locations = {}
locations[row[1]] = row[4:]
if row[2] and row[3] >= confidence_threshold:
tree.add_edge(row[2], row[1])
if tree:
ns, bs = split_by_both(current_skid, tree, locations, bandwidth, stc[current_skid], connectors, intraedges)
nodeIDs.extend(ns)
branch_nodeIDs.extend(bs)
# Create the edges of the graph
edges:DefaultDict = defaultdict(partial(defaultdict, make_new_synapse_count_array)) # pre vs post vs count
for c in connectors.values():
for pre in c[source_rel_id]:
for post in c[target_rel_id]:
edges[pre[0]][post[0]][min(pre[1], post[1]) - 1] += 1
return {
'nodes': nodeIDs,
'edges': [(s, t, count)
for s, edge in edges.items()
for t, count in edge.items()],
'branch_nodes': branch_nodeIDs,
'intraedges': intraedges
}
def populate_connectors(chunkIDs, chunks, cs, connectors) -> None:
# Build up edges via the connectors
for c in cs:
# c is (treenode_id, connector_id, relation_id, confidence)
for chunkID, chunk in zip(chunkIDs, chunks):
if c[0] in chunk:
connectors[c[1]][c[2]].append((chunkID, c[3]))
break
def subgraphs(digraph, skeleton_id) -> Tuple[List, Tuple]:
chunks = list(weakly_connected_component_subgraphs(digraph))
if 1 == len(chunks):
chunkIDs:Tuple = (str(skeleton_id),) # Note: Here we're loosening the implicit type
else:
chunkIDs = tuple('%s_%s' % (skeleton_id, (i+1)) for i in range(len(chunks)))
return chunks, chunkIDs
def split_by_confidence(skeleton_id, digraph, cs, connectors) -> Tuple:
""" Split by confidence threshold. Populates connectors (side effect). """
chunks, chunkIDs = subgraphs(digraph, skeleton_id)
populate_connectors(chunkIDs, chunks, cs, connectors)
return chunkIDs
def split_by_both(skeleton_id, digraph, locations, bandwidth, cs, connectors, intraedges) -> Tuple[List, List]:
""" Split by confidence and synapse domain. Populates connectors and intraedges (side effects). """
nodes = []
branch_nodes = []
chunks, chunkIDs = subgraphs(digraph, skeleton_id)
for i, chunkID, chunk in zip(count(start=1), chunkIDs, chunks):
# Populate edge properties with the weight
for parent, child in chunk.edges_iter():
chunk[parent][child]['weight'] = norm(subtract(locations[child], locations[parent]))
# Check if need to expand at all
blob = tuple(c for c in cs if c[0] in chunk)
if 0 == len(blob): # type: ignore
nodes.append(chunkID)
continue
treenode_ids, connector_ids, relation_ids, confidences = list(zip(*blob)) # type: ignore
if 0 == len(connector_ids):
nodes.append(chunkID)
continue
# Invoke Casey's magic: split by synapse domain
max_density = tree_max_density(chunk.to_undirected(), treenode_ids,
connector_ids, relation_ids, [bandwidth])
# Get first element of max_density
domains = next(iter(max_density.values()))
# domains is a dictionary of index vs SynapseGroup instance
if 1 == len(domains):
for connector_id, relation_id, confidence in zip(connector_ids, relation_ids, confidences):
connectors[connector_id][relation_id].append((chunkID, confidence))
nodes.append(chunkID)
continue
# Create edges between domains
# Pick one treenode from each domain to act as anchor
anchors = {d.node_ids[0]: (i+k, d) for k, d in domains.items()}
# Create new Graph where the edges are the edges among synapse domains
mini = simplify(chunk, anchors.keys())
# Many side effects:
# * add internal edges to intraedges
# * add each domain to nodes
# * custom-apply populate_connectors with the known synapses of each domain
# (rather than having to sift through all in cs)
mini_nodes = {}
for node in mini.nodes_iter():
nblob = anchors.get(node)
if nblob:
index, domain = nblob
domainID = '%s_%s' % (chunkID, index)
nodes.append(domainID)
for connector_id, relation_id in zip(domain.connector_ids, domain.relations):
confidence = confidences[connector_ids.index(connector_id)]
connectors[connector_id][relation_id].append((domainID, confidence))
else:
domainID = '%s_%s' % (chunkID, node)
branch_nodes.append(domainID)
mini_nodes[node] = domainID
for a1, a2 in mini.edges_iter():
intraedges.append((mini_nodes[a1], mini_nodes[a2]))
return nodes, branch_nodes
def _skeleton_graph(project_id, skeleton_ids, confidence_threshold, bandwidth,
expand, compute_risk, cable_spread, path_confluence,
with_overall_counts=False, relation_map=None, link_types=None,
allowed_connector_ids=None) -> Optional[Dict]:
by_link_type = bool(link_types)
if not by_link_type:
link_types = ['synaptic-connector']
if not expand:
# Prevent expensive operations that will do nothing
bandwidth = 0
cursor = connection.cursor()
if not relation_map:
    relation_map = get_relation_to_id_map(project_id, cursor=cursor)
result:Optional[Dict] = None
for link_type in link_types:
pair = KNOWN_LINK_PAIRS.get(link_type)
if not pair:
raise ValueError(f"Unknown link type: {link_type}")
source_rel = pair['source']
target_rel = pair['target']
if 0 == bandwidth:
if 0 == confidence_threshold:
graph:Dict[str, Any] = basic_graph(project_id, skeleton_ids, relation_map,
source_rel, target_rel,
allowed_connector_ids)
else:
graph = confidence_split_graph(project_id, skeleton_ids,
confidence_threshold, relation_map, source_rel,
target_rel, allowed_connector_ids)
else:
# pass the link-specific relations and connector filter, as in the other branches
graph = dual_split_graph(project_id, skeleton_ids, confidence_threshold,
    bandwidth, expand, relation_map, source_rel, target_rel,
    allowed_connector_ids)
if with_overall_counts:
source_rel_id = relation_map[source_rel]
target_rel_id = relation_map[target_rel]
cursor.execute(f'''
SELECT tc1.skeleton_id, tc2.skeleton_id,
tc1.relation_id, tc2.relation_id,
LEAST(tc1.confidence, tc2.confidence)
FROM treenode_connector tc1
JOIN UNNEST(%(skeleton_ids)s::bigint[]) skeleton(id)
ON tc1.skeleton_id = skeleton.id
JOIN treenode_connector tc2
ON tc1.connector_id = tc2.connector_id
WHERE tc1.id != tc2.id
AND tc1.relation_id IN (%(source_rel_id)s, %(target_rel_id)s)
AND tc2.relation_id IN (%(source_rel_id)s, %(target_rel_id)s)
''', {
'skeleton_ids': skeleton_ids,
'source_rel_id': source_rel_id,
'target_rel_id': target_rel_id,
})
overall_counts:DefaultDict = defaultdict(partial(defaultdict, make_new_synapse_count_array))
# Iterate through each pre/post connection
for skid1, skid2, rel1, rel2, conf in cursor.fetchall():
# Increment number of links to/from skid1 with relation rel1.
overall_counts[skid1][rel1][conf - 1] += 1
# Attach counts and a map of relation names to their IDs.
graph['overall_counts'] = overall_counts
graph['relation_map'] = {
source_rel: source_rel_id,
target_rel: target_rel_id
}
if by_link_type:
if not result:
result = {}
result[link_type] = graph
else:
result = graph
return result
@api_view(['POST'])
@requires_user_role([UserRole.Annotate, UserRole.Browse])
def skeleton_graph(request, project_id=None):
"""Get a synaptic graph between skeletons compartmentalized by confidence.
Given a set of skeletons, retrieve presynaptic-to-postsynaptic edges
between them, annotated with count. If a confidence threshold is
supplied, compartmentalize the skeletons at edges in the arbor
below that threshold and report connectivity based on these
compartments.
When skeletons are split into compartments, nodes in the graph take a
string ID like ``{skeleton_id}_{compartment #}``.
---
parameters:
- name: skeleton_ids[]
description: IDs of the skeletons to graph
required: true
type: array
items:
type: integer
paramType: form
- name: confidence_threshold
description: Confidence value below which to segregate compartments
type: integer
paramType: form
- name: bandwidth
description: Bandwidth in nanometers
type: number
- name: cable_spread
description: Cable spread in nanometers
type: number
- name: expand[]
description: IDs of the skeletons to expand
type: array
items:
type: integer
- name: link_types[]
description: IDs of link types to respect
type: array
items:
type: string
- name: allowed_connector_ids[]
description: (Optional) IDs of allowed connectors. All other connectors will be ignored.
required: false
type: array
items:
type: integer
models:
skeleton_graph_edge:
id: skeleton_graph_edge
properties:
- description: ID of the presynaptic skeleton or compartment
type: integer|string
required: true
- description: ID of the postsynaptic skeleton or compartment
type: integer|string
required: true
- description: number of synapses constituting this edge
$ref: skeleton_graph_edge_count
required: true
skeleton_graph_edge_count:
id: skeleton_graph_edge_count
properties:
- description: Number of synapses with confidence 1
type: integer
required: true
- description: Number of synapses with confidence 2
type: integer
required: true
- description: Number of synapses with confidence 3
type: integer
required: true
- description: Number of synapses with confidence 4
type: integer
required: true
- description: Number of synapses with confidence 5
type: integer
required: true
skeleton_graph_intraedge:
id: skeleton_graph_intraedge
properties:
- description: ID of the presynaptic skeleton or compartment
type: integer|string
required: true
- description: ID of the postsynaptic skeleton or compartment
type: integer|string
required: true
type:
edges:
type: array
items:
$ref: skeleton_graph_edge
required: true
nodes:
type: array
items:
type: integer|string
required: false
intraedges:
type: array
items:
$ref: skeleton_graph_intraedge
required: false
branch_nodes:
type: array
items:
type: integer|string
required: false
"""
compute_risk = 1 == int(request.POST.get('risk', 0))
if compute_risk:
# TODO port the last bit: computing the synapse risk
from graph import skeleton_graph as slow_graph
return slow_graph(request, project_id)
project_id = int(project_id)
skeleton_ids = set(int(v) for k,v in request.POST.items() if k.startswith('skeleton_ids['))
confidence_threshold = min(int(request.POST.get('confidence_threshold', 0)), 5)
bandwidth = float(request.POST.get('bandwidth', 0)) # in nanometers
cable_spread = float(request.POST.get('cable_spread', 2500)) # in nanometers
path_confluence = int(request.POST.get('path_confluence', 10)) # a count
expand = set(int(v) for k,v in request.POST.items() if k.startswith('expand['))
with_overall_counts = get_request_bool(request.POST, 'with_overall_counts', False)
link_types = get_request_list(request.POST, 'link_types', None)
allowed_connector_ids = get_request_list(request.POST, 'allowed_connector_ids', None)
graph = _skeleton_graph(project_id, skeleton_ids,
confidence_threshold, bandwidth, expand, compute_risk, cable_spread,
path_confluence, with_overall_counts, link_types=link_types,
allowed_connector_ids=allowed_connector_ids)
if not graph:
raise ValueError("Could not compute graph")
return JsonResponse(graph)
|
tomka/CATMAID
|
django/applications/catmaid/control/graph2.py
|
Python
|
gpl-3.0
| 24,814
|
import datetime
import logging
import time
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.mail.message import EmailMultiAlternatives
from django.core.management.base import BaseCommand
from django.db.models import Q
from django.template.loader import render_to_string
from django.utils.timezone import utc
from custom.models import Profile, FriendJoinedEmailLog
# Get an instance of a logger
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Friend joined email daemon'
args = ''
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
self.site = Site.objects.get_current()
def close_db_connection(self):
from django import db
db.close_connection()
def handle(self, *args, **options):
while True:
last_hour = datetime.datetime.utcnow().replace(tzinfo=utc) - datetime.timedelta(hours=1)
profiles = Profile.objects.select_related().filter(
user__date_joined__gte=last_hour,
user_referrer__profile__enable_email_updates=True,
user_referrer__is_active=True,
)
for profile in profiles:
if not profile.user_referrer.email:
continue
try:
FriendJoinedEmailLog.objects.get(user=profile.user_referrer, user_referred=profile.user)
except FriendJoinedEmailLog.DoesNotExist:
dict_context = {
'site': self.site,
'referred_profile': profile,
'referring_profile': profile.user_referrer.get_profile(),
}
email_subject = render_to_string('emails/friend-joined/subject.txt', dict_context).strip()
email_txt = render_to_string('emails/friend-joined/message.txt', dict_context)
email_html = render_to_string('emails/friend-joined/message.html', dict_context)
email = EmailMultiAlternatives(
email_subject, email_txt, settings.DEFAULT_FROM_EMAIL, [profile.user_referrer.email,]
)
email.attach_alternative(email_html, 'text/html')
email.send()
FriendJoinedEmailLog.objects.create(user=profile.user_referrer, user_referred=profile.user)
self.close_db_connection()
time.sleep(600)
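# Note on the timing above: a 1 h lookback polled every 600 s means each newly
# joined profile is examined up to ~6 times, but FriendJoinedEmailLog guarantees
# at most one email per (referrer, referred) pair.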
|
waterdotorg/power.Water
|
project/custom/management/commands/friend_joined_email.py
|
Python
|
gpl-3.0
| 2,527
|
import frappe
def flat_item_group_tree_list(item_group, result=None):
if not result:
result = [item_group]
child_groups = frappe.get_list(
"Item Group",
filters={"parent_item_group": item_group},
fields=["name"]
)
child_groups = [child.name for child in child_groups if child.name not in result]
if child_groups:
    result = result + child_groups
for child in child_groups:
flat_item_group_tree_list(child, result)
return result
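# Hedged usage sketch (group names are illustrative):
# flat_item_group_tree_list("All Item Groups")
# -> ["All Item Groups", "Raw Material", "Sub Assemblies", ...]
# i.e. the given group followed by every descendant item group.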
|
neilLasrado/erpnext
|
erpnext/bloombrackets/coupon_commands/utils.py
|
Python
|
gpl-3.0
| 457
|
#!/usr/bin/env python
#
# This file is part of the SSM_LinearArray (Sound Sources Mapping
# using a Linear Microphone Array)
# developed by Daobilige Su <daobilige DOT su AT student DOT uts DOT edu DOT au>
#
# This file is under the GPLv3 licence.
#
import rospy
from std_msgs.msg import String
from std_msgs.msg import Int32MultiArray
#sudo apt-get install python-pyaudio
import pyaudio
from rospy.numpy_msg import numpy_msg
import numpy as np
import time
import signal
import os
import sys
CHUNK = 3200
FORMAT = pyaudio.paInt16
CHANNELS = 4
RATE = 16000
DEV_IDX = 5
p = pyaudio.PyAudio()
pub_mic_array = rospy.Publisher("/microphone_array_raw", numpy_msg(Int32MultiArray),queue_size=1)
def callback(in_data, frame_count, time_info, status):
global np,pub_mic_array
numpydata = np.frombuffer(in_data, dtype=np.int16) # np.fromstring is deprecated for binary data
print('sending...')
numpydata_msg = Int32MultiArray()
numpydata_msg.data = numpydata
pub_mic_array.publish(numpydata_msg)
return (in_data, pyaudio.paContinue)
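# Worked numbers for the constants above: CHUNK=3200 frames at RATE=16000 Hz
# means each callback covers 3200/16000 = 0.2 s of audio; with CHANNELS=4 the
# interleaved int16 buffer holds 3200*4 = 12800 samples (25600 bytes).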
stream = p.open(format=FORMAT,
channels=CHANNELS,
rate=RATE,
input=True,
frames_per_buffer=CHUNK,
input_device_index=DEV_IDX,
stream_callback=callback)
def signal_handler(signal, frame):
print('---stopping---')
stream.close()
p.terminate()
sys.exit()
signal.signal(signal.SIGINT, signal_handler)
def talker():
rospy.init_node('microphone_array_driver', anonymous=True)
print("---recording---")
stream.start_stream()
while stream.is_active():
time.sleep(0.1)
stream.close()
p.terminate()
if __name__ == '__main__':
try:
talker()
except rospy.ROSInterruptException:
pass
|
daobilige-su/SSM_LinearArray
|
ROS/SSM_LinearArray/scripts/ps3_driver.py
|
Python
|
gpl-3.0
| 1,741
|
# -*- coding: utf-8 -*-
#
# diffoscope: in-depth comparison of files, archives, and directories
#
# Copyright © 2015 Jérémy Bobbio <lunar@debian.org>
# Copyright © 2015 Clemens Lang <cal@macports.org>
#
# diffoscope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# diffoscope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with diffoscope. If not, see <https://www.gnu.org/licenses/>.
import pytest
import os.path
from diffoscope.config import Config
from diffoscope.comparators.macho import MachoFile
from diffoscope.comparators.missing_file import MissingFile
from utils.data import data, load_fixture
from utils.tools import skip_unless_tools_exist
obj1 = load_fixture('test1.macho')
obj2 = load_fixture('test2.macho')
def test_obj_identification(obj1):
assert isinstance(obj1, MachoFile)
def test_obj_no_differences(obj1):
difference = obj1.compare(obj1)
assert difference is None
@pytest.fixture
def obj_differences(obj1, obj2):
return obj1.compare(obj2).details
@skip_unless_tools_exist('otool', 'lipo')
def test_obj_compare_non_existing(monkeypatch, obj1):
monkeypatch.setattr(Config(), 'new_file', True)
difference = obj1.compare(MissingFile('/nonexisting', obj1))
assert difference.source2 == '/nonexisting'
assert len(difference.details) > 0
@skip_unless_tools_exist('otool', 'lipo')
def test_diff(obj_differences):
assert len(obj_differences) == 4
l = ['macho_expected_diff_arch', 'macho_expected_diff_headers', 'macho_expected_diff_loadcommands', 'macho_expected_diff_disassembly']
for idx, diff in enumerate(obj_differences):
with open(os.path.join(os.path.dirname(__file__), '../data', l[idx]), 'w') as f:
print(diff.unified_diff, file=f)
expected_diff = open(data('macho_expected_diff')).read()
assert obj_differences[0].unified_diff == expected_diff
|
brettcs/diffoscope
|
tests/comparators/test_macho.py
|
Python
|
gpl-3.0
| 2,299
|
"""
Forms and validation code for user registration.
Note that all of these forms assume Django's bundled default ``User``
model; since it's not possible for a form to anticipate in advance the
needs of custom user models, you will need to write your own forms if
you're using a custom model.
"""
from __future__ import unicode_literals
from captcha.fields import ReCaptchaField
from django import forms
from django.db.models import Q
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth.forms import AuthenticationForm
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.forms import UserCreationForm
from .users import UserModel, UsernameField
User = UserModel()
class RegistrationForm(UserCreationForm):
"""
Form for registering a new user account.
Validates that the requested username is not already in use, and
requires the password to be entered twice to catch typos.
Subclasses should feel free to add any additional validation they
need, but should avoid defining a ``save()`` method -- the actual
saving of collected user data is delegated to the active
registration backend.
"""
required_css_class = 'required'
email = forms.EmailField(label=_("E-mail"))
class Meta:
model = User
fields = (UsernameField(), "email")
class RegistrationFormTermsOfService(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which adds a required checkbox
for agreeing to a site's Terms of Service.
"""
tos = forms.BooleanField(widget=forms.CheckboxInput,
label=_('I have read and agree to the Terms of Service'),
error_messages={'required': _("You must agree to the terms to register")})
class RegistrationFormUniqueEmail(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which enforces uniqueness of
email addresses.
"""
def clean_email(self):
"""
Validate that the supplied email address is unique for the
site.
"""
if User.objects.filter(email__iexact=self.cleaned_data['email']).exists():
raise forms.ValidationError(_("This email address is already in use. Please supply a different email address."))
return self.cleaned_data['email']
class RegistrationFormNoFreeEmail(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which disallows registration with
email addresses from popular free webmail services; moderately
useful for preventing automated spam registrations.
To change the list of banned domains, subclass this form and
override the attribute ``bad_domains``.
"""
bad_domains = ['aim.com', 'aol.com', 'email.com', 'gmail.com',
'googlemail.com', 'hotmail.com', 'hushmail.com',
'msn.com', 'mail.ru', 'mailinator.com', 'live.com',
'yahoo.com']
def clean_email(self):
"""
Check the supplied email address against a list of known free
webmail domains.
"""
email_domain = self.cleaned_data['email'].split('@')[1].lower()
if email_domain in self.bad_domains:
raise forms.ValidationError(_("Registration using free email addresses is prohibited. Please supply a different email address."))
return self.cleaned_data['email']
class ResendActivationForm(forms.Form):
required_css_class = 'required'
email = forms.EmailField(label=_("E-mail"))
class EmailAuthenticationForm(AuthenticationForm):
def clean_username(self):
username = self.data['username']
try:
username = User.objects.get(Q(email=username) | Q(username=username)).username
except ObjectDoesNotExist:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login',
params={'username': self.username_field.verbose_name},
)
return username
class UserProfileRegistrationForm(RegistrationForm):
first_name = forms.CharField(label=_('First name'), max_length=30, min_length=3)
last_name = forms.CharField(label=_('Last name'), max_length=30, min_length=3)
captcha = ReCaptchaField(attrs={'theme': 'clean'})
def clean_email(self):
"""
Validate that the supplied email address is unique for the
site.
"""
if User.objects.filter(email__iexact=self.cleaned_data['email']).exists():
raise forms.ValidationError(_("This email address is already in use. Please supply a different email address."))
return self.cleaned_data['email']
|
ArtemBernatskyy/FundExpert.NET
|
mutual_funds/registration/forms.py
|
Python
|
gpl-3.0
| 4,680
|
# -*- coding: utf-8 -*-
from odoo import models, fields, api
class fixing_issues_view(models.Model):
_inherit = 'project.issue'
|
walter-trueplus/training
|
fixing_issues_view/models/models.py
|
Python
|
gpl-3.0
| 133
|
from django.contrib import admin
from .models import *
class ProductAdmin(admin.ModelAdmin):
list_display = ('id', 'prd_process_id', 'prd_name',
'prd_display_name', 'prd_owner', 'prd_product_id', 'prd_date',
'prd_class', 'prd_filter', 'prd_is_public', 'prd_is_permanent',)
list_display_links = ('id', 'prd_process_id', 'prd_name',)
search_fields = ('prd_process_id', 'prd_name', 'prd_display_name', 'prd_product_id',)
class ProductReleaseAdmin(admin.ModelAdmin):
list_display = ('id', 'product', 'release',)
list_display_links = ('id', 'product', 'release',)
search_fields = ('product', 'release',)
class ProductTagAdmin(admin.ModelAdmin):
list_display = ('id', 'product', 'tag',)
list_display_links = ('id', 'product', 'tag',)
search_fields = ('product', 'tag',)
class FileAdmin(admin.ModelAdmin):
list_display = ('id', 'prd_name', 'prd_display_name',
'prd_class', 'fli_base_path', 'fli_name',)
list_display_links = ('id', 'prd_name', 'prd_display_name', 'prd_class',)
search_fields = ('fli_name',)
class TableAdmin(admin.ModelAdmin):
list_display = ('id', 'prd_name', 'prd_display_name',
'prd_class', 'tbl_database', 'tbl_schema', 'tbl_name',)
list_display_links = ('id', 'prd_name', 'prd_display_name',
'prd_class', 'tbl_schema', 'tbl_name',)
search_fields = ('tbl_schema', 'tbl_name',)
class CatalogAdmin(admin.ModelAdmin):
list_display = (
'id', 'prd_name', 'prd_display_name', 'prd_class', 'ctl_num_objects',
)
class MapAdmin(admin.ModelAdmin):
list_display = (
'id', 'prd_name', 'prd_display_name', 'prd_class', 'mpa_nside', 'mpa_ordering', 'prd_filter', 'prd_is_public', 'prd_is_permanent'
)
list_display_links = ('id', 'prd_name')
search_fields = ('prd_name',)
class CutOutJobAdmin(admin.ModelAdmin):
list_display = (
'id', 'cjb_product', 'cjb_display_name', 'cjb_status', 'cjb_tag', 'owner',
)
list_display_links = ('id',)
search_fields = ('cjb_display_name',)
class DesjobAdmin(admin.ModelAdmin):
list_display = (
'id', 'djb_cutout_job', 'djb_jobid', 'djb_status', 'djb_start_time', 'djb_finish_time', 'djb_message',
)
list_display_links = ('id',)
search_fields = ('djb_jobid',)
class CutoutAdmin(admin.ModelAdmin):
list_display = (
'id', 'cjb_cutout_job', 'ctt_object_id', 'ctt_object_ra', 'ctt_object_dec', 'ctt_img_format', 'ctt_filter',
'ctt_file_name', 'ctt_file_path', 'ctt_file_type', 'ctt_file_size', )
list_display_links = ('id',)
search_fields = ('id',)
class MaskAdmin(admin.ModelAdmin):
list_display = (
'id', 'prd_name', 'prd_display_name', 'prd_class', 'msk_filter',
)
list_display_links = ('id', 'prd_name')
search_fields = ('prd_name',)
class ProductContentAdmin(admin.ModelAdmin):
list_display = ('id', 'pcn_product_id', 'pcn_column_name', 'pcn_ucd')
list_display_links = ('pcn_column_name',)
search_fields = ('pcn_column_name',)
class ProductContentAssociationAdmin(admin.ModelAdmin):
list_display = ('id', 'pca_product', 'pca_class_content', 'pca_product_content',)
search_fields = ('pca_product__prd_display_name', 'pca_product__prd_name')
class ProductContentSettingAdmin(admin.ModelAdmin):
list_display = ('id', 'pcs_content', 'pcs_setting', 'pcs_is_visible', 'pcs_order')
class ProductSettingAdmin(admin.ModelAdmin):
list_display = (
'id', 'cst_product', 'owner', 'cst_display_name', 'cst_description', 'cst_is_public', 'cst_is_editable',)
search_fields = ('cst_product__prd_display_name', 'cst_display_name', 'cst_description',)
class CurrentSettingAdmin(admin.ModelAdmin):
list_display = ('id', 'cst_product', 'cst_setting', 'owner',)
class WorkgroupAdmin(admin.ModelAdmin):
list_display = ('id', 'wgp_workgroup', 'owner',)
class WorkgroupUserAdmin(admin.ModelAdmin):
list_display = ('id', 'wgu_workgroup', 'wgu_user',)
class PermissionAdmin(admin.ModelAdmin):
list_display = ('id', 'prm_product', 'prm_user', 'prm_workgroup',)
class ProductRelatedAdmin(admin.ModelAdmin):
list_display = ('id', 'prl_product', 'prl_related', 'prl_relation_type', 'prl_cross_identification',)
class FiltersetAdmin(admin.ModelAdmin):
list_display = ('id', 'product', 'owner', 'fst_name',)
class FilterConditionAdmin(admin.ModelAdmin):
list_display = ('id', 'filterset', 'fcd_property', 'fcd_property_name', 'fcd_operation', 'fcd_value')
class BookmarkedAdmin(admin.ModelAdmin):
list_display = ('id', 'product', 'owner', 'is_starred')
admin.site.register(Product, ProductAdmin)
admin.site.register(ProductRelease, ProductReleaseAdmin)
admin.site.register(ProductTag, ProductTagAdmin)
admin.site.register(File, FileAdmin)
admin.site.register(Table, TableAdmin)
admin.site.register(Catalog, CatalogAdmin)
admin.site.register(Map, MapAdmin)
admin.site.register(CutOutJob, CutOutJobAdmin)
admin.site.register(Desjob, DesjobAdmin)
admin.site.register(Cutout, CutoutAdmin)
admin.site.register(Mask, MaskAdmin)
admin.site.register(ProductContent, ProductContentAdmin)
admin.site.register(ProductContentAssociation, ProductContentAssociationAdmin)
admin.site.register(ProductContentSetting, ProductContentSettingAdmin)
admin.site.register(ProductSetting, ProductSettingAdmin)
admin.site.register(CurrentSetting, CurrentSettingAdmin)
admin.site.register(Permission, PermissionAdmin)
admin.site.register(ProductRelated, ProductRelatedAdmin)
admin.site.register(Workgroup, WorkgroupAdmin)
admin.site.register(WorkgroupUser, WorkgroupUserAdmin)
admin.site.register(Filterset, FiltersetAdmin)
admin.site.register(FilterCondition, FilterConditionAdmin)
admin.site.register(BookmarkProduct, BookmarkedAdmin)
|
linea-it/dri
|
api/product/admin.py
|
Python
|
gpl-3.0
| 5,841
|
from django import forms
from ..models import BaseDemographic
class BaseDemographicForm(forms.ModelForm):
class Meta:
model = BaseDemographic
fields = ['first_name','last_name','phone','dob']
|
rdespoiu/QTitan
|
QTitan/QTSurvey/Controllers/BaseDemographicForm.py
|
Python
|
gpl-3.0
| 214
|
from mock import patch
from .test_helper import raises
from kiwi.exceptions import KiwiPrivilegesError
from kiwi.privileges import Privileges
class TestPrivileges(object):
@raises(KiwiPrivilegesError)
@patch('os.geteuid')
    def test_check_for_root_permission_false(self, mock_euid):
mock_euid.return_value = 1
Privileges.check_for_root_permissions()
@patch('os.geteuid')
    def test_check_for_root_permission_true(self, mock_euid):
mock_euid.return_value = 0
assert Privileges.check_for_root_permissions() is True
|
adrianschroeter/kiwi
|
test/unit/privileges_test.py
|
Python
|
gpl-3.0
| 568
|
import logging
import ssl
from typing import List # pylint: disable=unused-import
import aiohttp
import certifi
import trio_asyncio
from aiohttp.http_exceptions import HttpProcessingError
from .base import BufferedFree, Limit, Sink, Source
logger = logging.getLogger(__name__)
class AiohttpClientSessionMixin:
    def init_client(self, client, headers=None):
        # Avoid a shared mutable default argument; use a fresh dict per call.
        if headers is None:
            headers = {}
        ssl_context = ssl.create_default_context(cafile=certifi.where())
conn = aiohttp.TCPConnector(ssl=ssl_context)
if client:
self.client_owned, self.client = False, client
else:
self.client_owned, self.client = True, aiohttp.ClientSession(
connector=conn,
headers=headers,
skip_auto_headers=["Content-Type", "User-Agent"],
)
async def close_client(self):
if self.client_owned and not self.client.closed:
await self.client.close()
DEFAULT_CHUNK_SIZE = 1024 * 10 * 16
class URLReader(Source, AiohttpClientSessionMixin):
def __init__(self, url, client=None):
super(URLReader, self).__init__()
self.url = url
self.response = None
self.init_client(client)
@trio_asyncio.aio_as_trio
async def read(self, count=-1):
if self._eof:
return b""
if self.response is None:
self.response = await self.client.get(self.url)
self.response.raise_for_status()
if count == -1:
count = DEFAULT_CHUNK_SIZE
buf = await self.response.content.read(count)
if len(buf) == 0:
await self._close()
return buf
async def _close(self):
self._eof = True
        if self.response is not None:
await self.response.release()
self.response = None
await self.close_client()
@trio_asyncio.aio_as_trio
async def close(self):
await self._close()
class URLWriter(Sink, AiohttpClientSessionMixin):
def __init__(self, url, size=None, client=None):
super(URLWriter, self).__init__()
self.url = url
self._done = False
self.response = None
self.bytes_written = 0
self.size = size
self.etag = None
self.init_client(client)
@trio_asyncio.aio_as_trio
async def read(self, count=-1):
if self._done:
return b""
if self.response is None:
@trio_asyncio.trio_as_aio
async def read_from_input():
assert self.input is not None
return (await self.input.read())
async def feed_http_upload():
while True:
buf = await read_from_input()
if len(buf) == 0:
break
yield buf
self.bytes_written += len(buf)
logger.debug('HTTP PUT %s', self.url)
self.response = await self.client.put(
self.url,
data=feed_http_upload(),
raise_for_status=True,
headers={} if self.size is None else {"Content-Length": str(self.size)},
)
content = await self.response.read()
await self.response.release()
            if self.response.status not in (200, 201, 202):
raise HttpProcessingError(
code=self.response.status,
message=self.response.reason,
headers=self.response.headers,
)
self._done = True
if "ETAG" in self.response.headers:
self.etag = self.response.headers["ETAG"][1:-1]
return content
@trio_asyncio.aio_as_trio
async def close(self):
self._done = True
        if self.response is not None:
await self.response.release()
self.response = None
await self.close_client()
class ChunkedURLWriter(Sink, AiohttpClientSessionMixin):
"""
    The ChunkedURLWriter will instantiate a URLWriter for each URL given to
it.
"""
def __init__(self, urls, chunksize, total_size=None, client=None):
super(ChunkedURLWriter, self).__init__()
self._urls = urls
self._chunksize = chunksize
self._url_idx = 0
self.init_client(client)
self.bytes_written = 0
self.total_size = total_size
self.etags = [] # type: List[str]
def add_input(self, input):
self.input = input >> BufferedFree()
async def read(self, count=-1):
assert self.input is not None
if self._url_idx >= len(self._urls):
return b""
url = self._urls[self._url_idx]
logger.debug("Uploading to: %s (max. %d bytes)", url, self._chunksize)
size = (
None
if self.total_size is None
else min(self.total_size - self.bytes_written, self._chunksize)
)
writer = (
self.input
>> Limit(self._chunksize)
>> URLWriter(url, size=size, client=self.client)
)
result = await writer.readall()
self.etags.append(writer.etag)
self.bytes_written += writer.bytes_written
self._url_idx += 1
return result or b"<empty response>"
@trio_asyncio.aio_as_trio
async def close(self):
await self.close_client()
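# A hedged usage sketch for ChunkedURLWriter, per its docstring: one URLWriter
# is created per URL, each capped at ``chunksize`` bytes. The URLs are
# hypothetical, and readall() is assumed to drain the pipe here just as it is
# used on the inner URLWriter above.
#
#     source = URLReader("https://example.com/big-file")
#     writer = source >> ChunkedURLWriter(
#         ["https://example.com/part-1", "https://example.com/part-2"],
#         chunksize=DEFAULT_CHUNK_SIZE,
#     )
#     await writer.readall()  # uploads run; writer.etags collects the ETags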
|
syncrypt/client
|
syncrypt/pipes/http.py
|
Python
|
gpl-3.0
| 5,339
|
#!/usr/bin/env python
import spear
# 1/ The tool
tool = spear.tools.ISVTool
# 2/ GMM Training
n_gaussians = 512
iterk = 25
iterg_train = 25
end_acc = 0.0001
var_thd = 0.0001
update_weights = True
update_means = True
update_variances = True
norm_KMeans = True
# 3/ JFA Training
ru = 100 # The dimensionality of the subspace
relevance_factor = 4
n_iter_train = 10
n_iter_enrol = 1
# 4/ JFA Enrolment and scoring
iterg_enrol = 1
convergence_threshold = 0.0001
variance_threshold = 0.0001
relevance_factor = 4
responsibilities_threshold = 0
|
guker/spear
|
config/tools/isv/isv_512g_u100.py
|
Python
|
gpl-3.0
| 542
|
from django.contrib.staticfiles.storage import staticfiles_storage
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from jinja2 import Environment
from albums.models import Album, Artist, RecordLabel
def get_spotify_search_url(term):
    return 'https://open.spotify.com/search/results/' + term
def get_entity_url(watson_obj):
content_type = ContentType.objects.get(app_label=watson_obj.content_type.app_label,
model=watson_obj.content_type.model)
obj_class = content_type.model_class()
url = ''
if obj_class == Album:
url = reverse('albums:album-detail', args=[watson_obj.object_id_int])
elif obj_class == Artist:
url = reverse('albums:albums-by-artist', args=[watson_obj.object_id_int])
elif obj_class == RecordLabel:
url = reverse('albums:albums-by-label', args=[watson_obj.object_id_int])
return url
ENTITY_LABELS = {
Album: 'Album',
RecordLabel: 'Label',
Artist: 'Artist',
}
def get_entity_type_label(watson_obj):
content_type = ContentType.objects.get(app_label=watson_obj.content_type.app_label,
model=watson_obj.content_type.model)
obj_class = content_type.model_class()
return ENTITY_LABELS[obj_class]
def environment(**options):
env = Environment(**options)
env.globals.update({
'static': staticfiles_storage.url,
'url': reverse,
'get_spotify_search_url': get_spotify_search_url,
'get_entity_url': get_entity_url,
'get_entity_type_label': get_entity_type_label,
})
return env
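# A hedged sketch of wiring this factory into Django's TEMPLATES setting; the
# dotted path follows this file's location (library/jinja2.py) and is an
# assumption.
#
#     TEMPLATES = [{
#         'BACKEND': 'django.template.backends.jinja2.Jinja2',
#         'APP_DIRS': True,
#         'OPTIONS': {'environment': 'library.jinja2.environment'},
#     }]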
|
chrisbay/library.kdhx.org
|
library/jinja2.py
|
Python
|
gpl-3.0
| 1,663
|
import os
from glob import glob
from itertools import chain
from typing import Iterable
import json
import jinja2
import shutil
from bank_wrangler import schema
def _generate_data_json(transactions, accounts):
transactions = [list(map(str, row))
for row in transactions]
return json.dumps({
'columns': schema.Transaction._fields,
'transactions': transactions,
'accounts': accounts
})
def _generate_pages(html_path, css_names, js_names):
env = jinja2.Environment(
undefined=jinja2.StrictUndefined,
        loader=jinja2.FileSystemLoader(html_path),
lstrip_blocks=True,
trim_blocks=True,
)
pages = {
'Bank Wrangler': 'index.html',
'List': 'list.html',
'Balance': 'balance.html',
'Spending': 'spending.html',
}
# used by base.html
env.globals = {
'cssimports': css_names,
'jsimports': js_names,
'pages': [{'name': title, 'url': filename}
for title, filename in pages.items()],
}
return {filename: env.get_template(filename).render(selectedpage=filename)
for filename in pages.values()}
def generate(root, transactions, accounts: Iterable[str]):
"""Write the report to <root>/report directory."""
reportdir = os.path.dirname(os.path.abspath(__file__))
html_path = os.path.join(reportdir, 'html')
css_paths = glob(os.path.join(reportdir, 'libs', '*.css'))
js_paths = (glob(os.path.join(reportdir, 'libs', '*.js')) +
glob(os.path.join(reportdir, 'js', '*.js')))
files = {}
for path in css_paths + js_paths:
fname = os.path.basename(path)
with open(path, 'r') as f:
files[fname] = f.read()
files['data.js'] = 'const transactionModel = {};'.format(
_generate_data_json(transactions, list(accounts))
)
css_names = list(map(os.path.basename, css_paths))
js_names = list(map(os.path.basename, js_paths)) + ['data.js']
for filename, text in _generate_pages(html_path,
css_names,
js_names).items():
files[filename] = text
outdir = os.path.join(root, 'report')
try:
shutil.rmtree(outdir)
except FileNotFoundError:
pass
os.mkdir(outdir)
for filename, datastring in files.items():
path = os.path.join(outdir, filename)
with open(path, 'w') as f:
f.write(datastring)
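# A hedged usage sketch; the root path and data are hypothetical placeholders.
#
#     generate('/tmp/wrangler', transactions=[], accounts=['checking'])
#     # -> recreates /tmp/wrangler/report with index.html, list.html,
#     #    balance.html, spending.html, the bundled css/js, and data.js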
|
tmerr/bank_wrangler
|
bank_wrangler/report/__init__.py
|
Python
|
gpl-3.0
| 2,519
|
# Generated by Django 3.1.7 on 2021-02-28 19:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('siteapp', '0041_project_tags'),
('controls', '0045_auto_20210228_1431'),
]
operations = [
migrations.AddField(
model_name='element',
name='tags',
field=models.ManyToManyField(related_name='element', to='siteapp.Tag'),
),
]
|
GovReady/govready-q
|
controls/migrations/0046_element_tags.py
|
Python
|
gpl-3.0
| 459
|
# Missing imports reconstructed from usage below: PyQt4 is assumed (the code
# calls setResizeMode(), which Qt5 renamed to setSectionResizeMode), and the
# module paths for the local orbitalTable and dic_orbital_space helpers are
# assumptions.
from PyQt4.QtGui import (QGroupBox, QHeaderView, QPushButton, QVBoxLayout,
                         QWidget)
from orbitalTable import orbitalTable
from orbital_space import dic_orbital_space
class orbitalWidget(QGroupBox):
    def __init__(self):
        # super() must name the subclass, not the parent, so that
        # QGroupBox.__init__ actually runs.
        super(orbitalWidget, self).__init__()
self.initUI()
def initUI(self):
        table = orbitalTable(0, 3)
table.horizontalHeader().setResizeMode(QHeaderView.Stretch)
btn_active = QPushButton('Active', self)
btn_active.setStyleSheet("background-color: red")
btn_active.clicked.connect(table.setActif)
btn_active.setStatusTip(dic_orbital_space["btn_active"])
btn_frozen = QPushButton('Frozen', self)
btn_frozen.setStyleSheet("background-color: rgb(51,153,255)")
btn_frozen.clicked.connect(table.setFrozen)
btn_frozen.setStatusTip(dic_orbital_space["btn_frozen"])
btn_inactive = QPushButton('Inactive', self)
btn_inactive.setStyleSheet("background-color: white")
btn_inactive.clicked.connect(table.setInactive)
btn_inactive.setStatusTip(dic_orbital_space["btn_inactive"])
vbox = QVBoxLayout()
vbox.addWidget(table)
vbox.addWidget(btn_active)
vbox.addWidget(btn_frozen)
vbox.addWidget(btn_inactive)
w=QWidget()
w.setLayout(vbox)
self.setTitle("Orbital class")
vbox = QVBoxLayout()
vbox.addWidget(w)
self.setLayout(vbox)
|
beangoben/toulouse_secretgui
|
cipsi/orbitalWidget.py
|
Python
|
gpl-3.0
| 1,291
|
"""SocialNewspaper URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from ArticleManagement import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^share_article/$', views.share_article, name="share_article"),
url(r'^print_sharing/(?P<article_id>[0-9]+)$', views.print_sharing, name="print_sharing"),
url(r'^insert_article/$', views.insert_article, name="insert_article"),
url(r'^add_interesting/(?P<article_id>[0-9]+)$', views.add_interesting, name="add_interesting"),
url(r'^print_articles/$', views.print_articles, name="print_articles"),
url(r'^editorial/$', views.editorial, name="editorial"),
url(r'^$', views.editorial, name="home")
]
|
Amyantis/SocialNewspaper
|
ArticleManagement/urls.py
|
Python
|
gpl-3.0
| 1,336
|
# -*- coding: utf-8 -*-
"""
PyRC module: pyrc_irc_abstract.irc_server
Purpose
=======
Establish and maintain a connection to an IRC server, generating events as
they occur, and sending data as required.
Legal
=====
All code, unless otherwise indicated, is original, and subject to the terms of
the GPLv2, which is provided in COPYING.
(C) Neil Tallim, 2004-2007
"""
import re
import threading
import time
import random
import Queue
import socket
import irc_user
import irc_channel
import resources.connection
import resources.irc_events
import resources.numeric_events
import pyrc_common.GLOBAL as GLOBAL
import pyrc_common.G_OBJECTS as G_OBJECTS
import pyrc_common.C_FUNCS as C_FUNCS
import pyrc_common.dictionaries.information as informationDictionaries
#The following dictionaries are used by this module:
##Server Data
import pyrc_common.dictionaries.outbound as outboundDictionaries
#The following dictionaries are used by this module:
##IRC Ping Timeout
##IRC Ping Timeout Check
##IRC Raw Command
##IRC Raw Event
##Server Channel Close
##Server Connection Error
##Server Connection Success
##Server Disconnection
##Server Protocol Error
##Server Reconnection Success
class Server(object):
"""
This class serves as an interface to PyRC's communication with, and
representation of, an IRC server in an IRC network.
    This class provides a means of sending and receiving information from the
IRC server, and it quietly gathers data to effectively model the status of
relevant parts of the network at all times.
"""
_context_id = None #: The unique ID number assigned to this Server upon its creation.
_channel_manager = None #: The pyrc_irc_abstract.irc_channel.ChannelManager object used to manage all channels PyRC is in.
_user_manager = None #: The pyrc_irc_abstract.irc_user.UserManagerServer object used to manage all users PyRC knows about.
_connection = None #: The _Connection object used to actually communicate with the IRC server.
_last_action = None #: A UNIX timestamp representing the last time at which the user acted.
_current_nickname = None #: The nickname PyRC is currently using on the IRC network.
_nickname_lock = None #: A lock used to prevent multiple simultaneous accesses to PyRC's current nickname.
_user_modes = None #: A list containing all modes currently assigned to PyRC by the IRC server.
_mode_lock = None #: A lock used to prevent multiple simultaneous accesses to the mode list.
_network_name = None #: The name of the IRC network to which this Server is attached.
_network_group_name = None #: The user-specified name of this network's group; this will be used for consistency if available.
_connection_data = None #: The _ConnectionData object used to retain the information used to connect to the IRC network for future reconnect() calls.
_event_queue = None #: A queue of events that will be passed to PyRC's plugins via the worker threads.
_stash = None #: The _Stash object used to collect pieces of data used to build a complete dictionary.
_worker_threads = None #: A tuple of worker threads used to send events from the IRC network to PyRC's plugins.
    _local_ip = None #: The IP address of the system running PyRC, as seen by the IRC server.
def __init__(self, id_number, network_group_name, thread_count):
"""
This function is invoked when a new Server object is created.
@type id_number: int
@param id_number: The unique ID number assigned to this Server object.
@type network_group_name: basestring|None
@param network_group_name: The user-specified name of the network group,
used to provide consistency to plugins if network name resolution
fails. Do not set when the user specifies an unknown address.
@type thread_count: int
@param thread_count: The number of worker threads to spawn for use by
this Server object.
@return: Nothing.
"""
self._context_id = id_number
if network_group_name:
self._network_group_name = unicode(network_group_name)
self._channel_manager = irc_channel.ChannelManager(self)
self._user_manager = irc_user.UserManagerServer()
self._stash = _Stash(self)
self._nickname_lock = threading.Lock()
self._user_modes = []
self._mode_lock = threading.Lock()
self._event_queue = Queue.Queue(0)
self.resetIdleTime()
worker_threads = []
for i in range(thread_count):
worker_thread = G_OBJECTS.WorkerThread(self._event_queue, "Context ID: %i" % id_number)
worker_threads.append(worker_thread)
worker_thread.start()
self._worker_threads = tuple(worker_threads)
def connect(self, nicknames, ident, real_name, addresses, password, channels):
"""
This function accepts and stores information required to establish a
connection to an IRC network, then attempts to connect to an IRC
server.
@type nicknames: list
@param nicknames: A list of nicknames to try, in order, in an attempt
to establish a connection to the IRC network.
@type ident: basestring
@param ident: The ident string to present to the IRC network.
@type real_name: basestring
@param real_name: The real name string to present to the IRC network.
@type addresses: tuple
@param addresses: A list of address data to cycle through in an attempt
to connect to the IRC network.
@type password: basestring|None
@param password: The password required to connect to the IRC server, if
any.
@type channels: list
@param channels: A list of channel "name[:password]" strings that may
be used to automatically join channels upon a successful
connection.
@return: Nothing.
@raise InstantiationError: If ident, real_name, nicknames, or addresses
are incomplete.
"""
self._connection_data = _ConnectionData(nicknames, (ident, real_name), addresses, password)
for i in channels:
channel_data = i.split(":", 1)
if len(channel_data) > 1:
self._channel_manager.addChannel(channel_data[0], channel_data[1])
else:
self._channel_manager.addChannel(channel_data[0])
self._connect(False)
def _connect(self, reconnection):
"""
This function handles the actual process of connecting to a server; it
        should only be called internally: first through connect(), then by
        reconnect() on all subsequent attempts.
On an error, this function's thread will be used to broadcast details to the rest
of PyRC before dying.
@type reconnection: bool
@param reconnection: True if this is an attempt at reconnecting to an
IRC server; False if this is the first connection attempt.
@return: Nothing.
"""
class ConnectionHandler(threading.Thread):
_server = None
def __init__(self, server):
threading.Thread.__init__(self)
self.setDaemon(False)
self.setName("ConnectionHandler, context ID: %i" % server._context_id)
self._server = server
def run(self):
connection_data = self._server.getConnectionData()
(ident, real_name) = connection_data.getProfile()
nickname = self._server.getNickname()
if not nickname: #If the Server has not previously connected.
nickname = connection_data.getNickname()
if nickname:
reason = None
while True:
address = connection_data.getAddress()
if not address:
break
try:
self._server.getStash().flush()
self._server._connection = _Connection(self._server, address[0], address[1], nickname, real_name, ident, connection_data.getPassword(), address[2])
self._server.setName(address[0])
if reconnection:
self._server.addEvent(outboundDictionaries.Server_Reconnection_Success(self._server.getContextID(), self._server.getName(), address[0], address[1], nickname, ident, real_name, connection_data.getPassword(), address[2]))
else:
self._server.addEvent(outboundDictionaries.Server_Connection_Success(self._server.getContextID(), self._server.getName(), address[0], address[1], nickname, ident, real_name, connection_data.getPassword(), address[2]))
reason = None
break
except resources.connection.ConnectionError, e:
reason = e.description
if reason:
self._server.addEvent(outboundDictionaries.Server_Connection_Error(self._server.getContextID(), self._server.getName(), u"Unable to connect to any given server address. Most recent error: %s" % reason))
else:
self._server.addEvent(outboundDictionaries.Server_Connection_Error(self._server.getContextID(), self._server.getName(), u"Unable to select a nickname to use for authentication."))
ConnectionHandler(self).start()
def reconnect(self):
"""
This function attempts to reconnect to an IRC network, using the data
stored when connect() was previously called.
@return: Nothing.
@raise ReconnectionError: If the Server is still connected or if
connect() was not previously called.
"""
if self.isConnected():
raise ReconnectionError(u"There is already an active connection.")
if not self._connection_data:
raise ReconnectionError(u"No prior connection has been attempted.")
self._connect(True)
def disconnect(self):
"""
This function closes this Server's link to the IRC server, taking care
of any necessary cleanup tasks.
@return: Nothing.
"""
self._closeConnection()
def close(self):
"""
This function cleans up the Server in preparation for its removal.
Any links to other parts of PyRC will be severed.
@return: Nothing.
"""
for i in self._channel_manager.emptyPool():
self.addEvent(outboundDictionaries.IRC_Channel_Close(self.getContextID(), self.getName(), i, "Closing connection", False, None))
self.disconnect()
for i in self._worker_threads:
i.kill()
def _closeConnection(self):
"""
This function handles the internal calls used to actually disconnect
from the IRC server.
This function should not be called from outside of the object.
@return: Nothing.
"""
if self._connection:
self._connection.close()
def processInput(self, raw_string):
"""
This function processes the raw input provided by the IRC server.
It works by splitting the input into lines based on linebreak
characters. If the last line is missing such a character, it is
considered a fragment and stored in the Server's _Stash to be used when
processing the next packet.
Each line is individually fed to _processInput, which handles
evaluation.
@type raw_string: basestring
@param raw_string: Raw input from the IRC server.
@rtype: variable|None
@return: None if processing went smoothly; something if there was a
problem. (The returned value is meaningless; an event dictionary
will be generated to describe the problem)
"""
fragment = self._stash.getFragment()
if fragment:
raw_string = fragment + raw_string
lines = re.split(r"\r|\n", raw_string)
if not re.match(r"\r|\n", raw_string[-1]):
self._stash.setFragment(lines.pop())
for i in lines:
if i:
result = self._processInput(i)
if result:
return result
def _processInput(self, raw_string):
"""
This function accepts raw lines from the IRC server and delegates its
processing to one of the support libraries.
A "Raw IRC Event" is generated if any plugins are waiting for them. If
not, the event is suppressed.
@type raw_string: basestring
@param raw_string: The line to be processed.
@rtype: variable|None
@return: None if processing went smoothly; something if there was a
problem. (The returned value is meaningless; an event dictionary
will be generated to describe the problem)
"""
if self._connection: #Internal events might not need a connection.
self._connection.resetTimeout()
if GLOBAL.plugin.handlesRawEvent():
self.addEvent(outboundDictionaries.IRC_Raw_Event(self.getContextID(), self.getName(), raw_string))
if not raw_string.startswith(':'):
try:
return resources.irc_events.handleNonColon(self, raw_string)
except resources.irc_events.ProtocolError, e:
self.addEvent(outboundDictionaries.Server_Protocol_Error(self.getContextID(), self.getName(), e.description))
else: #Determine what sort of event this is.
data = raw_string[1:].split(None, 2)
if data[1].isdigit(): #Server code.
try:
return resources.numeric_events.handleIRCEvent(self, data[0], data[1:], raw_string)
except resources.numeric_events.ProtocolError, e:
self.addEvent(outboundDictionaries.Server_Protocol_Error(self.getContextID(), self.getName(), e.description))
else:
try:
if data[0].find("!") == -1:
return resources.irc_events.handleServerCode(self, data[0], data[1:], raw_string)
else:
return resources.irc_events.handleResponseCode(self, data[0], data[1:], raw_string)
except resources.irc_events.ProtocolError, e:
self.addEvent(outboundDictionaries.Server_Protocol_Error(self.getContextID(), self.getName(), e.description))
def send(self, message, priority=GLOBAL.ENUM_SERVER_SEND_PRIORITY.AVERAGE):
"""
This function queues a string for transmission to the IRC server.
@type message: basestring
@param message: The string to be sent to the IRC server.
@type priority: GLOBAL.ENUM_SERVER_SEND_PRIORITY.EnumValue
@param priority: The priority the message will be assigned in the
queue.
@return: Nothing.
"""
if self._connection: #Make sure a connection has indeed been established.
self._connection.addMessage(message, priority)
def ping(self, target=None):
"""
This function sends a PING to a user on the IRC server, or to the server
itself.
This function's logic resides within the Server's _Connection object.
@type target: basestring|None
@param target: The name of the user to be pinged, or None if the server
is the intended target.
@return: Nothing.
"""
if self._connection: #Make sure a connection has indeed been established.
self._connection.ping(target)
def processPong(self, source=None):
"""
This function records the receipt of a PONG from either the IRC server
or a user and returns the time that elapsed since the PING was sent.
@type source: basestring|None
@param source: The name of the user who sent the PONG reply, or None if
the reply came from the IRC server.
@rtype: float|None
@return: The number of seconds that elapsed between the PING and this
PONG, or None if the source isn't being tracked.
"""
if self._connection: #Make sure a connection has indeed been established.
return self._connection.pong(source)
def addChannel(self, channel_name):
"""
This function adds a new Channel to the server.
The logic of this function has been exported to
irc_channel.ChannelManager.addChannel().
@type channel_name: basestring
@param channel_name: A string containing the name of the channel to be
added.
@return: Nothing.
"""
self._channel_manager.addChannel(channel_name)
def addEvent(self, event):
"""
This function adds an event to the server's broadcast queue.
@type event: dict
@param event: The event to broadcast to PyRC's plugins.
@return: Nothing.
"""
self._event_queue.put(event)
def addUser(self, user):
"""
This function adds a new user to the pool of managed users.
The logic of this function has been exported to
irc_user.UserManagerServer.addUser().
@type user: irc_user.User
@param user: The User object to be added to the pool.
@return: Nothing.
"""
self._user_manager.addUser(user)
def getChannel(self, channel_name):
"""
This function will retrieve the specified Channel from the server.
The logic of this function has been exported to
irc_channel.ChannelManager.getChannel().
@type channel_name: basestring
@param channel_name: A string containing the name of the channel to be
retrieved.
@rtype: Channel|None
@return: The requested Channel object, or None if the channel could not
be found.
"""
return self._channel_manager.getChannel(channel_name)
def getChannelManager(self):
"""
This function returns the pyrc_irc_abstract.irc_channel.ChannelManager
object owned by this Server.
@rtype: pyrc_irc_abstract.irc_channel.ChannelManager
@return: The pyrc_irc_abstract.irc_channel.ChannelManager object owned by this
Server.
"""
return self._channel_manager
def getConnectionData(self):
"""
This function returns the _ConnectionData object associated with this
Server.
@rtype: _ConnectionData
@return: The _ConnectionData object associated with this Server.
"""
return self._connection_data
def getContextID(self):
"""
This function returns the context ID assigned to this Server.
@rtype: int
@return: This Server's context ID.
"""
return self._context_id
def getData(self):
"""
This function returns a dictionary containing all information known about
the server.
@rtype: dict
@return: A dictionary of the format returned by
common.dictionaries.information.Server_Data().
"""
        (address, port) = self._connection.getAddress()
ident = None
realname = None
if self._connection_data:
profile = self._connection_data.getProfile()
ident = profile[0]
realname = profile[1]
return informationDictionaries.ServerData(self._context_id, self._network_group_name, self._network_name, address, port, self.getNickname(), ident, realname, self.getUserModes(), self.getUserModeString(), self._channel_manager.getChannelsData(), self.getIdleTime(), self._local_ip)
def getIdent(self):
"""
This function returns the ident this Server object is set to
provide to IRC servers.
@rtype: unicode|None
@return: The ident this Server object provides to IRC servers.
This value will not change while a connection is active, and it
will still be available through this function after a connection
ends.
It changes only when a connection is attempted; it will be None if
this Server object is new.
"""
if self._connection_data:
return self._connection_data.getProfile()[0]
return None
def getIdleTime(self):
"""
This function returns the number of seconds that have elapsed since the
user last acted.
@rtype: float
@return: The number of seconds that have elapsed since the user last
acted.
"""
return time.time() - self._last_action
def getLocalIP(self):
"""
This function returns the local IP of PyRC, as set by the user, seen by
the server, or identified by a local check, which will generally return
localhost.
@rtype: unicode
@return: The IP of the system running PyRC.
"""
if GLOBAL.DCC_LOCAL_IP:
return GLOBAL.DCC_LOCAL_IP
if self._local_ip:
return self._local_ip
try:
local_ip = socket.gethostbyname(socket.gethostname())
if local_ip:
return local_ip
return "127.0.0.1"
        except socket.error:
return "127.0.0.1"
def getName(self):
"""
This function returns the name of the IRC network to which this Server
is connected.
@rtype: unicode
@return: The name of the IRC network to which this Server is connected.
"""
if self._network_group_name:
return self._network_group_name
return self._network_name
def getNickname(self):
"""
This function returns the nickname currently associated with PyRC on
the IRC server to which this Server is connected.
@rtype: unicode
@return: The nickname used by this Server on the IRC server.
"""
try:
self._nickname_lock.acquire()
return self._current_nickname
finally:
self._nickname_lock.release()
def getRealName(self):
"""
This function returns the real name this Server object is set to
provide to IRC servers.
@rtype: unicode|None
@return: The real name this Server object provides to IRC servers.
This value will not change while a connection is active, and it
will still be available through this function after a connection
ends.
It changes only when a connection is attempted; it will be None if
this Server object is new.
"""
if self._connection_data:
return self._connection_data.getProfile()[1]
return None
def getStash(self):
"""
This function returns the _Stash object associated with this Server.
@rtype: _Stash
@return: The _Stash object associated with this Server.
"""
return self._stash
def getUser(self, nickname):
"""
This function retrieves the pyrc_irc_abstract.irc_user.User object used to
represent the named user on the IRC network.
The logic of this function has been exported to
irc_user.UserManagerServer.getUser().
@type nickname: basestring
@param nickname: The nickname of the user whose representation is to be
retrieved.
@rtype: pyrc_irc_abstract.irc_user.User|None
@return: The cached object used to represent the requested user, or
None if the user is not known.
"""
return self._user_manager.getUser(nickname)
def getUserData(self, nickname):
"""
This function retrieves a standard user information dictionary that
represents the current status of the named user on the IRC network.
The logic of this function has been exported to
irc_user.UserManagerServer.getUserData().
@type nickname: basestring
@param nickname: The nickname of the user whose representation is to be
retrieved.
@rtype: dict|None
@return: The user information dictionary that currently represents the
requested user, or None if the user is not known.
"""
return self._user_manager.getUserData(nickname)
def getUserManager(self):
"""
This function retrieves this Server's UserManager object.
@rtype: irc_user.UserManager
@return: This Server's UserManager object.
"""
return self._user_manager
def getUserModes(self):
"""
This function returns a tuple of all modes the IRC server has assigned
to PyRC.
@rtype: tuple
@return: A tuple of modes set on PyRC by the server. Its members may be
single-character mode strings, or tuples formed of single-character
            mode strings and a variable-length single-token parameter.
"""
try:
self._mode_lock.acquire()
return tuple(self._user_modes)
finally:
self._mode_lock.release()
def getUserModeString(self):
"""
This function returns a string representing all modes the IRC server
has assigned to PyRC.
@rtype: unicode
@return: A string representing all modes the IRC server has assigned to
PyRC, followed by their parameters.
"""
self._mode_lock.acquire()
modestring = ''
post_modestring = ''
for i in self._user_modes:
if isinstance(i, basestring):
modestring += i
else:
modestring += i[0]
post_modestring += ' %s' % i[1]
self._mode_lock.release()
return unicode(modestring + post_modestring)
def isConnected(self):
"""
This function returns whether this Server object is currently connected
to an IRC server.
It works by determining whether a nickname is currently assigned to
PyRC. When PyRC connects, it sets the _current_nickname variable; when
the connection is lost, this variable is cleared.
@rtype: bool
@return: True if this Server object is connected to an IRC server;
False otherwise.
"""
try:
self._nickname_lock.acquire()
            return self._current_nickname is not None
finally:
self._nickname_lock.release()
def removeChannel(self, channel_name):
"""
This function will remove the specified Channel from the server.
The logic of this function has been exported to
irc_channel.ChannelManager.removeChannel().
@type channel_name: basestring
@param channel_name: A string containing the name of the channel to be
removed.
@return: Nothing.
"""
self._channel_manager.removeChannel(channel_name)
def removeUser(self, nickname):
"""
This function removes a user from the pool.
The logic of this function has been exported to
irc_user.UserManagerServer.removeUser().
@type nickname: basestring
@param nickname: The nickname of the user to be removed.
@return: Nothing.
"""
self._user_manager.removeUser(nickname)
def resetIdleTime(self):
"""
This function resets the counter used to determine how long the user has
been inactive.
@return: Nothing.
"""
self._last_action = time.time()
def setLocalIP(self, ip):
"""
This function is used to set the local IP of PyRC for this IRC server.
@type ip: basestring
@param ip: PyRC's IP address.
@return: Nothing.
"""
self._local_ip = unicode(ip)
def setName(self, network_name):
"""
This function sets the name of the IRC network to which this Server is
connected.
This function should only be called once per connection, based on the
value in the "Welcome" event.
@type network_name: unicode
@param network_name: The name of the IRC network to which this Server
is connected.
@return: Nothing.
"""
self._network_name = network_name
def setNickname(self, nickname):
"""
This function updates the nickname currently associated with PyRC on
the IRC server to which this Server is connected. It also updates the
value to try first when reconnecting.
@type nickname: unicode
@param nickname: The nickname to set in place of the old one.
@return: Nothing.
"""
self._nickname_lock.acquire()
self._current_nickname = nickname
self._nickname_lock.release()
def updateUserModes(self, modes):
"""
This function updates the modes the IRC server has assigned to PyRC.
@type modes: list
@param modes: A list of changed modes with which to update PyRC's
internal mode list.
These modes may be single-character mode strings or tuples
comprised of a single-character mode string and a variable-length
single-token parameter.
@return: Nothing.
"""
self._mode_lock.acquire()
for i in modes:
if i[2]:
if not i[1]:
self._user_modes.append(i[0])
else:
self._user_modes.append(i[:2])
else:
for j in self._user_modes:
if j[0] == i[0]:
self._user_modes.remove(j)
break
self._user_modes.sort()
self._mode_lock.release()
def updateUserNickname(self, nickname, new_nickname):
"""
This function ensures that a name change by another user on the IRC
network is properly reflected in all areas of PyRC's local cache.
The logic of this function has been exported to
irc_user.UserManagerServer.updateUserNickname().
@type nickname: basestring
@param nickname: The old nickname of the user.
@type new_nickname: basestring
@param new_nickname: The new nickname of the user.
@return: Nothing.
"""
self._user_manager.updateUserNickname(nickname, new_nickname)
class _ConnectionData(object):
"""
This class serves as a container for data needed to establish a connection
to an IRC server. One should be attached to each Server object.
"""
    _profile = None #: The profile data to use on this connection.
_nicknames = None #: A list of nicknames that can be used on this connection.
_nickname_counter = -1 #: A pointer to the last attempted nickname.
_addresses = None #: A list of addresses that can be connected to.
    _address_counter = -1 #: A pointer to the last attempted connection.
_password = None #: The network's password, if any.
def __init__(self, nicknames, profile, addresses, password):
"""
This function is invoked when a new _ConnectionData object is created.
@type nicknames: list
@param nicknames: A list of nicknames to cycle through in order to
establish a connection to an IRC server.
@type profile: tuple
@param profile: A tuple containing the ident and real name to provide
to the IRC server once a connection has been established.
@type addresses: list
@param addresses: A list of IRC server addresses to try.
@type password: unicode|None
@param password: The password required by the IRC network, if any.
@return: Nothing.
@raise InstantiationError: If any of profile, nicknames, or addresses
are incomplete.
"""
if not profile[0] or not profile[1]:
raise InstantiationError(u"No profiles have been specified for use on this connection.")
if not nicknames:
raise InstantiationError(u"No nicknames have been specified for use on this connection.")
if not addresses:
raise InstantiationError(u"No addresses have been specified for use on this connection.")
self._nicknames = nicknames
self._profile = profile
self._addresses = addresses
self._password = password
def getAddress(self):
"""
This function will provide the next address in the list of addresses to
try in an attempt to establish a connection.
If there are no remaining addresses, it will return None, and it should
be assumed that the connection attempt failed.
@rtype: tuple|None
@return: The next URL/port/SSL triple to try in an attempt to establish
a connection, or None if all addresses have been exhausted.
"""
if self._address_counter + 1 < len(self._addresses):
self._address_counter += 1
return self._addresses[self._address_counter]
return None
def getNickname(self):
"""
This function will provide the next nickname in the list of nicknames
to try in an attempt to establish a connection.
If there are no remaining nicknames, it will return None, and it should
be assumed that the connection attempt failed.
@rtype: unicode|None
@return: The next nickname to try in an attempt to establish a
connection, or None if all nicknames have been exhausted.
"""
if self._nickname_counter + 1 < len(self._nicknames):
self._nickname_counter += 1
return self._nicknames[self._nickname_counter]
return None
def getPassword(self):
"""
This function returns the password used by the IRC network, if any.
@rtype: unicode|None
@return: The password provided to the IRC network, if any.
"""
return self._password
def getProfile(self):
"""
This function returns the ident and real name PyRC presented to the IRC
server.
@rtype: tuple
@return: The ident and real name in a tuple.
"""
return self._profile
def setConnected(self):
"""
This function resets the address cycle and returns the address that was
ultimately used to establish a connection to the server.
It should be called only once a connection has been confirmed.
        @rtype: tuple
        @return: The URL/port/SSL triple that was ultimately used to establish
            the connection.
"""
address = self._addresses[self._address_counter]
self._address_counter = -1
return address
def setAuthenticated(self):
"""
This function resets the nickname cycle and returns the profile that
was ultimately used to establish a connection to the server.
It should be called only once a connection has been confirmed and a
'Welcome' event has been received.
@rtype: unicode
@return: The nickname that was ultimately used to connect to the server.
"""
nickname = self._nicknames[self._nickname_counter]
self._nickname_counter = -1
return nickname
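# A hedged illustration of the cycling documented in getAddress() and
# getNickname() above: each call advances a cursor and returns None once the
# list is exhausted, while setConnected()/setAuthenticated() reset the cursors
# after a confirmed connection. The values shown are hypothetical.
#
#     data = _ConnectionData([u"pyrc"], (u"pyrc", u"PyRC user"),
#                            [(u"irc.example.org", 6667, False)], None)
#     data.getAddress()    # -> (u"irc.example.org", 6667, False)
#     data.getAddress()    # -> None; every address has been tried
#     data.setConnected()  # -> returns the winning address, resets the cursor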
class _Stash(object):
"""
This class provides a resource for aggregating data received from the IRC
server in order to emit complete dictionaries.
"""
_server = None #: A reference to the Server that owns this object.
_motd = None #: A list of lines that comprise the IRC server's Message of the Day.
_banlists = None #: A dictionary of channels that are currently receiving their banlist information.
_channels = None #: A dictionary of channels that are incomplete, waiting for additional information before being presented to PyRC.
_userlists = None #: A dictionary of pyrc_irc_abstract.irc_channel.Channel or pyrc_irc_abstract.irc_channel.SimpleUserContainer objects, keyed by channel name, that are used to direct the results of an IRC server's NAME events.
_whois_replies = None #: A dictionary used to track WHOIS requests sent by PyRC. Data is received in bits and pieces, so it needs to be aggregated before a dictionary can be emitted.
_whowas_replies = None #: A dictionary used to track WHOWAS requests sent by PyRC. Data is received in bits and pieces, so it needs to be aggregated before a dictionary can be emitted.
_who_replies = None #: A dictionary used to track WHO requests sent by PyRC. Data is received in bits and pieces, so it needs to be aggregated before a dictionary can be emitted.
_fragment = None #: A string fragment of a line received from the IRC server. This is used if the data the server tried to send exceeds the allowed packet size.
def __init__(self, server):
"""
This function is invoked when a new _Stash object is created.
@type server: Server
@param server: A reference to the Server that owns this object.
@return: Nothing.
"""
self._server = server
self.flush()
def flush(self):
"""
This function is invoked when the contents of this _Stash object are no
longer needed.
@return: Nothing.
"""
self._motd = None
self._banlists = {}
self._channels = {}
self._userlists = {}
self._whois_replies = {}
self._whowas_replies = {}
self._who_replies = {}
self._fragment = None
def completeMOTD(self):
"""
This function returns the complete MOTD and frees the memory used to
build it.
This function should be called only when the server indicates that the
MOTD has been fully transmitted.
@rtype: list
@return: The list of lines comprising the server's MOTD.
"""
motd = self._motd
self._motd = None
return motd
def getMOTD(self):
"""
This function retrieves the _Stash's working MOTD list.
The object returned by this function should be modified directly using
append().
@rtype: list
@return: The working collection of MOTD lines received from the server.
"""
if not self._motd:
self._motd = []
return self._motd
def completeBanlist(self, channel_name):
"""
This function retrieves a completed banlist for a channel.
@type channel_name: basestring
@param channel_name: The name of the channel for which the banlist is to
be retrieved.
@rtype: tuple
@return: This channel's banlist. Its elements are tuples with the
following format::
(<banmask:unicode>, <server_maintaining_ban:unicode>,
<unix_ban_timestamp:int>)
"""
#Sanitize input.
channel_name = unicode(channel_name).lower()
banlist = self._banlists.get(channel_name)
if banlist:
del self._banlists[channel_name]
banlist = tuple(banlist)
else:
banlist = ()
return banlist
def getBanlist(self, channel_name):
"""
This function retrieves an active banlist that is waiting to be
populated by information as it arrives from the server.
The object returned by this function should be appended to directly.
@type channel_name: basestring
@param channel_name: The name of the channel for which the banlist is to
be retrieved.
@rtype: list
@return: The list used to store this channel's banlist. Its elements
are tuples with the following format::
(<banmask:unicode>, <server_maintaining_ban:unicode>,
<unix_ban_timestamp:int>)
"""
#Sanitize input.
channel_name = unicode(channel_name).lower()
banlist = self._banlists.get(channel_name)
if not banlist:
banlist = []
self._banlists[channel_name] = banlist
return banlist
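    # A hedged sketch of the get/complete aggregation pattern used throughout
    # _Stash: getBanlist() returns a mutable working list that is appended to
    # as banlist numerics arrive, and completeBanlist() freezes and frees it.
    # The values below are hypothetical.
    #
    #     banlist = stash.getBanlist("#channel")
    #     banlist.append((u"*!*@spam.host", u"irc.example.org", 1234567890))
    #     stash.completeBanlist("#channel")
    #     # -> ((u"*!*@spam.host", u"irc.example.org", 1234567890),)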
def completeChannel(self, channel_name):
"""
This function returns a dictionary with all information required to
properly represent a channel on the IRC server, while freeing the
memory used to build it.
This function should be called only when the server indicates that the
channel data has been fully transmitted (when the last nickname is
received).
@type channel_name: basestring
@param channel_name: The name of the channel for which data is to be
retrieved.
@rtype: dict
@return: A dictionary containing all information required to properly
represent a channel on the IRC server.
"""
#Sanitize input.
channel_name = unicode(channel_name).lower()
channel = self._channels.get(channel_name)
if channel:
del self._channels[channel_name]
return channel
def createChannel(self, channel_name):
"""
This function creates a new, blank channel dictionary that will wait to
be populated by information as it arrives from the server.
The object returned by this function should be modified directly using
Python's dictionary operators.
@type channel_name: basestring
@param channel_name: The name of the channel for which data is to be
retrieved.
@rtype: dict
@return: The new dictionary created to collect information related to
the status of a channel on the IRC server.
"""
#Sanitize input.
channel_name = unicode(channel_name).lower()
channel = {
'channel': channel_name,
'topicwho': None,
'topictime': None
}
self._channels[channel_name] = channel
return channel
def getChannel(self, channel_name):
"""
This function retrieves an active channel dictionary that is waiting to
be populated by information as it arrives from the server.
The object returned by this function should be modified directly using
Python's dictionary operators.
@type channel_name: basestring
@param channel_name: The name of the channel for which data is to be
retrieved.
@rtype: dict|None
@return: The dictionary used to collect information related to the
status of a channel on the IRC server, or None if the channel
dictionary was not previously created with createChannel().
"""
return self._channels.get(unicode(channel_name).lower())
def completeUserList(self, channel_name):
"""
This function returns an object with all information required to
properly represent a list of users in a channel on the IRC server,
while freeing the memory used to build it.
This function should be called only when the server indicates that the
name list has been fully transmitted.
@type channel_name: basestring
@param channel_name: The name of the channel for which data is to be
retrieved.
@rtype: pyrc_irc_abstract.irc_channel._UserContainer
@return: Either a pyrc_irc_abstract.irc_channel.Channel object
containing a full list of users if PyRC is in the requested channel,
or a pyrc_irc_abstract.irc_channel.SimpleUserContainer object
containing a full list of users if PyRC is scanning another channel.
"""
#Sanitize input.
channel_name = unicode(channel_name.lower())
channel = self._userlists.get(channel_name)
if channel:
del self._userlists[channel_name]
return channel
def createUserList(self, channel_name):
"""
This function indicates that an object should be provided to collect
the names of users in a channel. The nature of this object will vary
depending on whether PyRC happens to be in the channel in question, but
their relevant functions have identical signatures.
@type channel_name: basestring
@param channel_name: The name of the channel for which data is to be
retrieved.
@rtype: pyrc_irc_abstract.irc_channel._UserContainer
@return: Either a pyrc_irc_abstract.irc_channel.Channel object
containing an empty list of users if PyRC is in the requested
channel, or a pyrc_irc_abstract.irc_channel.SimpleUserContainer
object containing an empty list of users if PyRC is scanning another
channel.
"""
#Sanitize input.
channel_name = unicode(channel_name.lower())
channel = self._server.getChannel(channel_name)
if channel:
self._userlists[channel_name] = channel
else:
channel = irc_channel.SimpleUserContainer(self._server)
self._userlists[channel_name] = channel
return channel
def getUserList(self, channel_name):
"""
This function provides an object to use to collect the names of users
in a channel. The nature of this object will vary depending on whether
PyRC happens to be in the channel in question, but their relevant
functions have identical signatures.
@type channel_name: basestring
@param channel_name: The name of the channel for which data is to be
retrieved.
@rtype: pyrc_irc_abstract.irc_channel._UserContainer
@return: Either a pyrc_irc_abstract.irc_channel.Channel object
containing a list of users if PyRC is in the requested channel, or a
pyrc_irc_abstract.irc_channel.SimpleUserContainer object containing
a list of users if PyRC is scanning another channel.
"""
return self._userlists.get((unicode(channel_name.lower())))
def completeWho(self, username):
"""
This function returns a dictionary with all information required to
properly represent a WHO response from an IRC server, while freeing
the memory used to build it.
This function should be called only when the server indicates that the
WHO information has been fully transmitted.
@type username: basestring
@param username: The name of the user for whom information was
requested.
@rtype: dict
@return: A dictionary containing all elements necessary to build a
complete WHO event dictionary.
"""
#Sanitize input.
username = unicode(username).lower()
who = self._who_replies.get(username)
if who:
del self._who_replies[username]
return who
def createWho(self, username):
"""
This function creates a dictionary that will collect information
required to properly represent a WHO response from an IRC server.
The dictionary returned by this function should be manipulated using
the normal Python dictionary interfaces.
@type username: basestring
@param username: The name of the user for whom information was
requested.
@rtype: dict
@return: A dictionary used to store the elements necessary to build a
complete WHO event dictionary.
"""
who = {
'channels': None,
'userdata': None
}
self._who_replies[unicode(username).lower()] = who
return who
def completeWhoIs(self, username):
"""
This function returns a dictionary with all information required to
properly represent a WHOIS response from an IRC server, while freeing
the memory used to build it.
This function should be called only when the server indicates that the
WHOIS information has been fully transmitted.
@type username: basestring
@param username: The name of the user for whom information was
requested.
@rtype: dict
@return: A dictionary containing all elements necessary to build a
complete WHOIS event dictionary.
"""
#Sanitize input.
username = unicode(username).lower()
whois = self._whois_replies.get(username)
if whois:
del self._whois_replies[username]
return whois
def createWhoIs(self, username):
"""
This function creates a dictionary that will collect information
required to properly represent a WHOIS response from an IRC server.
The dictionary returned by this function should be manipulated using
the normal Python dictionary interfaces.
@type username: basestring
@param username: The name of the user for whom information was
requested.
@rtype: dict
@return: A dictionary used to store the elements necessary to build a
complete WHOIS event dictionary.
"""
whois = {
'ircserver': None,
'servername': None,
'address': None,
'idletime': None,
'channels': [],
'modes': None,
'bot': None,
'chanop': None,
'help': None,
'operator': None,
'registered': [],
'secure': None,
'data': [],
'userdata': None
}
self._whois_replies[unicode(username).lower()] = whois
return whois
def getWhoIs(self, username):
"""
This function returns a dictionary that will collect information
required to properly represent a WHOIS response from an IRC server.
The dictionary returned by this function should be manipulated using
the normal Python dictionary interfaces.
@type username: basestring
@param username: The name of the user for whom information was
requested.
@rtype: dict
@return: A dictionary used to store the elements necessary to build a
complete WHOIS event dictionary.
"""
return self._whois_replies.get(unicode(username).lower())
def completeWhoWas(self, username):
"""
This function returns a dictionary with all information required to
properly represent a WHOWAS response from an IRC server, while freeing
the memory used to build it.
This function should be called only when the server indicates that the
WHOWAS information has been fully transmitted.
@type username: basestring
@param username: The name of the user for whom information was
requested.
@rtype: dict
@return: A dictionary containing all elements necessary to build a
complete WHOWAS event dictionary.
"""
#Sanitize input.
username = unicode(username).lower()
whowas = self._whowas_replies.get(username)
if whowas:
del self._whowas_replies[username]
return whowas
def createWhoWas(self, username):
"""
This function creates a dictionary that will collect information
required to properly represent a WHOWAS response from an IRC server.
The dictionary returned by this function should be manipulated using
the normal Python dictionary interfaces.
@type username: basestring
@param username: The name of the user for whom information was
requested.
@rtype: dict
@return: A dictionary used to store the elements necessary to build a
complete WHOWAS event dictionary.
"""
whowas = {
'lastserver': None,
'lastseen': None,
'userdata': None
}
self._whowas_replies[unicode(username).lower()] = whowas
return whowas
def getWhoWas(self, username):
"""
This function returns a dictionary that will collect information
required to properly represent a WHOWAS response from an IRC server.
The dictionary returned by this function should be manipulated using
the normal Python dictionary interfaces.
@type username: basestring
@param username: The name of the user for whom information was
requested.
@rtype: dict
@return: A dictionary used to store the elements necessary to build a
complete WHOWAS event dictionary.
"""
return self._whowas_replies.get(unicode(username).lower())
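	# --- Illustrative sketch (not part of the original module) ---
	# The create*/get*/complete* triads above implement a collect-then-drain
	# cache: the handler for the first numeric reply calls create*() to open
	# a slot, intermediate replies fetch and fill it via get*(), and the
	# end-of-list reply calls complete*(), which returns the finished dict
	# and frees the slot. Hypothetical usage, assuming `server_data` is an
	# instance of this class:
	#     whois = server_data.createWhoIs("SomeNick")
	#     whois['idletime'] = 42                  # filled by a numeric reply
	#     event = server_data.completeWhoIs("SomeNick")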
def getFragment(self):
"""
This function retrieves any partial line that may have been cut if the
IRC server tried to send more information than it could fit in a
packet.
@rtype: basestring|None
@return: A line fragment or None if the last packet ended cleanly.
"""
fragment = self._fragment
self._fragment = None
return fragment
def setFragment(self, fragment):
"""
This function is used to save a partial line that was cut because the
IRC server tried to send more data than could be accepted in a packet.
@type fragment: basestring
@param fragment: The partial line to be stored.
@return: Nothing.
"""
self._fragment = fragment
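# --- Illustrative sketch (not part of the original module) ---
# A minimal model of the fragment buffer used by getFragment()/setFragment():
# raw packet text is split on the line terminator, and a trailing partial
# line is stashed so the next packet can complete it. The terminator default
# here is an assumption; the real module uses GLOBAL.IRC_LINE_TERMINATOR.
def _demo_reassemble_lines(packets, terminator="\r\n"):
	"""Yield complete lines from packet-sized chunks, buffering partial tails."""
	fragment = None #Plays the role of the _fragment attribute above.
	for packet in packets:
		if fragment:
			packet = fragment + packet #Prepend the stored partial line.
			fragment = None
		pieces = packet.split(terminator)
		fragment = pieces.pop() or None #'' when the packet ended cleanly.
		for line in pieces:
			if line:
				yield line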
class _Connection(object):
"""
This class maintains a connection to an IRC server, and handles all data
traffic over the connection's lifetime.
"""
_server = None #: The Server that owns this object.
_socket = None #: A resources.connection._Socket used to communicate with the IRC server.
_socket_reader = None #: A _SocketReader used to generate events from messages sent by the IRC server.
_socket_sender = None #: A _SocketSender used to feed new messages to the IRC server.
_ping_core = None #: A _PingCore used to manage all PING-related services on this connection.
_priority_queue = None #: A _PriorityQueue object used to manage outbound data.
def __init__(self, server, host, port, nickname, realname, ident, password, ssl):
"""
This function is invoked when creating a new _Connection object.
It connects to the specified IRC server and authenticates the connection.
@type server: Server
@param server: A reference to the Server that owns this object.
@type host: basestring
@param host: The DNS, IP, or host address of the IRC server to which a
connection will be made.
@type port: int
@param port: The port on the IRC server to which a connection will be
made.
@type nickname: basestring
@param nickname: The nickname to be used by this connection.
@type realname: basestring
@param realname: The real name to be used by this connection.
@type ident: basestring
		@param ident: The ident string to be used by this connection.
@type password: basestring|None
@param password: The password used to log into the IRC server, if
required.
@type ssl: bool
@param ssl: True if an encrypted connection is to be used.
@return: Nothing.
@raise resources.connection.ConnectionError: If a connection could not be
established at the specified host/port.
"""
self._server = server
if ssl:
self._socket = resources.connection.SSLSocket()
else:
self._socket = resources.connection.BasicSocket()
self._socket.connect(host, port)
time.sleep(0.5) #Prevent "client too fast" errors.
if password: #Authenticate.
self.send("PASS %s" % password)
self.send("NICK %s" % nickname)
self.send("USER %s %s %s :%s" % (ident, socket.gethostname(), host, realname))
self._socket_reader = _SocketReader(self)
self._socket_sender = _SocketSender(self)
self._ping_core = _PingCore(self)
self._priority_queue = _PriorityQueue()
self._socket_reader.start()
self._socket_sender.start()
self._ping_core.start()
def addMessage(self, message, priority=GLOBAL.ENUM_SERVER_SEND_PRIORITY.AVERAGE):
"""
This function queues a message to be sent to the IRC server.
@type message: basestring
@param message: The message to be sent to the IRC server.
@type priority: GLOBAL.ENUM_SERVER_SEND_PRIORITY.EnumValue
@param priority: An enumeration value used to determine the priority at
which this message should be pulled out of the queue.
@return: Nothing.
"""
if priority == GLOBAL.ENUM_SERVER_SEND_PRIORITY.NOW:
try:
self.send(message)
except resources.connection.InvalidStateError: #The socket must have been closed prior to this instruction.
pass
else:
self._priority_queue.addMessage(message, priority)
def close(self):
"""
This function terminates all connections and threads in use by this
_Connection object.
@return: Nothing.
"""
self._ping_core.kill()
self._socket_reader.kill()
self._socket_sender.kill()
self._socket.close()
def getLatency(self):
"""
This function returns the number of seconds that have elapsed since the
IRC server was last pinged by PyRC.
@rtype: float
@return: The number of seconds that have elapsed since PyRC last pinged
the server.
"""
return self._ping_core.getServerPingTime()
def getMessage(self):
"""
This function returns the next message to be sent to the IRC server.
@rtype: unicode|None
@return: The next message to be sent, if any, or None if no message is
waiting.
"""
return self._priority_queue.getMessage()
def getMessageCount(self):
"""
		This function returns the number of unsent messages in the queue.
		This count may be useful for making throttling decisions.
@rtype: int
@return: The number of unsent messages in the queue.
"""
return self._priority_queue.getMessageCount()
def getServer(self):
"""
This function returns a reference to the Server that owns this object.
@rtype: Server
@return: A reference to the Server that owns this object.
"""
return self._server
def read(self):
"""
This function reads data from the IRC server.
@rtype: basestring|None
@return: The data received from the IRC server, or None if no data was
available.
@raise InvalidStateError: If the socket is dead.
@raise IncomingTransmissionError: If a problem occurred when reading data
from the connection.
"""
return self._socket.readData(GLOBAL.IRC_PACKET_SIZE)
def send(self, message):
"""
This function sends data to the IRC server.
@type message: basestring
@param message: The string to be sent to the IRC server.
@return: Nothing.
@raise InvalidStateError: If the socket is dead.
@raise OutgoingTransmissionError: If a problem occurred when writing data
to the connection.
"""
if GLOBAL.plugin.handlesRawCommand():
self._server.addEvent(outboundDictionaries.IRC_Raw_Command(self._server.getContextID(), self._server.getName(), message))
self._socket.sendData(message.encode("utf-8") + GLOBAL.IRC_LINE_TERMINATOR)
def ping(self, target=None):
"""
This function sends a PING to a user on the IRC server, or to the server
itself.
@type target: basestring|None
@param target: The name of the user to be pinged, or None if the server
is the intended target.
@return: Nothing.
"""
self._ping_core.sendPing(target)
def pong(self, source=None):
"""
This function is called to indicate that a user has replied with a PONG.
@type source: basestring|None
@param source: The name of the user who responded with a PONG, or None if
it was the server.
@rtype: float|None
@return: The number of seconds that passed since the PING was sent or
None if the source isn't being tracked.
"""
if source:
return self._ping_core.removeUser(source)
else:
return self._ping_core.getServerPingTime()
def resetTimeout(self):
"""
This function prevents a fatal PING timeout event from being raised. It
should be called every time data is received from the server.
@return: Nothing.
"""
self._ping_core.resetCountdown()
class _PingCore(threading.Thread):
"""
This class defines an object that manages all PING-related activity on a
server, including user PING timeouts, and server PING-accessibility.
"""
_alive = True #: True until the thread is no longer useful.
_connection = None #: A reference to the _Connection that owns this object.
_server = None #: A reference to the _Server that owns this object.
_user_lock = None #: A lock used to prevent multiple simultaneous accesses to the user list.
_time_of_server_ping = None #: The time at which the last PING was sent to the server.
_server_timeout = None #: The timestamp against which timeout events will be processed.
_server_pinged = False #: Set to True when the server is PINGed to test for activity.
_time_lock = None #: A lock used to prevent multiple simultaneous accesses to the timeout counters.
_users = None
"""
A dictionary of users to whom PINGs have been sent, but who have yet to
reply with a PONG.
Elements in this dictionary take the following form::
{
<username:unicode>: <time_of_ping:float>
}
"""
def __init__(self, connection):
"""
This function is invoked when creating a new _PingCore object.
@type connection: _Connection
@param connection: A reference to the _Connection that owns this object.
@return: Nothing.
"""
threading.Thread.__init__(self)
self._connection = connection
self._server = connection.getServer()
self._user_lock = threading.Lock()
self._time_lock = threading.RLock()
self._users = {}
self._time_of_server_ping = time.time()
self._server_timeout = time.time()
self.setDaemon(True)
self.setName("Ping Core, server %i" % self._server.getContextID())
def kill(self):
"""
This function terminates the _PingCore's execution after its current
iteration.
It should be called when its parent is destroyed.
@return: Nothing.
"""
self._alive = False
def run(self):
"""
This function is executed over the course of the _PingCore's lifetime.
It decrements the time remaining before a PING timeout for each tracked
user, and the time remaining before an automatic PING is sent to the
server to ensure that the connection is still active.
Alternatively, if such a PING has been sent to the server, it counts down
the time before declaring the server unresponsive and raising a
Ping Timeout event.
@return: Nothing.
"""
while self._alive:
start_time = time.time()
self._user_lock.acquire()
timeouts = []
for i, user_time in self._users.iteritems():
if start_time - user_time >= GLOBAL.IRC_IDLE_WAIT_TIME:
timeouts.append(i)
for i in timeouts:
self._server.addEvent(outboundDictionaries.IRC_Ping_Timeout(self._server.getContextID(), i, self._server.getName()))
del self._users[i]
self._user_lock.release()
self._time_lock.acquire()
working_time = start_time - self._server_timeout
			#The fatal test must come first: in the original, the elif below
			#could never fire, because any working_time exceeding the sum also
			#satisfied the plain IRC_IDLE_WAIT_TIME test above it.
			if working_time >= GLOBAL.IRC_IDLE_WAIT_TIME + GLOBAL.IRC_PING_TIMEOUT:
				self._server.addEvent(outboundDictionaries.IRC_Ping_Timeout(self._server.getContextID(), self._server.getName(), None))
				self._server.addEvent(outboundDictionaries.Server_Disconnection(self._server.getContextID(), self._server.getName(), "Ping timeout.", False))
				self._server.disconnect()
			elif working_time >= GLOBAL.IRC_IDLE_WAIT_TIME and not self._server_pinged:
				self._server.addEvent(outboundDictionaries.IRC_Ping_Timeout_Check(self._server.getContextID(), self._server.getName()))
				self.sendPing()
self._time_lock.release()
time.sleep(1)
def getServerPingTime(self):
"""
This function returns the number of seconds that have elapsed since the
IRC server was last pinged by PyRC.
This function can be used to provide a reasonably relevant latency
counter, as long as it is called only when a PONG is received, or
after a "Ping Request Automated" event.
@rtype: float
@return: The number of seconds that have elapsed since PyRC last pinged
the server.
"""
try:
self._time_lock.acquire()
return time.time() - self._time_of_server_ping
finally:
self._time_lock.release()
def removeUser(self, username):
"""
This function is called to indicate that a user has replied with a PONG.
It aborts the user's timeout countdown.
@type username: basestring
@param username: The name of the user who responded with a PONG.
@rtype: float|None
@return: The time at which the user was PINGed, or None if the user
hasn't been PINGed.
"""
#Sanitize input.
username = unicode(username).lower()
self._user_lock.acquire()
ping_time = self._users.get(username)
if ping_time:
ping_time = time.time() - ping_time
del self._users[username]
self._user_lock.release()
return ping_time
def resetCountdown(self):
"""
This function prevents a fatal PING timeout event from being raised. It
should be called every time data is received from the server.
@return: Nothing.
"""
self._time_lock.acquire()
self._server_timeout = time.time()
self._server_pinged = False
self._time_lock.release()
def sendPing(self, username=None):
"""
This function sends a PING to a user on the IRC server, or to the server
itself.
@type username: basestring|None
@param username: The name of the user to be pinged, or None if the server
is the intended target.
@return: Nothing.
"""
ping_time = time.time()
current_time = str(ping_time)
current_time = current_time[:current_time.find('.')]
ping_string = None
if username:
#Sanitize input.
username = unicode(username)
self._user_lock.acquire()
self._users[username.lower()] = ping_time
self._user_lock.release()
ping_string = "PRIVMSG %s :\001PING %s\001" % (username, current_time)
else:
self._time_lock.acquire()
self._server_pinged = True
self._time_of_server_ping = ping_time
self._time_lock.release()
ping_string = "PING :%s" % current_time
try:
self._connection.send(ping_string)
except resources.connection.OutgoingTransmissionError:
self._server.addEvent(outboundDictionaries.Server_Disconnection(self._server.getContextID(), self._server.getName(), u"Connection reset by peer.", False))
self._server.disconnect()
except resources.connection.InvalidStateError: #The socket must have been closed prior to this instruction.
pass
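# --- Illustrative sketch (not part of the original module) ---
# The timeout ladder driven by _PingCore.run(), as a pure function. Nothing
# happens before the idle threshold; between the idle threshold and
# idle + ping-timeout a single probe PING is sent; past that, the link is
# declared dead. The constant values here are assumed placeholders for
# GLOBAL.IRC_IDLE_WAIT_TIME and GLOBAL.IRC_PING_TIMEOUT.
_DEMO_IDLE_WAIT_TIME = 240.0
_DEMO_PING_TIMEOUT = 60.0
def _demo_timeout_action(seconds_idle, already_pinged):
	if seconds_idle >= _DEMO_IDLE_WAIT_TIME + _DEMO_PING_TIMEOUT:
		return "disconnect" #Fatal ping timeout.
	if seconds_idle >= _DEMO_IDLE_WAIT_TIME and not already_pinged:
		return "send ping" #Probe the server once.
	return "wait"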
class _PriorityQueue(object):
"""
This class maintains a series of queues, which are used to prioritise
messages sent to the IRC server, since they need to be throttled to avoid
an "Excessive Flood" kick.
Under a five-queue system, the following guidelines should be used when
assigning priorities::
1: Absolute must-send (KICK)
2: Important (NICK)
3: Significant (NOTICE)
4: Normal (PRIVMSG)
5: Whenever (WHO)
"""
_length = None #: The number of messages sitting in the various queues.
_queues = None #: A list of lists that will behave like queues to organize messages.
_queue_lock = None #: A lock used to prevent multiple simultaneous access to the queue lists.
def __init__(self):
"""
This function is invoked when creating a new _PriorityQueue object.
@return: Nothing.
"""
self._queues = []
self._queue_lock = threading.Lock()
self._length = 0
for i in range(len(GLOBAL.ENUM_SERVER_SEND_PRIORITY) - 1):
self._queues.append([])
def addMessage(self, message, priority):
"""
This function adds a new message to the queue structure.
@type message: basestring
@param message: The string to be sent to the IRC server.
@type priority: GLOBAL.ENUM_SERVER_SEND_PRIORITY.EnumValue
@param priority: The priority at which the message should be queued.
As may be expected, the higher the priority, the sooner the send.
@return: Nothing.
"""
self._queue_lock.acquire()
self._queues[priority.index - 1].insert(0, unicode(message))
self._length += 1
self._queue_lock.release()
def getMessage(self):
"""
This function pops the next message to be sent to the IRC server.
@rtype: unicode|None
@return: The next message to be sent, if any, or None if the queue
structure is unpopulated.
"""
self._queue_lock.acquire()
message = None
for i in range(len(self._queues)):
if self._queues[i]:
message = self._queues[i].pop()
self._length -= 1
break
self._queue_lock.release()
return message
def getMessageCount(self):
"""
		This function returns the number of unsent messages in the queue.
		This count may be useful for making throttling decisions.
@rtype: int
@return: The number of unsent messages in the queue.
"""
try:
self._queue_lock.acquire()
return self._length
finally:
			self._queue_lock.release() #The original referenced self.queue_lock, which does not exist.
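# --- Illustrative sketch (not part of the original module) ---
# How the queue structure above drains: lower-numbered (higher-priority)
# queues always empty first, and within one queue messages leave FIFO,
# because addMessage() inserts at index 0 while getMessage() pops the tail.
def _demo_priority_drain():
	queues = [[] for _ in range(5)] #Stands in for _queues.
	def add(message, priority): #Priority: 1 (must-send) .. 5 (whenever).
		queues[priority - 1].insert(0, message)
	def get():
		for queue in queues:
			if queue:
				return queue.pop()
		return None
	add("WHO #channel", 5)
	add("PRIVMSG #channel :hi", 4)
	add("KICK #channel flooder", 1)
	assert get() == "KICK #channel flooder" #Highest priority first.
	assert get() == "PRIVMSG #channel :hi"
	assert get() == "WHO #channel"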
class _SocketReader(threading.Thread):
"""
This class regularly checks its parent's socket for new data, and sends
what it finds to its parent Server for processing.
"""
_connection = None #: A reference to the _Connection that owns this object.
_server = None #: A reference to the Server that owns this object.
_alive = True #: True until the thread is no longer useful.
def __init__(self, connection):
"""
This function is invoked when creating a new _SocketReader object.
@type connection: _Connection
@param connection: A reference to the _Connection that owns this object.
@return: Nothing.
"""
threading.Thread.__init__(self)
self._connection = connection
self._server = self._connection.getServer()
self.setDaemon(True)
self.setName("Socket Reader, server %i" % self._server.getContextID())
def kill(self):
"""
This function terminates the _SocketReader's execution after its current
iteration.
It should be called when its parent is destroyed.
@return: Nothing.
"""
self._alive = False
def run(self):
"""
This function is executed over the course of the _SocketReader's
lifetime.
It checks the socket for new data continuously, and sends anything it
finds to the Server for processing.
@return: Nothing.
"""
while self._alive:
data = None
try:
data = self._connection.read()
except resources.connection.InvalidStateError: #The socket must have been closed prior to this instruction.
break
except resources.connection.IncomingTransmissionError:
self._server.addEvent(outboundDictionaries.Server_Disconnection(self._server.getContextID(), self._server.getName(), "Connection reset by peer.", False))
self._server.disconnect()
except resources.connection.SocketPollError:
self._server.addEvent(outboundDictionaries.IRC_Ping_Timeout_Check(self._server.getContextID(), self._server.getName()))
try:
self._connection.ping()
except resources.connection.OutgoingTransmissionError:
self._server.addEvent(outboundDictionaries.Server_Disconnection(self._server.getContextID(), self._server.getName(), "Remote host closed socket.", False))
self._server.disconnect()
if data:
data = self._server.processInput(data)
if data: #The server told us to disconnect.
if data[0]:
self._server.addEvent(outboundDictionaries.Server_Disconnection(self._server.getContextID(), self._server.getName(), data[0], not data[1]))
self._server.disconnect()
else:
self._connection.resetTimeout()
class _SocketSender(threading.Thread):
"""
This class regularly checks its parent _Connection for new messages, and
sends them to the IRC server.
"""
_connection = None #: The _Connection that owns this object.
_alive = True #: True until the thread is no longer useful.
def __init__(self, connection):
"""
This function is invoked when creating a new _SocketSender object.
@type connection: _Connection
@param connection: A reference to the _Connection that owns this object.
@return: Nothing.
"""
threading.Thread.__init__(self)
self._connection = connection
self.setDaemon(True)
self.setName("Socket Sender, server %i" % self._connection.getServer().getContextID())
def kill(self):
"""
This function terminates the _SocketSender's execution after its current
iteration.
It should be called when its parent is destroyed.
@return: Nothing.
"""
self._alive = False
def run(self):
"""
This function is executed over the course of the _SocketSender's
lifetime.
It checks its parent for new messages every 0.1 seconds, and sends
anything it finds to the IRC server.
@return: Nothing.
"""
while self._alive:
message = self._connection.getMessage()
if message:
try:
self._connection.send(message)
except resources.connection.InvalidStateError: #The socket must have been closed prior to this instruction.
break
except resources.connection.OutgoingTransmissionError:
server = self._connection.getServer()
					server.addEvent(outboundDictionaries.Server_Disconnection(server.getContextID(), server.getName(), "Remote host closed socket.", False)) #The fourth argument was missing; False matches the other disconnection events.
server.disconnect()
time.sleep(0.1)
class ServerManager(object):
"""
	This class maintains the pool of Server objects managed by PyRC.
"""
_server_lock = None #: A lock used to prevent multiple simultaneous accesses to the server pool.
_connection_counter = 0 #: A counter used to ensure that every server object has a unique ID number.
_servers = None
"""
A dictionary containing a list of all servers managed by this object.
Its elements take the following form::
{
<id_number:int>: <:Server>
}
"""
def __init__(self):
"""
This function is invoked when a new ServerManager object is created.
@return: Nothing.
"""
self._server_lock = threading.Lock()
self._servers = {}
def addServer(self, name, thread_count):
"""
This function creates a blank Server object.
@type name: basestring|None
@param name: The name to use to identify this Server object. None to
allow IRC-network-based resolution.
@type thread_count: int
@param thread_count: The number of worker threads to spawn for this
Server.
@rtype: Server
@return: The newly created Server.
"""
self._server_lock.acquire()
self._connection_counter += 1
server = Server(self._connection_counter, name, thread_count)
self._servers[self._connection_counter] = server
self._server_lock.release()
return server
def getServer(self, id_number):
"""
This function will retrieve the specified Server from the pool.
@type id_number: int
@param id_number: An int containing the number of the server to be
retrieved.
@rtype: Server|None
@return: The requested Server object, or None if the server could not
be found.
"""
try:
self._server_lock.acquire()
return self._servers.get(id_number)
finally:
self._server_lock.release()
def getServers(self):
"""
This function will return a list of all Servers in the pool.
@rtype: list
@return: A list containing all Servers in the pool, ordered by ID.
"""
self._server_lock.acquire()
servers = []
for i in sorted(self._servers.keys()):
servers.append(self._servers[i])
self._server_lock.release()
return servers
def removeServer(self, id_number):
"""
This function will remove the specified Server from the pool.
@type id_number: int
@param id_number: An int containing the number of the server to be
removed.
@return: Nothing.
"""
self._server_lock.acquire()
server = self._servers.get(id_number)
if server:
server.close()
del self._servers[id_number]
self._server_lock.release()
class Error(Exception):
"""
This class serves as the base from which all exceptions native to this
module are derived.
"""
description = None #: A description of the error.
def __str__(self):
"""
This function returns an ASCII version of the description of this Error.
When possible, the Unicode version should be used instead.
@rtype: str
@return: The description of this error.
"""
return str(self.description)
def __unicode__(self):
"""
This function returns the description of this Error.
@rtype: unicode
@return: The description of this error.
"""
		return self.description
def __init__(self, description):
"""
This function is invoked when creating a new Error object.
@type description: basestring
@param description: A description of the problem that this object
represents.
@return: Nothing.
"""
self.description = unicode(description)
class InstantiationError(Error):
"""
This class represents problems that might occur during class instantiation.
"""
def __init__(self, description):
"""
This function is invoked when creating a new InstantiationError object.
@type description: basestring
@param description: A description of the problem that this object
represents.
@return: Nothing.
"""
Error.__init__(self, description)
class ReconnectionError(Error):
"""
This class represents problems that might occur when attempting to reconnect
to a server.
"""
def __init__(self, description):
"""
This function is invoked when creating a new ReconnectionError object.
@type description: basestring
@param description: A description of the problem that this object
represents.
@return: Nothing.
"""
Error.__init__(self, description)
|
flan/puukusoft-pyrc
|
pyrc_irc_abstract/irc_server.py
|
Python
|
gpl-3.0
| 71,250
|
import squeakspace.common.util as ut
import squeakspace.common.util_http as ht
import squeakspace.proxy.server.db_sqlite3 as db
import squeakspace.common.squeak_ex as ex
import config
def post_handler(environ):
query = ht.parse_post_request(environ)
cookies = ht.parse_cookies(environ)
user_id = ht.get_required_cookie(cookies, 'user_id')
session_id = ht.get_required_cookie(cookies, 'session_id')
node_name = ht.get_required(query, 'node_name')
url = ht.get_required(query, 'url')
real_node_name = ht.get_required(query, 'real_node_name')
fingerprint = ht.get_optional(query, 'fingerprint')
conn = db.connect(config.db_path)
try:
c = db.cursor(conn)
db.set_node_addr(c, user_id, session_id, node_name, url, real_node_name, fingerprint)
db.commit(conn)
raise ht.ok_json({'status' : 'ok'})
except ex.SqueakException as e:
raise ht.convert_squeak_exception(e)
finally:
db.close(conn)
def get_handler(environ):
query = ht.parse_get_request(environ)
cookies = ht.parse_cookies(environ)
user_id = ht.get_required_cookie(cookies, 'user_id')
session_id = ht.get_required_cookie(cookies, 'session_id')
node_name = ht.get_required(query, 'node_name')
conn = db.connect(config.db_path)
try:
c = db.cursor(conn)
addr = db.read_node_addr(c, user_id, session_id, node_name)
raise ht.ok_json({'status' : 'ok', 'addr' : addr})
except ex.SqueakException as e:
raise ht.convert_squeak_exception(e)
finally:
db.close(conn)
def delete_handler(environ):
query = ht.parse_post_request(environ)
cookies = ht.parse_cookies(environ)
user_id = ht.get_required_cookie(cookies, 'user_id')
session_id = ht.get_required_cookie(cookies, 'session_id')
node_name = ht.get_required(query, 'node_name')
conn = db.connect(config.db_path)
try:
c = db.cursor(conn)
db.delete_node_addr(c, user_id, session_id, node_name)
db.commit(conn)
raise ht.ok_json({'status' : 'ok'})
except ex.SqueakException as e:
raise ht.convert_squeak_exception(e)
finally:
db.close(conn)
def main_handler(environ):
ht.dispatch_on_method(environ, {
'POST' : post_handler,
'GET' : get_handler,
'DELETE' : delete_handler})
def application(environ, start_response):
return ht.respond_with_handler(environ, start_response, main_handler)
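# --- Illustrative sketch (not part of the original script) ---
# The handlers above *raise* their responses. This assumes ht.ok_json()
# builds a response exception and ht.respond_with_handler() catches it,
# which also guarantees each `finally: db.close(conn)` runs before the
# response is sent. A minimal model of that idiom:
class _DemoResponse(Exception):
    def __init__(self, status, body):
        Exception.__init__(self, status)
        self.status, self.body = status, body

def _demo_respond(handler, environ):
    try:
        handler(environ)
        return ('500 Internal Server Error', 'handler fell through')
    except _DemoResponse as resp:
        return (resp.status, resp.body)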
|
eek6/squeakspace
|
www/proxy/scripts/local/node_addr.py
|
Python
|
gpl-3.0
| 2,510
|
import os
from PyQt4 import QtCore, QtGui
from Extensions.Global import sizeformat
class SearchWidget(QtGui.QLabel):
def __init__(self, parent):
QtGui.QLabel.__init__(self, parent)
self._parent = parent
self.setStyleSheet("""background: rgba(0, 0, 0, 50); border-radius: 0px;""")
self.setFixedSize(300, 28)
self.setPixmap(QtGui.QPixmap("Icons\\line"))
self.setScaledContents(True)
self.searchTimer = QtCore.QTimer()
self.searchTimer.setSingleShot(True)
self.searchTimer.setInterval(200)
self.searchTimer.timeout.connect(self.gotoText)
self.textFindLine = QtGui.QLineEdit(self)
self.textFindLine.setStyleSheet("background: white; border-radius: 0px;")
self.textFindLine.setGeometry(3, 2, 270, 23)
self.textFindLine.grabKeyboard()
self.textFindLine.setTextMargins(2, 1, 22, 1)
self.textFindLine.textChanged.connect(self.show)
self.textFindLine.textChanged.connect(self.searchTimer.start)
self.clearTextFindLineButton = QtGui.QPushButton(self.textFindLine)
self.clearTextFindLineButton.setGeometry(250, 2, 15, 15)
self.clearTextFindLineButton.setFlat(True)
self.clearTextFindLineButton.setIcon(QtGui.QIcon("Icons\\clearLeft"))
self.clearTextFindLineButton.setStyleSheet("background: white; border: none;")
self.clearTextFindLineButton.clicked.connect(self.textFindLine.clear)
self.finderCloseButton = QtGui.QToolButton(self)
self.finderCloseButton.setStyleSheet("background: none;")
self.finderCloseButton.setGeometry(278, 6, 15, 15)
self.finderCloseButton.setAutoRaise(True)
self.finderCloseButton.setIconSize(QtCore.QSize(25, 25))
self.finderCloseButton.setIcon(QtGui.QIcon("Icons\\Cross"))
self.finderCloseButton.clicked.connect(self.hide)
def gotoText(self):
text = self.textFindLine.text()
self._parent.gotoText(text)
class VaultManager(QtGui.QListWidget):
def __init__(self, vaultItemCountLabel, sizeLabel, busyIndicatorWidget, parent):
QtGui.QListWidget.__init__(self, parent)
self.redCenter = parent
self.setLayoutMode(1)
self.setBatchSize(1)
self.setUniformItemSizes(True)
self.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
self.setAlternatingRowColors(True)
self.setIconSize(QtCore.QSize(30, 30))
self.itemSelectionChanged.connect(self.selectionMade)
searchWidget = SearchWidget(self)
searchWidget.move(80, 0)
searchWidget.hide()
self.vaultItemCountLabel = vaultItemCountLabel
self.sizeLabel = sizeLabel
self.busyIndicatorWidget = busyIndicatorWidget
self.vaultZeroContentLabel = QtGui.QLabel("Empty", self)
self.vaultZeroContentLabel.setGeometry(150, 20, 100, 50)
self.vaultZeroContentLabel.setAlignment(QtCore.Qt.AlignCenter)
self.vaultZeroContentLabel.setStyleSheet("background: none; font: 20px; color: lightgrey;")
self.vaultZeroContentLabel.hide()
self.vaultCleanUp()
def gotoText(self, text):
for i in self.vaultKeyList:
if self.logDict[i].split('|')[0].startswith(text):
index = self.vaultKeyList.index(i)
self.setCurrentRow(index)
break
def loadVault(self):
try:
logList = []
self.vaultKeyList = []
file = open("Vault\\LOG","r")
for i in file.readlines():
if i.strip() == '':
pass
else:
logList.append(tuple(i.strip().split('||')))
file.close()
self.logDict = dict(logList)
self.vaultContentsSize = 0
self.clear()
size = QtCore.QSize()
size.setHeight(40)
for key, property in self.logDict.items():
self.vaultKeyList.append(key)
## extract attributes
attrib = self.logDict[key].split('|')
# get locking time
time_split = key.split('=')[0].split('-')
            date = QtCore.QDate(int(time_split[0]), int(time_split[1]),
                                int(time_split[2])).toString()  # year, month, day; the original indexed [3], skipping the day field
item = QtGui.QListWidgetItem(attrib[0])
item.setToolTip('Original Location: ' + attrib[2] + '\nModified: ' + date)
item.setSizeHint(size)
# assign icon
if attrib[1] == "exec":
item.setIcon(QtGui.QIcon("Icons\\executable"))
else:
item.setIcon(QtGui.QIcon("Icons\\unknown"))
self.addItem(item)
self.vaultContentsSize += int(attrib[3])
self.vaultItemCountLabel.setText("Items: " + str(len(self.logDict)))
# display size of total files
self.sizeLabel.setText(sizeformat(self.vaultContentsSize))
self.showVaultEmptyLabel()
        except Exception:  # a bare except would also swallow SystemExit/KeyboardInterrupt
self.redCenter.showMessage("Problem loading items in the vault.")
self.redCenter.hideMessage()
def showVaultEmptyLabel(self):
if self.count() > 0:
self.vaultZeroContentLabel.hide()
else:
self.vaultZeroContentLabel.show()
def selectionMade(self):
self.selected = self.selectedItems()
if len(self.selected) > 0:
self.redCenter.unlockButton.setEnabled(True)
self.redCenter.deleteButton.setEnabled(True)
else:
self.redCenter.unlockButton.setEnabled(False)
self.redCenter.deleteButton.setEnabled(False)
def vaultCleanUp(self):
logList = []
file = open("Vault\\LOG","r")
for i in file.readlines():
if i.strip() == '':
pass
else:
logList.append(tuple(i.strip().split('||')))
file.close()
logDict = dict(logList)
filesList = os.listdir("Vault\\Files")
bookedFilesList = []
for i, v in logDict.items():
bookedFilesList.append(i)
for i in filesList:
if i not in bookedFilesList:
path = os.path.join("Vault\\Files", i)
try:
os.remove(path)
                except OSError:  # the file may be locked or already gone
pass
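# --- Illustrative sketch (not part of the original module) ---
# The Vault LOG record shape assumed by loadVault() and vaultCleanUp():
# one "key||value" record per line, where the key doubles as the stored
# file name in Vault\Files and starts with a timestamp, and the value
# packs attributes with "|" as name|type|original_location|size. The
# record below is hypothetical.
def _demo_parse_log_line(line="2014-05-03-17=0001||report.exe|exec|C:\\Docs\\report.exe|20480"):
    key, value = line.strip().split('||')
    name, kind, location, size = value.split('|')
    return key, {'name': name, 'type': kind,
                 'location': location, 'size': int(size)}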
|
fortharris/RedCenter
|
Extensions/VaultManager.py
|
Python
|
gpl-3.0
| 6,621
|
"""
Copyright (C) 2015 Quinn D Granfor <spootdev@gmail.com>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
version 2, as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License version 2 for more details.
You should have received a copy of the GNU General Public License
version 2 along with this program; if not, write to the Free
Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
"""
import gzip
import inspect
import json
import time
import adba  # used by com_net_anidb_connect() below; this import was missing
from common import common_file
from common import common_logging_elasticsearch_httpx
from common import common_network_async
class CommonMetadataANIdb:
"""
Class for interfacing with anidb
"""
def __init__(self, db_connection):
self.adba_connection = None
self.db_connection = db_connection
async def com_net_anidb_fetch_titles_file(self):
"""
Fetch the tarball of anime titles
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
# check to see if local titles file is older than 24 hours
if common_file.com_file_modification_timestamp('./cache/anidb_titles.gz') \
< (time.time() - 86400):
await common_network_async.mk_network_fetch_from_url_async(
'http://anidb.net/api/anime-titles.xml.gz',
'./cache/anidb_titles.gz')
return True # new file
return False
async def com_net_anidb_save_title_data_to_db(self, title_file='./cache/anidb_titles.gz'):
"""
Save anidb title data to database
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
file_handle = gzip.open(title_file, 'rb')
# file_handle = gzip.open(title_file, 'rt', encoding='utf-8') # python 3.3+
anime_aid = None
anime_title = None
anime_title_ja = None
for file_line in file_handle.readlines():
# common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info', message_text=
# {'stuff':'line: %s', file_line.decode('utf-8'))
if file_line.decode('utf-8').find('<anime aid="') != -1:
anime_aid = file_line.decode(
'utf-8').split('"', 1)[1].rsplit('"', 1)[0]
# common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info', message_text=
# {'stuff':'aid: %s', anime_aid)
elif file_line.decode('utf-8').find('title xml:lang="ja"') != -1:
anime_title_ja = file_line.decode(
'utf-8').split('>', 1)[1].rsplit('<', 1)[0]
# common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info', message_text=
# {'stuff':'title: %s', anime_title_ja)
elif file_line.decode('utf-8').find('title xml:lang="en"') != -1:
anime_title = file_line.decode(
'utf-8').split('>', 1)[1].rsplit('<', 1)[0]
# common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info', message_text=
# {'stuff':'title: %s', anime_title)
elif file_line.decode('utf-8').find('</anime>') != -1:
if self.db_connection.db_meta_anime_meta_by_id(anime_aid) is None:
if anime_title is None:
anime_title = anime_title_ja
self.db_connection.db_meta_anime_title_insert(
{'anidb': anime_aid}, anime_title,
None, None, None, None, None)
                # reset each time so the ja title is used as a fallback when no en title exists
anime_title = None
file_handle.close()
common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info',
message_text={'stuff': 'end'})
async def com_net_anidb_aid_by_title(self, title_to_search):
"""
Find AID by title
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
# check the local DB
local_db_result = self.db_connection.db_meta_anime_title_search(
title_to_search)
if local_db_result is None:
# check to see if local titles file is older than 24 hours
            if await self.com_net_anidb_fetch_titles_file():  # the coroutine must be awaited; unawaited it was always truthy
                # since new titles file....import it, then recheck by title
                # (the import step is an addition; without it the recheck
                # would query the same, stale database)
                await self.com_net_anidb_save_title_data_to_db()
                return await self.com_net_anidb_aid_by_title(title_to_search)
else:
return None
else:
return local_db_result
async def com_net_anidb_connect(self, user_name, user_password):
"""
Remote api calls
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
self.adba_connection = adba.Connection(log=True)
try:
self.adba_connection.auth(user_name, user_password)
except Exception as err_code:
common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='error',
message_text={"exception msg":
err_code})
return self.adba_connection
async def com_net_anidb_logout(self):
"""
Logout of anidb
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
self.adba_connection.logout()
async def com_net_anidb_stop(self):
"""
Close the anidb connect and stop the thread
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
self.adba_connection.stop()
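# --- Illustrative sketch (not part of the original module) ---
# Shape of the anidb titles dump that the line scanner in
# com_net_anidb_save_title_data_to_db() assumes, inferred from its string
# matches; the values below are hypothetical.
_DEMO_TITLES_FRAGMENT = b'''<anime aid="1">
<title xml:lang="ja">Example ja title</title>
<title xml:lang="en">Example en title</title>
</anime>
'''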
|
MediaKraken/MediaKraken_Deployment
|
source/metadata/metadata_provider_anidb.py
|
Python
|
gpl-3.0
| 10,547
|
"""template_III URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from app import views as app_views # new
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^$', app_views.home, name='home'), # new
url(r'^add/(\d+)/(\d+)/$', app_views.add, name='add'), # new
    # if the 'add' route above is replaced with the 'sum' route below, the app still works fine
# url(r'^sum/(\d+)/(\d+)/$', app_views.add, name='add'), # new
]
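# --- Illustrative sketch (not part of the original file) ---
# The add pattern above captures two digit groups and passes them to the
# view as positional string arguments; a matching view (hypothetical,
# living in app/views.py) would look like:
#
#     def add(request, a, b):
#         return HttpResponse(str(int(a) + int(b)))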
|
mjiang-27/django_learn
|
template_III/template_III/urls.py
|
Python
|
gpl-3.0
| 1,076
|
# -*- coding: utf-8 -*-
# Copyright (c) 2009 Raymond Hettinger
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from UserDict import DictMixin
class OrderedDict(dict, DictMixin):
def __init__(self, *args, **kwds):
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__end
except AttributeError:
self.clear()
self.update(*args, **kwds)
def clear(self):
self.__end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.__map = {} # key --> [key, prev, next]
dict.clear(self)
def __setitem__(self, key, value):
if key not in self:
end = self.__end
curr = end[1]
curr[2] = end[1] = self.__map[key] = [key, curr, end]
dict.__setitem__(self, key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
key, prev, next = self.__map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.__end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.__end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def popitem(self, last=True):
if not self:
raise KeyError('dictionary is empty')
if last:
key = reversed(self).next()
else:
key = iter(self).next()
value = self.pop(key)
return key, value
def __reduce__(self):
items = [[k, self[k]] for k in self]
tmp = self.__map, self.__end
del self.__map, self.__end
inst_dict = vars(self).copy()
self.__map, self.__end = tmp
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def keys(self):
return list(self)
setdefault = DictMixin.setdefault
update = DictMixin.update
pop = DictMixin.pop
values = DictMixin.values
items = DictMixin.items
iterkeys = DictMixin.iterkeys
itervalues = DictMixin.itervalues
iteritems = DictMixin.iteritems
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
def copy(self):
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
if isinstance(other, OrderedDict):
if len(self) != len(other):
return False
for p, q in zip(self.items(), other.items()):
if p != q:
return False
return True
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
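# --- Illustrative sketch (not part of the original module) ---
# The linked-list bookkeeping above preserves insertion order: __end is a
# [None, prev, next] sentinel that points at itself when empty, and __map
# stores a [key, prev, next] node per key, so iteration walks the chain
# instead of hash order.
def _demo_order():
    d = OrderedDict()
    d['b'] = 1
    d['a'] = 2
    d['c'] = 3
    assert list(d) == ['b', 'a', 'c']  # insertion order, not hash order
    del d['a']
    d['a'] = 4  # deleting and re-adding moves the key to the end
    assert d.popitem() == ('a', 4)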
|
WouterVH/bots
|
src/bots/bots_ordereddict.py
|
Python
|
gpl-3.0
| 4,249
|
class GameMenu(object):
def __init__(self, menu_name, **options):
self.menu_name = menu_name
self.options = options
|
Jazende/Jaztroids
|
gamemenu.py
|
Python
|
gpl-3.0
| 156
|
#!/usr/bin/python3
# Copyright 2018 Francisco Pina Martins <f.pinamartins@gmail.com>
# This file is part of geste2lfmm.
# geste2lfmm is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# geste2lfmm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with geste2lfmm. If not, see <http://www.gnu.org/licenses/>.
# Usage: python3 geste2lfmm.py file.geste file.lfmm
from collections import OrderedDict
def parse_geste(infile_name):
"""
    Parses a GESTE file and returns an OrderedDict with:
{"Population_name":[Freq_ref_allele_on SNP_1,Freq_ref_allele_on SNP_2,...]}
"""
infile = open(infile_name, "r")
pop_freqs = OrderedDict()
pop_starter = "[pop]="
popname = ""
for line in infile:
# Neat trick to ignore data that is not SNP info
# This code section should be very performant since it replaces most
# if - else tests with try -> except statements
line = line.split()
try:
int(line[0])
except ValueError: # In case it's a new section
if line[0].startswith(pop_starter):
popname = "Pop %s" % line[0].strip().replace(pop_starter, "")
pop_freqs[popname] = []
continue
except IndexError: # In case it's an empty line
continue
try:
ref_frequency = round(int(line[3]) / int(line[1]), 3)
except ZeroDivisionError:
ref_frequency = 9
pop_freqs[popname].append(ref_frequency)
infile.close()
return pop_freqs
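# --- Illustrative sketch (not part of the original script) ---
# Minimal GESTE fragment of the shape parse_geste() expects; the numbers
# are hypothetical. A "[pop]=N" header opens each population block, and
# every SNP row carries the sample size in column 2 and the reference
# allele count in column 4:
#
#     [pop]=1
#     1  20  2  14  6
#     2  0   2  0   0
#
# SNP 1 gives round(14 / 20, 3) == 0.7; SNP 2 has a zero sample size, so
# the frequency is recorded as the missing-data marker 9.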
def write_lfmm(pop_freqs, lfmm_filename):
"""
    Write an LFMM input file based on the OrderedDict extracted from the GESTE
file.
"""
outfile = open(lfmm_filename, 'w')
for name, freq in pop_freqs.items():
outfile.write(name + "\t")
outfile.write("\t".join(map(str, freq)) + "\n")
outfile.close()
if __name__ == "__main__":
from sys import argv
POP_FREQS = parse_geste(argv[1])
write_lfmm(POP_FREQS, argv[2])
|
StuntsPT/pyRona
|
helper_scripts/geste2lfmm.py
|
Python
|
gpl-3.0
| 2,430
|