| repo_name (stringlengths 5–100) | ref (stringlengths 12–67) | path (stringlengths 4–244) | copies (stringlengths 1–8) | content (stringlengths 0–1.05M, nullable ⌀) |
|---|---|---|---|---|
sniemi/SamPy
|
refs/heads/master
|
focus/phaseretrievalresults.py
|
1
|
"""
Combines the Phase Retrieval software results, produces plots and calculates focus with and without breathing correction.
USAGE:
python PhaseRetResults.py
HISTORY:
Created on Dec 17, 2009
:author: Sami-Matias Niemi
:contact: niemi@stsci.edu
:version: 0.91
:todo: maybe introduce sigma clipping to the means?
:todo: the way the legend has been implemented is really dirty;
it should be done better.
"""
import matplotlib
matplotlib.rc('text', usetex=True)
matplotlib.use('PDF')
matplotlib.rcParams['legend.fontsize'] = 9
import pyfits as PF
import pylab as P
import numpy as N
import scipy.interpolate as I
import glob as G
import time
from matplotlib import cm
from matplotlib.patches import Circle
__author__ = 'Sami-Matias Niemi'
__version__ = '0.91'
class PhaseRetResults():
def __init__(self, cameras, cols):
self.cameras = cameras
self.cols = cols
def _fromJulian(self, j):
"""
Converts Modified Julian days to human readable format
:return: human readable date and time
"""
days = j - 40587 # MJD 40587 = Jan 1, 1970 (the Unix epoch)
sec = days * 86400.0
return time.gmtime(sec)
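# A quick sanity check (hypothetical value, not from the original run):
# MJD 40587 is the Unix epoch, so MJD 55182 should map to 2009-12-17, i.e.
# PhaseRetResults(['ACS'], {})._fromJulian(55182)[:3] == (2009, 12, 17)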
def readBreathing(self, file):
jul = []
flat = []
for line in open(file).readlines():
if 'Julian' in line: continue
else:
t = line.strip().split()
jul.append(float(t[0]))
flat.append(float(t[-1]))
return N.array([N.array(jul), N.array(flat)])
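# Expected input layout (inferred from the parsing above, not documented
# in the original): a header line containing the word "Julian", followed
# by whitespace-separated rows whose first column is the MJD and whose
# last column is the flat breathing value.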
def readResults(self, file):
"""
"""
x, y = -999, -999
fh = open(file)
out = []
for line in fh.readlines():
for camera in self.cameras:
if camera in line:
#print 'Found %s' % camera
tmp = line.strip().split()
x = tmp[1]
y = tmp[2]
cam = camera
break
tmp = line.strip().split()
try:
out.append([cam, int(x), int(y), tmp[self.cols['file']],
float(tmp[self.cols['mjd']]), float(tmp[self.cols['focus']])])
except (NameError, IndexError, ValueError):
#skip header lines and rows without a full result
pass
return out
def findFiles(self, data):
tmp = []
for line in data:
new = True
for x in tmp:
if line[3] == x: new = False
if new: tmp.append(line[3])
return tmp
def plotStars(self, file, ext, xpos, ypos, rad=25):
"""
"""
if ext == 1: chip = 2
if ext == 4: chip = 1
#manipulate data
data = PF.open(file)[ext].data
data[data <= 1.0] = 1.0
data = N.log10(data)
ax = P.subplot(111)
b = P.gca()
ims = ax.imshow(data,
origin='lower',
cmap=cm.gray,
interpolation='none',
vmin=0.0,
vmax=3.0)
cb = P.colorbar(ims, orientation='horizontal')
cb.set_label(r'$\log_{10}(Counts)$')
#loop over xpos and ypos and ratio and draw circles
count = 1
for x, y in zip(xpos, ypos):
cir = Circle((x + 1, y + 1), radius=rad, fc='none', ec='r')
b.add_patch(cir)
P.annotate('Star %i' % count,
xy=(x, y + 70),
horizontalalignment='center',
verticalalignment='center',
style='italic', size='xx-small',
color='red')
count += 1
P.title('Focus Stars of %s Chip %i' % (file[:-9], chip))
P.savefig('%sStarsChip%i.pdf' % (file[:-7], chip))
P.close()
def _getStats(self, data):
res = []
tmp = []
t = {}
for line in data:
diff = True
for x in tmp:
if line[0] == x: diff = False
if diff: tmp.append(line[0])
for x in tmp:
for line in data:
if line[0] == x: t.setdefault(x, []).append(line[1])
for key in t:
res.append([key, N.mean(t[key]), N.std(t[key]), N.shape(t[key])[0]])
return res
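# _getStats returns one [key, mean, std, count] row per unique key; e.g.
# (hypothetical input) [['ACS', 1.0], ['ACS', 3.0]] yields
# [['ACS', 2.0, 1.0, 2]].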
def plotFocus(self, chip1, chip2, brdata=[], noBreathing=False):
d = []
meanACS = []
meanWFC3 = []
brA = {}
brW = {}
obsj = G.glob('j*.fits')[0][:7]
obsi = G.glob('i*.fits')[0][:7]
#get some data
for line in chip1:
d.append(line[4])
if 'ACS' in line[0]:
meanACS.append([line[4], line[5]])
if 'WFC3' in line[0]:
meanWFC3.append([line[4], line[5]])
for line in chip2:
if 'ACS' in line[0]:
meanACS.append([line[4], line[5]])
if 'WFC3' in line[0]:
meanWFC3.append([line[4], line[5]])
avd = self._fromJulian(N.mean(N.array(d)))
fig = P.figure()
ax = fig.add_subplot(111)
#get stats
statACS = self._getStats(meanACS)
statWFC3 = self._getStats(meanWFC3)
acsJDs = [line[0] for line in statACS]
wfcJDs = [line[0] for line in statWFC3]
#interpolated breathing values
if not noBreathing:
acsBreathing = I.interp1d(brdata[0, :], brdata[1, :], kind='linear')
wfcBreathing = I.interp1d(brdata[0, :], brdata[1, :], kind='linear')
for x in acsJDs:
brA[x] = acsBreathing(x)
for x in wfcJDs:
brW[x] = wfcBreathing(x)
for line in chip1:
if 'ACS' in line[0]:
ac = ax.plot(line[4], line[5] - brA[line[4]], 'bs', zorder=7)
if 'WFC3' in line[0]:
wf = ax.plot(line[4], line[5] - brW[line[4]], 'ro', zorder=7)
for line in chip2:
if 'ACS' in line[0]:
ac2 = ax.plot(line[4], line[5] - brA[line[4]], 'gd', zorder=7)
if 'WFC3' in line[0]:
wf2 = ax.plot(line[4], line[5] - brW[line[4]], 'mx', zorder=7)
#plot mean values
acsf = [line[1] - brA[line[0]] for line in statACS]
wfcf = [line[1] - brW[line[0]] for line in statWFC3]
eac = ax.errorbar(acsJDs, acsf,
yerr=[line[2] / N.sqrt(line[3]) for line in statACS], marker='H', mfc='yellow',
ms=9, mec='magenta', ls='None', mew=1.3, ecolor='magenta', zorder=50)
ewf = ax.errorbar(wfcJDs, wfcf,
yerr=[line[2] / N.sqrt(line[3]) for line in statWFC3], marker='o', mfc='cyan',
ms=5, mec='magenta', ls='None', mew=1.3, ecolor='magenta', zorder=50)
print '\nBreathing corrections:\n Camera MJD correction'
for x in acsJDs:
print 'ACS %f %f' % (x, brA[x])
for x in wfcJDs:
print 'WFC3 %f %f' % (x, brW[x])
print '\nBreathing corrected focus:'
print 'Julian J-L focus error camera'
print '%i %9i %9.2f %9.3f %5s' % (
int(N.mean(acsJDs)), (int(N.mean(acsJDs)) - 48005), N.mean(acsf), N.std(acsf) / N.sqrt(len(acsf)),
'ACS')
print '%i %9i %9.2f %9.3f %6s' % (
int(N.mean(wfcJDs)), (int(N.mean(wfcJDs)) - 48005), N.mean(wfcf), N.std(wfcf) / N.sqrt(len(wfcf)),
'WFC3')
else:
for line in chip1:
if 'ACS' in line[0]:
ac = ax.plot(line[4], line[5], 'bs', zorder=7)
if 'WFC3' in line[0]:
wf = ax.plot(line[4], line[5], 'ro', zorder=7)
for line in chip2:
if 'ACS' in line[0]:
ac2 = ax.plot(line[4], line[5], 'gd', zorder=7)
if 'WFC3' in line[0]:
wf2 = ax.plot(line[4], line[5], 'mx', zorder=7)
#plot mean values
acsf = [line[1] for line in statACS]
wfcf = [line[1] for line in statWFC3]
eac = ax.errorbar(acsJDs, acsf,
yerr=[line[2] / N.sqrt(line[3]) for line in statACS], marker='H', mfc='yellow',
ms=9, mec='magenta', ls='None', mew=1.3, ecolor='magenta', zorder=50)
ewf = ax.errorbar(wfcJDs, wfcf,
yerr=[line[2] / N.sqrt(line[3]) for line in statWFC3], marker='o', mfc='cyan',
ms=5, mec='magenta', ls='None', mew=1.3, ecolor='magenta', zorder=50)
print '\nWithout breathing correction:'
print 'OBS date JD focus error'
print '%6s %11s %7i %6.2f %8.3f' % (obsj, time.strftime(('%d/%m/%y'), avd),
int(N.mean(acsJDs)),
N.mean(acsf),
N.std(acsf) / N.sqrt(len(acsf)))
print '%6s %11s %7i %6.2f %8.3f' % (obsi, time.strftime(('%d/%m/%y'), avd),
int(N.mean(wfcJDs)),
N.mean(wfcf),
N.std(wfcf) / N.sqrt(len(wfcf)))
times = []
for m in ax.get_xticks():
x = time.strftime(("%H:%M:%S"), self._fromJulian(m))
times.append(x)
ax.set_xticklabels(times)
#zero focus line
ax.axhline(0, color='k', ls='--', lw=0.8)
if noBreathing:
P.title('Focus Measurement (No breathing correction)')
else:
P.title('Focus Measurement (breathing corrected)')
try:
P.legend((ac[0], wf[0], ac2[0], wf2[0], eac[0], ewf[0]),
['ACS chip 1', 'WFC3 chip 1', 'ACS chip 2', 'WFC3 chip 2', 'ACS Mean', 'WFC3 Mean'],
fancybox=True, shadow=True, numpoints=1)
except NameError:
#some series may be missing (e.g. no WFC3 chip 1 data was plotted)
P.legend((ac[0], ac2[0], wf2[0], eac[0], ewf[0]),
['ACS chip 1', 'ACS chip 2', 'WFC3 chip 2', 'ACS Mean', 'WFC3 Mean'],
fancybox=True, shadow=True, numpoints=1)
P.xlabel('%s' % time.strftime(('%d %b %Y'), avd))
P.ylabel(r'Defocus [SM $\mu$m]')
if noBreathing:
P.savefig('FullFocusNoBreathing.pdf')
else:
P.savefig('FullFocus.pdf')
print '\n\n'
if __name__ == '__main__':
#define some variables
cols = {'file': 0,
'target': 1,
'mjd': 2,
'date': 3,
'time': 4,
'focus': 6}
cameras = ['ACS', 'WFC3']
PR = PhaseRetResults(cameras, cols)
#Read the stuff in
c1 = PR.readResults('resultsChip1.txt') #chip 1
c2 = PR.readResults('resultsChip2.txt') #chip 2
#read breathing values
brdata = PR.readBreathing('breathing.txt')
#make a plot without breathing correction
PR.plotFocus(c1, c2, noBreathing=True)
#make a plot with breathing correction
PR.plotFocus(c1, c2, brdata)
#plot stars
f1 = PR.findFiles(c1)
f2 = PR.findFiles(c2)
for file in f1:
x = []
y = []
for line in c1:
if line[3] == file:
x.append(line[1])
y.append(line[2])
PR.plotStars(file + '_flt.fits', 4, x, y)
for file in f2:
x = []
y = []
for line in c2:
if line[3] == file:
x.append(line[1])
y.append(line[2])
PR.plotStars(file + '_flt.fits', 1, x, y)
|
jsilter/scipy
|
refs/heads/master
|
scipy/stats/_distr_params.py
|
7
|
"""
Sane parameters for stats.distributions.
"""
distcont = [
['alpha', (3.5704770516650459,)],
['anglit', ()],
['arcsine', ()],
['beta', (2.3098496451481823, 0.62687954300963677)],
['betaprime', (5, 6)],
['bradford', (0.29891359763170633,)],
['burr', (10.5, 4.3)],
['cauchy', ()],
['chi', (78,)],
['chi2', (55,)],
['cosine', ()],
['dgamma', (1.1023326088288166,)],
['dweibull', (2.0685080649914673,)],
['erlang', (10,)],
['expon', ()],
['exponpow', (2.697119160358469,)],
['exponweib', (2.8923945291034436, 1.9505288745913174)],
['f', (29, 18)],
['fatiguelife', (29,)], # correction numargs = 1
['fisk', (3.0857548622253179,)],
['foldcauchy', (4.7164673455831894,)],
['foldnorm', (1.9521253373555869,)],
['frechet_l', (3.6279911255583239,)],
['frechet_r', (1.8928171603534227,)],
['gamma', (1.9932305483800778,)],
['gausshyper', (13.763771604130699, 3.1189636648681431,
2.5145980350183019, 5.1811649903971615)], # veryslow
['genexpon', (9.1325976465418908, 16.231956600590632, 3.2819552690843983)],
['genextreme', (-0.1,)],
['gengamma', (4.4162385429431925, 3.1193091679242761)],
['genhalflogistic', (0.77274727809929322,)],
['genlogistic', (0.41192440799679475,)],
['genpareto', (0.1,)], # use case with finite moments
['gilbrat', ()],
['gompertz', (0.94743713075105251,)],
['gumbel_l', ()],
['gumbel_r', ()],
['halfcauchy', ()],
['halflogistic', ()],
['halfnorm', ()],
['hypsecant', ()],
['invgamma', (4.0668996136993067,)],
['invgauss', (0.14546264555347513,)],
['invweibull', (10.58,)],
['johnsonsb', (4.3172675099141058, 3.1837781130785063)],
['johnsonsu', (2.554395574161155, 2.2482281679651965)],
['ksone', (1000,)], # replace 22 by 100 to avoid failing range, ticket 956
['kstwobign', ()],
['laplace', ()],
['levy', ()],
['levy_l', ()],
['levy_stable', (0.35667405469844993,
-0.67450531578494011)], # NotImplementedError
# rvs not tested
['loggamma', (0.41411931826052117,)],
['logistic', ()],
['loglaplace', (3.2505926592051435,)],
['lognorm', (0.95368226960575331,)],
['lomax', (1.8771398388773268,)],
['maxwell', ()],
['mielke', (10.4, 3.6)],
['nakagami', (4.9673794866666237,)],
['ncf', (27, 27, 0.41578441799226107)],
['nct', (14, 0.24045031331198066)],
['ncx2', (21, 1.0560465975116415)],
['norm', ()],
['pareto', (2.621716532144454,)],
['pearson3', (0.1,)],
['powerlaw', (1.6591133289905851,)],
['powerlognorm', (2.1413923530064087, 0.44639540782048337)],
['powernorm', (4.4453652254590779,)],
['rayleigh', ()],
['rdist', (0.9,)], # feels also slow
['recipinvgauss', (0.63004267809369119,)],
['reciprocal', (0.0062309367010521255, 1.0062309367010522)],
['rice', (0.7749725210111873,)],
['semicircular', ()],
['t', (2.7433514990818093,)],
['triang', (0.15785029824528218,)],
['truncexpon', (4.6907725456810478,)],
['truncnorm', (-1.0978730080013919, 2.7306754109031979)],
['truncnorm', (0.1, 2.)],
['tukeylambda', (3.1321477856738267,)],
['uniform', ()],
['vonmises', (3.9939042581071398,)],
['vonmises_line', (3.9939042581071398,)],
['wald', ()],
['weibull_max', (2.8687961709100187,)],
['weibull_min', (1.7866166930421596,)],
['wrapcauchy', (0.031071279018614728,)]]
distdiscrete = [
['bernoulli', (0.3,)],
['binom', (5, 0.4)],
['boltzmann', (1.4, 19)],
['dlaplace', (0.8,)], # 0.5
['geom', (0.5,)],
['hypergeom', (30, 12, 6)],
['hypergeom', (21, 3, 12)], # numpy.random (3,18,12) numpy ticket:921
['hypergeom', (21, 18, 11)], # numpy.random (18,3,11) numpy ticket:921
['logser', (0.6,)], # reenabled, numpy ticket:921
['nbinom', (5, 0.5)],
['nbinom', (0.4, 0.4)], # from tickets: 583
['planck', (0.51,)], # 4.1
['poisson', (0.6,)],
['randint', (7, 31)],
['skellam', (15, 8)],
['zipf', (6.5,)]
]
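# A minimal sketch of how these parameter lists are typically consumed
# (illustrative, not part of the original module; assumes scipy.stats is
# importable and guards against distributions absent from this version):
#
#     from scipy import stats
#     for name, args in distcont + distdiscrete:
#         dist = getattr(stats, name, None)
#         if dist is not None:
#             frozen = dist(*args)  # freeze with the "sane" shape parameters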
|
perryjrandall/arsenalsuite
|
refs/heads/master
|
cpp/apps/freezer/afplugins/wrangle.py
|
11
|
from blur.Stone import *
from blur.Classes import *
from blur.Freezer import *
from PyQt4.QtCore import *
from PyQt4.QtSql import *
import traceback, os
import subprocess
class WranglerViewerPlugin(JobViewerPlugin):
def __init__(self):
JobViewerPlugin.__init__(self)
def name(self):
return QString("Toggle wrangle status")
def icon(self):
return QString("images/wrangled.png")
def view(self, jobList):
for job in jobList:
if job.wrangler() == User.currentUser():
# toggle wrangle to off
job.setWrangler(User())
job.commit()
jh = JobHistory()
jh.setHost(Host.currentHost())
jh.setUser(User.currentUser())
jh.setJob(job)
jh.setMessage("Wrangler activity on job finished")
jh.commit()
else:
# toggle on and log history
job.setWrangler(User.currentUser())
job.commit()
jh = JobHistory()
jh.setHost(Host.currentHost())
jh.setUser(User.currentUser())
jh.setJob(job)
jh.setMessage("Wrangler activity on job began")
jh.commit()
JobViewerFactory.registerPlugin(WranglerViewerPlugin())
|
oposs/check_mk_mirror
|
refs/heads/master
|
web/htdocs/index.py
|
1
|
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
from mod_python import apache
import sys, os, pprint, __builtin__
import i18n
import livestatus
import modules
import defaults, config, login, userdb
from lib import *
from html_mod_python import *
# Main entry point for all HTTP-requests (called directly by mod_apache)
def handler(req, fields = None, is_profiling = False):
# Create an object that contains all data about the request and
# helper functions for creating valid HTML. Parse URI and
# store results in the request object for later usage.
__builtin__.html = html_mod_python(req, fields)
response_code = apache.OK
try:
config.load_config() # load multisite.mk etc.
html.init_modes()
init_profiling(is_profiling)
# Make sure all plugins are available as early as possible. At least
# we need the plugins (i.e. the permissions declared in these) at the
# time before the first login for generating auth.php.
modules.load_all_plugins()
# Get page handler.
handler = modules.get_handler(html.myfile, page_not_found)
# Some pages skip authentication. This is done by adding
# noauth: to the page handler, e.g. "noauth:run_cron" : ...
if handler == page_not_found:
handler = modules.get_handler("noauth:" + html.myfile, page_not_found)
if handler != page_not_found:
try:
# Call userdb page hooks which are executed on a regular basis to e.g. synchronize
# information without explicit user-triggered actions
userdb.hook_page()
handler()
except Exception, e:
html.write(str(e))
if config.debug:
html.write(html.attrencode(format_exception()))
raise FinalizeRequest()
# Is the user set by the webserver? Otherwise use the cookie based auth
if not html.is_logged_in():
config.auth_type = 'cookie'
# When not authenticated, tell the browser to ask for the password
html.login(login.check_auth())
if not html.is_logged_in():
if fail_silently():
# During API calls don't show the login dialog
raise MKUnauthenticatedException(_('You are not authenticated.'))
# Redirect to the login-dialog with the current url as original target
# Never render the login form directly when accessing urls like "index.py"
# or "dashboard.py". This results in strange problems.
if html.myfile != 'login':
html.http_redirect(defaults.url_prefix + 'check_mk/login.py?_origtarget=%s' %
html.urlencode(html.makeuri([])))
# Initialize the i18n for the login dialog. This might be overridden
# later after user login
i18n.localize(html.var("lang", config.get_language()))
# This either displays the login page or validates the information submitted
# to the login form. After successful login a http redirect to the originally
# requested page is performed.
login.page_login(plain_error())
raise FinalizeRequest()
else:
# In case of basic auth the user is already known, but we still need to decide
# whether or not the user is an automation user (which is allowed to use transid=-1)
if html.var("_secret"):
login.check_auth_automation()
# Call userdb page hooks which are executed on a regular basis to e.g. synchronize
# information without explicit user-triggered actions
userdb.hook_page()
# Set all permissions, read site config, and similar stuff
config.login(html.user)
html.load_help_visible()
# Initialize the multisite i18n. This will be replaced by
# language settings stored in the user profile after the user
# has been initialized
i18n.localize(html.var("lang", config.get_language()))
# All plugins might have to be reloaded due to a language change
modules.load_all_plugins()
# User allowed to login at all?
if not config.may("general.use"):
reason = _("You are not authorized to use Check_MK Multisite. Sorry. "
"You are logged in as <b>%s</b>.") % config.user_id
if len(config.user_role_ids):
reason += _("Your roles are <b>%s</b>. " % ", ".join(config.user_role_ids))
else:
reason += _("<b>You do not have any roles.</b> ")
reason += _("If you think this is an error, "
"please ask your administrator to check the permissions configuration.")
if config.auth_type == 'cookie':
reason += _('<p>You have been logged out. Please reload the page to re-authenticate.</p>')
login.del_auth_cookie()
raise MKAuthException(reason)
handler()
except FinalizeRequest, e:
response_code = e.status
except (MKUserError, MKAuthException, MKUnauthenticatedException, MKConfigError, MKGeneralException,
livestatus.MKLivestatusNotFoundError, livestatus.MKLivestatusException), e:
ty = type(e)
if ty == livestatus.MKLivestatusNotFoundError:
title = _("Data not found")
plain_title = _("Livestatus-data not found")
elif isinstance(e, livestatus.MKLivestatusException):
title = _("Livestatus problem")
plain_title = _("Livestatus problem")
else:
title = e.title
plain_title = e.plain_title
if plain_error():
html.write("%s: %s\n" % (plain_title, e))
elif not fail_silently():
html.header(title)
html.show_error(e)
html.footer()
# Some exceptions need to set a specific HTTP status code
if ty == MKUnauthenticatedException:
response_code = apache.HTTP_UNAUTHORIZED
elif ty == livestatus.MKLivestatusNotFoundError:
response_code = apache.HTTP_NOT_FOUND
elif ty == livestatus.MKLivestatusException:
response_code = apache.HTTP_BAD_GATEWAY
if ty in [MKConfigError, MKGeneralException]:
logger(LOG_ERR, _("%s: %s") % (plain_title, e))
except (apache.SERVER_RETURN,
(apache.SERVER_RETURN, apache.HTTP_UNAUTHORIZED),
(apache.SERVER_RETURN, apache.HTTP_MOVED_TEMPORARILY)):
release_all_locks()
html.finalize(is_error=True)
raise
except Exception, e:
html.unplug()
import traceback
msg = "%s %s: %s" % (html.request_uri(), _('Internal error'), traceback.format_exc())
if type(msg) == unicode:
msg = msg.encode('utf-8')
logger(LOG_ERR, msg)
if plain_error():
html.write(_("Internal error") + ": %s\n" % html.attrencode(e))
elif not fail_silently():
modules.get_handler("gui_crash")()
response_code = apache.OK
release_all_locks()
html.finalize()
return response_code
# Profiling of the Check_MK GUI can be enabled via global settings
def init_profiling(is_profiling):
if not is_profiling and config.profile:
import cProfile
# the profiler loses track of all loaded modules, so we need to hand over
# the request object explicitly to the apache module.
# Ubuntu: install python-profiler when using this feature
profile_file = defaults.var_dir + "/web/multisite.profile"
retcode = cProfile.runctx(
"import index; "
"index.handler(profile_req, profile_fields, is_profiling=True)",
{'profile_req': html.req, 'profile_fields': html.fields}, {}, profile_file)
file(profile_file + ".py", "w").write(
"#!/usr/bin/python\n"
"import pstats\n"
"stats = pstats.Stats(%r)\n"
"stats.sort_stats('time').print_stats()\n" % profile_file)
os.chmod(profile_file + ".py", 0755)
raise FinalizeRequest(apache.OK)
# Ajax-Functions want no HTML output in case of an error but
# just a plain server result code of 500
def fail_silently():
return html.has_var("_ajaxid")
# Webservice functions may decide to get a normal result code
# but a text with an error message in case of an error
def plain_error():
return html.has_var("_plain_error")
def page_not_found():
if html.has_var("_plain_error"):
html.write(_("Page not found"))
else:
html.header(_("Page not found"))
html.show_error(_("This page was not found. Sorry."))
html.footer()
# prepare local-structure within OMD sites
# FIXME: Still needed?
def init_sys_path():
if defaults.omd_root:
local_module_path = defaults.omd_root + "/local/share/check_mk/web/htdocs"
local_locale_path = defaults.omd_root + "/local/share/check_mk/locale"
if local_module_path not in sys.path:
sys.path[0:0] = [ local_module_path, defaults.web_dir + "/htdocs" ]
# Early initialization upon first start of the application by the server
def initialize():
init_sys_path()
modules.init_modules()
# Run the global application initialization code here. It is called
# only once during the startup of the application server.
initialize()
|
lawzou/shoop
|
refs/heads/master
|
shoop_tests/core/test_categories.py
|
6
|
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import pytest
from shoop.core.models import Category, CategoryVisibility, CategoryStatus, get_person_contact, AnonymousContact
from shoop.testing.factories import DEFAULT_NAME
from shoop_tests.utils.fixtures import regular_user
@pytest.mark.django_db
@pytest.mark.usefixtures("regular_user")
def test_category_visibility(admin_user, regular_user):
visible_public_category = Category.objects.create(status=CategoryStatus.VISIBLE, visibility=CategoryVisibility.VISIBLE_TO_ALL, identifier="visible_public", name=DEFAULT_NAME)
hidden_public_category = Category.objects.create(status=CategoryStatus.INVISIBLE, visibility=CategoryVisibility.VISIBLE_TO_ALL, identifier="hidden_public", name=DEFAULT_NAME)
deleted_public_category = Category.objects.create(status=CategoryStatus.DELETED, visibility=CategoryVisibility.VISIBLE_TO_ALL, identifier="deleted_public", name=DEFAULT_NAME)
logged_in_category = Category.objects.create(status=CategoryStatus.VISIBLE, visibility=CategoryVisibility.VISIBLE_TO_LOGGED_IN, identifier="visible_logged_in", name=DEFAULT_NAME)
group_visible_category = Category.objects.create(status=CategoryStatus.VISIBLE, visibility=CategoryVisibility.VISIBLE_TO_GROUPS, identifier="visible_groups", name=DEFAULT_NAME)
assert visible_public_category.name == DEFAULT_NAME
assert str(visible_public_category) == DEFAULT_NAME
anon_contact = AnonymousContact()
regular_contact = get_person_contact(regular_user)
admin_contact = get_person_contact(admin_user)
for (customer, category, expect) in [
(anon_contact, visible_public_category, True),
(anon_contact, hidden_public_category, False),
(anon_contact, deleted_public_category, False),
(anon_contact, logged_in_category, False),
(anon_contact, group_visible_category, False),
(regular_contact, visible_public_category, True),
(regular_contact, hidden_public_category, False),
(regular_contact, deleted_public_category, False),
(regular_contact, logged_in_category, True),
(regular_contact, group_visible_category, False),
(admin_contact, visible_public_category, True),
(admin_contact, hidden_public_category, True),
(admin_contact, deleted_public_category, False),
(admin_contact, logged_in_category, True),
(admin_contact, group_visible_category, True),
]:
result = Category.objects.all_visible(customer=customer).filter(pk=category.pk).exists()
assert result == expect, "Queryset visibility of %s for %s as expected" % (category.identifier, customer)
assert category.is_visible(customer) == expect, "Direct visibility of %s for %s as expected" % (category.identifier, customer)
assert not Category.objects.all_except_deleted().filter(pk=deleted_public_category.pk).exists(), "Deleted category does not show up in 'all_except_deleted'"
|
cernops/nova
|
refs/heads/master
|
nova/scheduler/__init__.py
|
116
|
# Copyright (c) 2010 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`nova.scheduler` -- Scheduler Nodes
=====================================================
.. automodule:: nova.scheduler
:platform: Unix
:synopsis: Module that picks a compute node to run a VM instance.
"""
|
GenericStudent/home-assistant
|
refs/heads/dev
|
tests/components/switch/test_init.py
|
14
|
"""The tests for the Switch component."""
import pytest
from homeassistant import core
from homeassistant.components import switch
from homeassistant.const import CONF_PLATFORM
from homeassistant.setup import async_setup_component
from tests.components.switch import common
@pytest.fixture(autouse=True)
def entities(hass):
"""Initialize the test switch."""
platform = getattr(hass.components, "test.switch")
platform.init()
yield platform.ENTITIES
async def test_methods(hass, entities):
"""Test is_on, turn_on, turn_off methods."""
switch_1, switch_2, switch_3 = entities
assert await async_setup_component(
hass, switch.DOMAIN, {switch.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
assert switch.is_on(hass, switch_1.entity_id)
assert not switch.is_on(hass, switch_2.entity_id)
assert not switch.is_on(hass, switch_3.entity_id)
await common.async_turn_off(hass, switch_1.entity_id)
await common.async_turn_on(hass, switch_2.entity_id)
assert not switch.is_on(hass, switch_1.entity_id)
assert switch.is_on(hass, switch_2.entity_id)
# Turn all off
await common.async_turn_off(hass)
assert not switch.is_on(hass, switch_1.entity_id)
assert not switch.is_on(hass, switch_2.entity_id)
assert not switch.is_on(hass, switch_3.entity_id)
# Turn all on
await common.async_turn_on(hass)
assert switch.is_on(hass, switch_1.entity_id)
assert switch.is_on(hass, switch_2.entity_id)
assert switch.is_on(hass, switch_3.entity_id)
async def test_switch_context(hass, entities, hass_admin_user):
"""Test that switch context works."""
assert await async_setup_component(hass, "switch", {"switch": {"platform": "test"}})
await hass.async_block_till_done()
state = hass.states.get("switch.ac")
assert state is not None
await hass.services.async_call(
"switch",
"toggle",
{"entity_id": state.entity_id},
True,
core.Context(user_id=hass_admin_user.id),
)
state2 = hass.states.get("switch.ac")
assert state2 is not None
assert state.state != state2.state
assert state2.context.user_id == hass_admin_user.id
def test_deprecated_base_class(caplog):
"""Test deprecated base class."""
class CustomSwitch(switch.SwitchDevice):
pass
CustomSwitch()
assert "SwitchDevice is deprecated, modify CustomSwitch" in caplog.text
|
YuMatsuzawa/HadoopEclipseProject
|
refs/heads/master
|
hadoop-0.20.2-cdh3u5/src/contrib/hod/hodlib/ServiceProxy/serviceProxy.py
|
182
|
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
"""HOD Service Proxy Implementation"""
# -*- python -*-
import sys, time, signal, httplib, socket, threading
import sha, base64, hmac
import xml.dom.minidom
from hodlib.Common.socketServers import hodHTTPServer
from hodlib.Common.hodsvc import hodBaseService
from hodlib.Common.threads import loop
from hodlib.Common.tcp import tcpSocket
from hodlib.Common.util import get_exception_string
from hodlib.Common.AllocationManagerUtil import *
class svcpxy(hodBaseService):
def __init__(self, config):
hodBaseService.__init__(self, 'serviceProxy', config['service_proxy'],
xrtype='twisted')
self.amcfg = config['allocation_manager']
def _xr_method_isProjectUserValid(self, userid, project, ignoreErrors = False, timeOut = 15):
return self.isProjectUserValid(userid, project, ignoreErrors, timeOut)
def isProjectUserValid(self, userid, project, ignoreErrors, timeOut):
"""Method thats called upon by
the hodshell to verify if the
specified (user, project) combination
is valid"""
self.logs['main'].info("Begin isProjectUserValid()")
am = AllocationManagerUtil.getAllocationManager(self.amcfg['id'],
self.amcfg,
self.logs['main'])
self.logs['main'].info("End isProjectUserValid()")
return am.getQuote(userid, project)
|
kirbyfan64/shedskin
|
refs/heads/master
|
examples/com/github/tarsa/tarsalzp/Options.py
|
6
|
#
# Copyright (c) 2012, Piotr Tarsa
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of the author nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from prelude.Long import Long
__author__ = 'Piotr Tarsa'
class Options(object):
def __init__(self, lzpLowContextLength, lzpLowMaskSize,
lzpHighContextLength, lzpHighMaskSize, literalCoderOrder,
literalCoderInit, literalCoderStep, literalCoderLimit):
self.lzpLowContextLength = lzpLowContextLength
self.lzpLowMaskSize = lzpLowMaskSize
self.lzpHighContextLength = lzpHighContextLength
self.lzpHighMaskSize = lzpHighMaskSize
self.literalCoderOrder = literalCoderOrder
self.literalCoderInit = literalCoderInit
self.literalCoderStep = literalCoderStep
self.literalCoderLimit = literalCoderLimit
def isValid(self):
return (self.lzpLowContextLength > self.literalCoderOrder)\
& (self.lzpLowContextLength <= self.lzpHighContextLength)\
& (self.lzpHighContextLength <= 8)\
& (self.lzpLowMaskSize >= 15)\
& (self.lzpLowMaskSize <= 30)\
& (self.lzpHighMaskSize >= 15)\
& (self.lzpHighMaskSize <= 30)\
& (self.literalCoderOrder >= 1)\
& (self.literalCoderOrder <= 2)\
& (self.literalCoderInit >= 1)\
& (self.literalCoderInit <= 127)\
& (self.literalCoderStep >= 1)\
& (self.literalCoderStep <= 127)\
& (self.literalCoderLimit >= self.literalCoderInit * 256)\
& (self.literalCoderLimit <= 32767 - self.literalCoderStep)
def toPacked(self):
a = (self.lzpLowContextLength << 8) + self.lzpLowMaskSize
b = (self.lzpHighContextLength << 8) + self.lzpHighMaskSize
c = ((self.literalCoderOrder - 1) << 15) + (self.literalCoderInit << 8)\
+ self.literalCoderStep
d = self.literalCoderLimit
return Long(a, b, c, d)
@staticmethod
def fromPacked(packed):
a = packed.a
b = packed.b
c = packed.c
d = packed.d
options = Options((a & 0xff00) >> 8, a & 0xff, (b & 0xff00) >> 8,
b & 0xff, ((c & 0x8000) >> 15) + 1, (c & 0x7f00) >> 8, c & 0xff, d)
return options if options.isValid() else None
@staticmethod
def getDefault():
lzpLowContextLength = 4
lzpLowMaskSize = 24
lzpHighContextLength = 8
lzpHighMaskSize = 27
literalCoderOrder = 2
literalCoderInit = 1
literalCoderStep = 60
literalCoderLimit = 30000
return Options(lzpLowContextLength, lzpLowMaskSize,
lzpHighContextLength, lzpHighMaskSize, literalCoderOrder,
literalCoderInit, literalCoderStep, literalCoderLimit)
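# A round-trip sanity check (values computed from getDefault() above;
# shown as an illustrative comment, not part of the original file):
#
#     opts = Options.getDefault()
#     packed = opts.toPacked()  # Long(1048, 2075, 33084, 30000)
#     assert Options.fromPacked(packed).isValid()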
|
shyamalschandra/picochess
|
refs/heads/master
|
libs/spur/results.py
|
2
|
import locale
def result(return_code, allow_error, output, stderr_output):
result = ExecutionResult(return_code, output, stderr_output)
if return_code == 0 or allow_error:
return result
else:
raise result.to_error()
class ExecutionResult(object):
def __init__(self, return_code, output, stderr_output):
self.return_code = return_code
self.output = output
self.stderr_output = stderr_output
def to_error(self):
return RunProcessError(
self.return_code,
self.output,
self.stderr_output
)
class RunProcessError(RuntimeError):
def __init__(self, return_code, output, stderr_output):
message = "return code: {0}\noutput: {1}\nstderr output: {2}".format(
return_code, _bytes_repr(output), _bytes_repr(stderr_output))
super(RunProcessError, self).__init__(message)
self.return_code = return_code
self.output = output
self.stderr_output = stderr_output
def _bytes_repr(raw_bytes):
result = repr(raw_bytes)
if result.startswith("b"):
return result
else:
return "b" + result
|
sreichholf/python-coherence
|
refs/heads/develop
|
coherence/upnp/devices/internet_gateway_device_client.py
|
5
|
# -*- coding: utf-8 -*-
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2010 Frank Scholz <dev@coherence-project.org>
from coherence.upnp.devices.wan_device_client import WANDeviceClient
from coherence import log
import coherence.extern.louie as louie
class InternetGatewayDeviceClient(log.Loggable):
logCategory = 'igd_client'
def __init__(self, device):
self.device = device
self.device_type = self.device.get_friendly_device_type()
self.version = int(self.device.get_device_type_version())
self.icons = device.icons
self.wan_device = None
self.detection_completed = False
louie.connect(self.embedded_device_notified, signal='Coherence.UPnP.EmbeddedDeviceClient.detection_completed', sender=self.device)
try:
wan_device = self.device.get_embedded_device_by_type('WANDevice')[0]
self.wan_device = WANDeviceClient(wan_device)
except IndexError:
self.warning("Embedded WANDevice device not available, device not implemented properly according to the UPnP specification")
raise
self.info("InternetGatewayDevice %s" % (self.device.get_friendly_name()))
def remove(self):
self.info("removal of InternetGatewayDeviceClient started")
if self.wan_device is not None:
self.wan_device.remove()
def embedded_device_notified(self, device):
self.info("EmbeddedDevice %r sent notification" % device)
if self.detection_completed:
return
self.detection_completed = True
louie.send('Coherence.UPnP.DeviceClient.detection_completed', None,
client=self, udn=self.device.udn)
|
colinligertwood/odoo
|
refs/heads/master
|
addons/account/edi/__init__.py
|
450
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
StephenKing/ryu
|
refs/heads/master
|
ryu/tests/unit/ofproto/test_parser_v12.py
|
23
|
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import unittest
import logging
import six
import socket
from struct import *
from nose.tools import *
from ryu.ofproto.ofproto_v1_2_parser import *
from ryu.ofproto import ofproto_v1_2_parser
from ryu.ofproto import ofproto_v1_2
from ryu.ofproto import ofproto_protocol
from ryu.ofproto import ether
from ryu.ofproto.ofproto_parser import MsgBase
from ryu import utils
from ryu.lib import addrconv
from ryu.lib import pack_utils
LOG = logging.getLogger('test_ofproto_v12')
_Datapath = ofproto_protocol.ProtocolDesc(version=ofproto_v1_2.OFP_VERSION)
class TestRegisterParser(unittest.TestCase):
""" Test case for ofproto_v1_2_parser._register_parser
"""
class _OFPDummy(MsgBase):
def __init__(self, datapath):
self.dummy = 'dummy'
def parser(self):
return self.dummy
def test_cls_msg_type(self):
msg_type = 0xff
cls = self._OFPDummy(_Datapath)
cls.cls_msg_type = msg_type
res = ofproto_v1_2_parser._register_parser(cls)
res_parser = ofproto_v1_2_parser._MSG_PARSERS[msg_type]
del ofproto_v1_2_parser._MSG_PARSERS[msg_type]
eq_(res.cls_msg_type, msg_type)
ok_(res.dummy)
eq_(res_parser(), 'dummy')
@raises(AssertionError)
def test_cls_msg_type_none(self):
cls = OFPHello(_Datapath)
cls.cls_msg_type = None
ofproto_v1_2_parser._register_parser(cls)
@raises(AssertionError)
def test_cls_msg_type_already_registered(self):
cls = OFPHello(_Datapath)
ofproto_v1_2_parser._register_parser(cls)
class TestMsgParser(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.msg_parser
"""
def _test_msg_parser(self, xid, msg_len):
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_HELLO
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
c = msg_parser(_Datapath, version, msg_type, msg_len, xid, buf)
eq_(version, c.version)
eq_(msg_type, c.msg_type)
eq_(msg_len, c.msg_len)
eq_(xid, c.xid)
# buf
fmt = ofproto.OFP_HEADER_PACK_STR
res = struct.unpack(fmt, c.buf)
eq_(version, res[0])
eq_(msg_type, res[1])
eq_(msg_len, res[2])
eq_(xid, res[3])
def test_parser_mid(self):
xid = 2147483648
msg_len = 8
self._test_msg_parser(xid, msg_len)
def test_parser_max(self):
xid = 4294967295
msg_len = 65535
self._test_msg_parser(xid, msg_len)
def test_parser_min(self):
xid = 0
msg_len = 0
self._test_msg_parser(xid, msg_len)
class TestOFPHello(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPHello
"""
def _test_parser(self, xid):
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_HELLO
msg_len = ofproto.OFP_HEADER_SIZE
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
res = OFPHello.parser(object, version, msg_type, msg_len, xid,
bytearray(buf))
eq_(version, res.version)
eq_(msg_type, res.msg_type)
eq_(msg_len, res.msg_len)
eq_(xid, res.xid)
eq_(six.binary_type(buf), six.binary_type(res.buf))
def test_parser_xid_min(self):
xid = 0
self._test_parser(xid)
def test_parser_xid_mid(self):
xid = 2183948390
self._test_parser(xid)
def test_parser_xid_max(self):
xid = 4294967295
self._test_parser(xid)
def test_serialize(self):
c = OFPHello(_Datapath)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_HELLO, c.msg_type)
eq_(0, c.xid)
class TestOFPErrorMsg(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPErrorMsg
"""
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_ERROR
msg_len = ofproto.OFP_ERROR_MSG_SIZE
xid = 2495926989
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
def test_init(self):
c = OFPErrorMsg(_Datapath)
eq_(c.code, None)
eq_(c.type, None)
eq_(c.data, None)
def _test_parser(self, type_, code, data=None):
# OFP_ERROR_MSG_PACK_STR = '!HH'
fmt = ofproto.OFP_ERROR_MSG_PACK_STR
buf = self.buf + pack(fmt, type_, code)
if data is not None:
buf += data
res = OFPErrorMsg.parser(object, self.version, self.msg_type,
self.msg_len, self.xid, buf)
eq_(res.version, self.version)
eq_(res.msg_type, self.msg_type)
eq_(res.msg_len, self.msg_len)
eq_(res.xid, self.xid)
eq_(res.type, type_)
eq_(res.code, code)
if data is not None:
eq_(res.data, data)
def test_parser_mid(self):
type_ = 32768
code = 32768
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_max(self):
type_ = 65534
code = 65535
data = b'Error Message.'.ljust(65523)
self._test_parser(type_, code, data)
def test_parser_min(self):
type_ = 0
code = 0
data = None
self._test_parser(type_, code, data)
def test_parser_p0_1(self):
type_ = ofproto.OFPET_HELLO_FAILED
code = ofproto.OFPHFC_EPERM
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_0(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_BAD_VERSION
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_1(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_BAD_TYPE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_2(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_BAD_STAT
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_3(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_BAD_EXPERIMENTER
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_4(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_BAD_EXP_TYPE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_5(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_EPERM
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_6(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_BAD_LEN
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_7(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_BUFFER_EMPTY
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_8(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_BUFFER_UNKNOWN
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_9(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_BAD_TABLE_ID
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_10(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_IS_SLAVE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_11(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_BAD_PORT
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p1_12(self):
type_ = ofproto.OFPET_BAD_REQUEST
code = ofproto.OFPBRC_BAD_PACKET
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_0(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_BAD_TYPE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_1(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_BAD_LEN
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_2(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_BAD_EXPERIMENTER
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_3(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_BAD_EXP_TYPE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_4(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_BAD_OUT_PORT
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_5(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_BAD_ARGUMENT
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_6(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_EPERM
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_7(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_TOO_MANY
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_8(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_BAD_QUEUE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_9(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_BAD_OUT_GROUP
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_10(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_MATCH_INCONSISTENT
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_11(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_UNSUPPORTED_ORDER
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_12(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_BAD_TAG
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_13(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_BAD_SET_TYPE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_14(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_BAD_SET_LEN
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p2_15(self):
type_ = ofproto.OFPET_BAD_ACTION
code = ofproto.OFPBAC_BAD_SET_ARGUMENT
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p3_0(self):
type_ = ofproto.OFPET_BAD_INSTRUCTION
code = ofproto.OFPBIC_UNKNOWN_INST
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p3_1(self):
type_ = ofproto.OFPET_BAD_INSTRUCTION
code = ofproto.OFPBIC_UNSUP_INST
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p3_2(self):
type_ = ofproto.OFPET_BAD_INSTRUCTION
code = ofproto.OFPBIC_BAD_TABLE_ID
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p3_3(self):
type_ = ofproto.OFPET_BAD_INSTRUCTION
code = ofproto.OFPBIC_UNSUP_METADATA
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p3_4(self):
type_ = ofproto.OFPET_BAD_INSTRUCTION
code = ofproto.OFPBIC_UNSUP_METADATA_MASK
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p3_5(self):
type_ = ofproto.OFPET_BAD_INSTRUCTION
code = ofproto.OFPBIC_BAD_EXPERIMENTER
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p3_6(self):
type_ = ofproto.OFPET_BAD_INSTRUCTION
code = ofproto.OFPBIC_BAD_EXP_TYPE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p3_7(self):
type_ = ofproto.OFPET_BAD_INSTRUCTION
code = ofproto.OFPBIC_BAD_LEN
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p3_8(self):
type_ = ofproto.OFPET_BAD_INSTRUCTION
code = ofproto.OFPBIC_EPERM
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p4_0(self):
type_ = ofproto.OFPET_BAD_MATCH
code = ofproto.OFPBMC_BAD_TYPE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p4_1(self):
type_ = ofproto.OFPET_BAD_MATCH
code = ofproto.OFPBMC_BAD_LEN
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p4_2(self):
type_ = ofproto.OFPET_BAD_MATCH
code = ofproto.OFPBMC_BAD_TAG
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p4_3(self):
type_ = ofproto.OFPET_BAD_MATCH
code = ofproto.OFPBMC_BAD_DL_ADDR_MASK
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p4_4(self):
type_ = ofproto.OFPET_BAD_MATCH
code = ofproto.OFPBMC_BAD_NW_ADDR_MASK
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p4_5(self):
type_ = ofproto.OFPET_BAD_MATCH
code = ofproto.OFPBMC_BAD_WILDCARDS
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p4_6(self):
type_ = ofproto.OFPET_BAD_MATCH
code = ofproto.OFPBMC_BAD_FIELD
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p4_7(self):
type_ = ofproto.OFPET_BAD_MATCH
code = ofproto.OFPBMC_BAD_VALUE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p4_8(self):
type_ = ofproto.OFPET_BAD_MATCH
code = ofproto.OFPBMC_BAD_MASK
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p4_9(self):
type_ = ofproto.OFPET_BAD_MATCH
code = ofproto.OFPBMC_BAD_PREREQ
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p4_10(self):
type_ = ofproto.OFPET_BAD_MATCH
code = ofproto.OFPBMC_DUP_FIELD
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p4_11(self):
type_ = ofproto.OFPET_BAD_MATCH
code = ofproto.OFPBMC_EPERM
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p5_0(self):
type_ = ofproto.OFPET_FLOW_MOD_FAILED
code = ofproto.OFPFMFC_UNKNOWN
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p5_1(self):
type_ = ofproto.OFPET_FLOW_MOD_FAILED
code = ofproto.OFPFMFC_TABLE_FULL
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p5_2(self):
type_ = ofproto.OFPET_FLOW_MOD_FAILED
code = ofproto.OFPFMFC_BAD_TABLE_ID
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p5_3(self):
type_ = ofproto.OFPET_FLOW_MOD_FAILED
code = ofproto.OFPFMFC_OVERLAP
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p5_4(self):
type_ = ofproto.OFPET_FLOW_MOD_FAILED
code = ofproto.OFPFMFC_EPERM
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p5_5(self):
type_ = ofproto.OFPET_FLOW_MOD_FAILED
code = ofproto.OFPFMFC_BAD_TIMEOUT
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p5_6(self):
type_ = ofproto.OFPET_FLOW_MOD_FAILED
code = ofproto.OFPFMFC_BAD_COMMAND
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p5_7(self):
type_ = ofproto.OFPET_FLOW_MOD_FAILED
code = ofproto.OFPFMFC_BAD_FLAGS
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_0(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_GROUP_EXISTS
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_1(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_INVALID_GROUP
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_2(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_WEIGHT_UNSUPPORTED
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_3(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_OUT_OF_GROUPS
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_4(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_OUT_OF_BUCKETS
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_5(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_CHAINING_UNSUPPORTED
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_6(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_WATCH_UNSUPPORTED
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_7(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_LOOP
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_8(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_UNKNOWN_GROUP
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_9(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_CHAINED_GROUP
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_10(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_BAD_TYPE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_11(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_BAD_COMMAND
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_12(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_BAD_BUCKET
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_13(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_BAD_WATCH
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p6_14(self):
type_ = ofproto.OFPET_GROUP_MOD_FAILED
code = ofproto.OFPGMFC_EPERM
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p7_0(self):
type_ = ofproto.OFPET_PORT_MOD_FAILED
code = ofproto.OFPPMFC_BAD_PORT
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p7_1(self):
type_ = ofproto.OFPET_PORT_MOD_FAILED
code = ofproto.OFPPMFC_BAD_HW_ADDR
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p7_2(self):
type_ = ofproto.OFPET_PORT_MOD_FAILED
code = ofproto.OFPPMFC_BAD_CONFIG
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p7_3(self):
type_ = ofproto.OFPET_PORT_MOD_FAILED
code = ofproto.OFPPMFC_BAD_ADVERTISE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p7_4(self):
type_ = ofproto.OFPET_PORT_MOD_FAILED
code = ofproto.OFPPMFC_EPERM
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p8_0(self):
type_ = ofproto.OFPET_TABLE_MOD_FAILED
code = ofproto.OFPTMFC_BAD_TABLE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p8_1(self):
type_ = ofproto.OFPET_TABLE_MOD_FAILED
code = ofproto.OFPTMFC_BAD_CONFIG
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p8_2(self):
type_ = ofproto.OFPET_TABLE_MOD_FAILED
code = ofproto.OFPTMFC_EPERM
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p9_0(self):
type_ = ofproto.OFPET_QUEUE_OP_FAILED
code = ofproto.OFPQOFC_BAD_PORT
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p9_1(self):
type_ = ofproto.OFPET_QUEUE_OP_FAILED
code = ofproto.OFPQOFC_BAD_QUEUE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p9_2(self):
type_ = ofproto.OFPET_QUEUE_OP_FAILED
code = ofproto.OFPQOFC_EPERM
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p10_0(self):
type_ = ofproto.OFPET_SWITCH_CONFIG_FAILED
code = ofproto.OFPSCFC_BAD_FLAGS
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p10_1(self):
type_ = ofproto.OFPET_SWITCH_CONFIG_FAILED
code = ofproto.OFPSCFC_BAD_LEN
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p10_2(self):
type_ = ofproto.OFPET_SWITCH_CONFIG_FAILED
code = ofproto.OFPSCFC_EPERM
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p11_0(self):
type_ = ofproto.OFPET_ROLE_REQUEST_FAILED
code = ofproto.OFPRRFC_STALE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p11_1(self):
type_ = ofproto.OFPET_ROLE_REQUEST_FAILED
code = ofproto.OFPRRFC_UNSUP
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_p11_2(self):
type_ = ofproto.OFPET_ROLE_REQUEST_FAILED
code = ofproto.OFPRRFC_BAD_ROLE
data = b'Error Message.'
self._test_parser(type_, code, data)
def test_parser_experimenter(self):
type_ = 0xffff
exp_type = 1
experimenter = 1
data = b'Error Experimenter Message.'
# OFP_ERROR_EXPERIMENTER_MSG_PACK_STR = '!HHI'
fmt = ofproto.OFP_ERROR_EXPERIMENTER_MSG_PACK_STR
buf = self.buf + pack(fmt, type_, exp_type, experimenter) \
+ data
res = OFPErrorMsg.parser(object, self.version, self.msg_type,
self.msg_len, self.xid, buf)
eq_(res.version, self.version)
eq_(res.msg_type, self.msg_type)
eq_(res.msg_len, self.msg_len)
eq_(res.xid, self.xid)
eq_(res.type, type_)
eq_(res.exp_type, exp_type)
eq_(res.experimenter, experimenter)
eq_(res.data, data)
def _test_serialize(self, type_, code, data):
# OFP_ERROR_MSG_PACK_STR = '!HH'
fmt = ofproto.OFP_ERROR_MSG_PACK_STR
buf = self.buf + pack(fmt, type_, code) + data
# initialization
c = OFPErrorMsg(_Datapath)
c.type = type_
c.code = code
c.data = data
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_ERROR, c.msg_type)
eq_(0, c.xid)
eq_(len(buf), c.msg_len)
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ ofproto.OFP_ERROR_MSG_PACK_STR.replace('!', '') \
+ str(len(c.data)) + 's'
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_ERROR)
eq_(res[2], len(buf))
eq_(res[3], 0)
eq_(res[4], type_)
eq_(res[5], code)
eq_(res[6], data)
def test_serialize_mid(self):
type_ = 32768
code = 32768
data = b'Error Message.'
self._test_serialize(type_, code, data)
def test_serialize_max(self):
type_ = 65535
code = 65535
data = b'Error Message.'.ljust(65523)
self._test_serialize(type_, code, data)
def test_serialize_min_except_data(self):
type_ = ofproto.OFPET_HELLO_FAILED
code = ofproto.OFPHFC_INCOMPATIBLE
data = b'Error Message.'
self._test_serialize(type_, code, data)
@raises(AssertionError)
def test_serialize_check_data(self):
c = OFPErrorMsg(_Datapath)
c.serialize()
def _test_serialize_p(self, type_, code):
self._test_serialize(type_, code, b'Error Message.')
def test_serialize_p0_1(self):
self._test_serialize_p(ofproto.OFPET_HELLO_FAILED,
ofproto.OFPHFC_EPERM)
def test_serialize_p1_0(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_BAD_VERSION)
def test_serialize_p1_1(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_BAD_TYPE)
def test_serialize_p1_2(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_BAD_STAT)
def test_serialize_p1_3(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_BAD_EXPERIMENTER)
def test_serialize_p1_4(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_BAD_EXP_TYPE)
def test_serialize_p1_5(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_EPERM)
def test_serialize_p1_6(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_BAD_LEN)
def test_serialize_p1_7(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_BUFFER_EMPTY)
def test_serialize_p1_8(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_BUFFER_UNKNOWN)
def test_serialize_p1_9(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_BAD_TABLE_ID)
def test_serialize_p1_10(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_IS_SLAVE)
def test_serialize_p1_11(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_BAD_PORT)
def test_serialize_p1_12(self):
self._test_serialize_p(ofproto.OFPET_BAD_REQUEST,
ofproto.OFPBRC_BAD_PACKET)
def test_serialize_p2_0(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_BAD_TYPE)
def test_serialize_p2_1(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_BAD_LEN)
def test_serialize_p2_2(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_BAD_EXPERIMENTER)
def test_serialize_p2_3(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_BAD_EXP_TYPE)
def test_serialize_p2_4(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_BAD_OUT_PORT)
def test_serialize_p2_5(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_BAD_ARGUMENT)
def test_serialize_p2_6(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_EPERM)
def test_serialize_p2_7(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_TOO_MANY)
def test_serialize_p2_8(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_BAD_QUEUE)
def test_serialize_p2_9(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_BAD_OUT_GROUP)
def test_serialize_p2_10(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_MATCH_INCONSISTENT)
def test_serialize_p2_11(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_UNSUPPORTED_ORDER)
def test_serialize_p2_12(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_BAD_TAG)
def test_serialize_p2_13(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_BAD_SET_TYPE)
def test_serialize_p2_14(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_BAD_SET_LEN)
def test_serialize_p2_15(self):
self._test_serialize_p(ofproto.OFPET_BAD_ACTION,
ofproto.OFPBAC_BAD_SET_ARGUMENT)
def test_serialize_p3_0(self):
self._test_serialize_p(ofproto.OFPET_BAD_INSTRUCTION,
ofproto.OFPBIC_UNKNOWN_INST)
def test_serialize_p3_1(self):
self._test_serialize_p(ofproto.OFPET_BAD_INSTRUCTION,
ofproto.OFPBIC_UNSUP_INST)
def test_serialize_p3_2(self):
self._test_serialize_p(ofproto.OFPET_BAD_INSTRUCTION,
ofproto.OFPBIC_BAD_TABLE_ID)
def test_serialize_p3_3(self):
self._test_serialize_p(ofproto.OFPET_BAD_INSTRUCTION,
ofproto.OFPBIC_UNSUP_METADATA)
def test_serialize_p3_4(self):
self._test_serialize_p(ofproto.OFPET_BAD_INSTRUCTION,
ofproto.OFPBIC_UNSUP_METADATA_MASK)
def test_serialize_p3_5(self):
self._test_serialize_p(ofproto.OFPET_BAD_INSTRUCTION,
ofproto.OFPBIC_BAD_EXPERIMENTER)
def test_serialize_p3_6(self):
self._test_serialize_p(ofproto.OFPET_BAD_INSTRUCTION,
ofproto.OFPBIC_BAD_EXP_TYPE)
def test_serialize_p3_7(self):
self._test_serialize_p(ofproto.OFPET_BAD_INSTRUCTION,
ofproto.OFPBIC_BAD_LEN)
def test_serialize_p3_8(self):
self._test_serialize_p(ofproto.OFPET_BAD_INSTRUCTION,
ofproto.OFPBIC_EPERM)
def test_serialize_p4_0(self):
self._test_serialize_p(ofproto.OFPET_BAD_MATCH,
ofproto.OFPBMC_BAD_TYPE)
def test_serialize_p4_1(self):
self._test_serialize_p(ofproto.OFPET_BAD_MATCH,
ofproto.OFPBMC_BAD_LEN)
def test_serialize_p4_2(self):
self._test_serialize_p(ofproto.OFPET_BAD_MATCH,
ofproto.OFPBMC_BAD_TAG)
def test_serialize_p4_3(self):
self._test_serialize_p(ofproto.OFPET_BAD_MATCH,
ofproto.OFPBMC_BAD_DL_ADDR_MASK)
def test_serialize_p4_4(self):
self._test_serialize_p(ofproto.OFPET_BAD_MATCH,
ofproto.OFPBMC_BAD_NW_ADDR_MASK)
def test_serialize_p4_5(self):
self._test_serialize_p(ofproto.OFPET_BAD_MATCH,
ofproto.OFPBMC_BAD_WILDCARDS)
def test_serialize_p4_6(self):
self._test_serialize_p(ofproto.OFPET_BAD_MATCH,
ofproto.OFPBMC_BAD_FIELD)
def test_serialize_p4_7(self):
self._test_serialize_p(ofproto.OFPET_BAD_MATCH,
ofproto.OFPBMC_BAD_VALUE)
def test_serialize_p4_8(self):
self._test_serialize_p(ofproto.OFPET_BAD_MATCH,
ofproto.OFPBMC_BAD_MASK)
def test_serialize_p4_9(self):
self._test_serialize_p(ofproto.OFPET_BAD_MATCH,
ofproto.OFPBMC_BAD_PREREQ)
def test_serialize_p4_10(self):
self._test_serialize_p(ofproto.OFPET_BAD_MATCH,
ofproto.OFPBMC_DUP_FIELD)
def test_serialize_p4_11(self):
self._test_serialize_p(ofproto.OFPET_BAD_MATCH,
ofproto.OFPBMC_EPERM)
def test_serialize_p5_0(self):
self._test_serialize_p(ofproto.OFPET_FLOW_MOD_FAILED,
ofproto.OFPFMFC_UNKNOWN)
def test_serialize_p5_1(self):
self._test_serialize_p(ofproto.OFPET_FLOW_MOD_FAILED,
ofproto.OFPFMFC_TABLE_FULL)
def test_serialize_p5_2(self):
self._test_serialize_p(ofproto.OFPET_FLOW_MOD_FAILED,
ofproto.OFPFMFC_BAD_TABLE_ID)
def test_serialize_p5_3(self):
self._test_serialize_p(ofproto.OFPET_FLOW_MOD_FAILED,
ofproto.OFPFMFC_OVERLAP)
def test_serialize_p5_4(self):
self._test_serialize_p(ofproto.OFPET_FLOW_MOD_FAILED,
ofproto.OFPFMFC_EPERM)
def test_serialize_p5_5(self):
self._test_serialize_p(ofproto.OFPET_FLOW_MOD_FAILED,
ofproto.OFPFMFC_BAD_TIMEOUT)
def test_serialize_p5_6(self):
self._test_serialize_p(ofproto.OFPET_FLOW_MOD_FAILED,
ofproto.OFPFMFC_BAD_COMMAND)
def test_serialize_p5_7(self):
self._test_serialize_p(ofproto.OFPET_FLOW_MOD_FAILED,
ofproto.OFPFMFC_BAD_FLAGS)
def test_serialize_p6_0(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_GROUP_EXISTS)
def test_serialize_p6_1(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_INVALID_GROUP)
def test_serialize_p6_2(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_WEIGHT_UNSUPPORTED)
def test_serialize_p6_3(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_OUT_OF_GROUPS)
def test_serialize_p6_4(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_OUT_OF_BUCKETS)
def test_serialize_p6_5(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_CHAINING_UNSUPPORTED)
def test_serialize_p6_6(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_WATCH_UNSUPPORTED)
def test_serialize_p6_7(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_LOOP)
def test_serialize_p6_8(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_UNKNOWN_GROUP)
def test_serialize_p6_9(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_CHAINED_GROUP)
def test_serialize_p6_10(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_BAD_TYPE)
def test_serialize_p6_11(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_BAD_COMMAND)
def test_serialize_p6_12(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_BAD_BUCKET)
def test_serialize_p6_13(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_BAD_WATCH)
def test_serialize_p6_14(self):
self._test_serialize_p(ofproto.OFPET_GROUP_MOD_FAILED,
ofproto.OFPGMFC_EPERM)
def test_serialize_p7_0(self):
self._test_serialize_p(ofproto.OFPET_PORT_MOD_FAILED,
ofproto.OFPPMFC_BAD_PORT)
def test_serialize_p7_1(self):
self._test_serialize_p(ofproto.OFPET_PORT_MOD_FAILED,
ofproto.OFPPMFC_BAD_HW_ADDR)
def test_serialize_p7_2(self):
self._test_serialize_p(ofproto.OFPET_PORT_MOD_FAILED,
ofproto.OFPPMFC_BAD_CONFIG)
def test_serialize_p7_3(self):
self._test_serialize_p(ofproto.OFPET_PORT_MOD_FAILED,
ofproto.OFPPMFC_BAD_ADVERTISE)
def test_serialize_p7_4(self):
self._test_serialize_p(ofproto.OFPET_PORT_MOD_FAILED,
ofproto.OFPPMFC_EPERM)
def test_serialize_p8_0(self):
self._test_serialize_p(ofproto.OFPET_TABLE_MOD_FAILED,
ofproto.OFPTMFC_BAD_TABLE)
def test_serialize_p8_1(self):
self._test_serialize_p(ofproto.OFPET_TABLE_MOD_FAILED,
ofproto.OFPTMFC_BAD_CONFIG)
def test_serialize_p8_2(self):
self._test_serialize_p(ofproto.OFPET_TABLE_MOD_FAILED,
ofproto.OFPTMFC_EPERM)
def test_serialize_p9_0(self):
self._test_serialize_p(ofproto.OFPET_QUEUE_OP_FAILED,
ofproto.OFPQOFC_BAD_PORT)
def test_serialize_p9_1(self):
self._test_serialize_p(ofproto.OFPET_QUEUE_OP_FAILED,
ofproto.OFPQOFC_BAD_QUEUE)
def test_serialize_p9_2(self):
self._test_serialize_p(ofproto.OFPET_QUEUE_OP_FAILED,
ofproto.OFPQOFC_EPERM)
def test_serialize_p10_0(self):
self._test_serialize_p(ofproto.OFPET_SWITCH_CONFIG_FAILED,
ofproto.OFPSCFC_BAD_FLAGS)
def test_serialize_p10_1(self):
self._test_serialize_p(ofproto.OFPET_SWITCH_CONFIG_FAILED,
ofproto.OFPSCFC_BAD_LEN)
def test_serialize_p10_2(self):
self._test_serialize_p(ofproto.OFPET_SWITCH_CONFIG_FAILED,
ofproto.OFPSCFC_EPERM)
def test_serialize_p11_0(self):
self._test_serialize_p(ofproto.OFPET_ROLE_REQUEST_FAILED,
ofproto.OFPRRFC_STALE)
def test_serialize_p11_1(self):
self._test_serialize_p(ofproto.OFPET_ROLE_REQUEST_FAILED,
ofproto.OFPRRFC_UNSUP)
def test_serialize_p11_2(self):
self._test_serialize_p(ofproto.OFPET_ROLE_REQUEST_FAILED,
ofproto.OFPRRFC_BAD_ROLE)
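# A minimal illustrative sketch of the plain error framing the
# parametrized tests above exercise: an 8-byte OpenFlow header
# ('!BBHI') followed by '!HH' (type, code) and the raw data.  The
# header literals (version 0x03, OFPT_ERROR = 1) are assumptions
# mirroring the ofproto constants, written out so the sketch needs
# no ofproto import.
def _sketch_error_wire(type_=0x0001, code=0x0001, data=b'Error Message.'):
    import struct
    msg_len = 8 + 4 + len(data)                      # header + '!HH' + payload
    buf = struct.pack('!BBHI', 0x03, 1, msg_len, 0)  # version, msg_type, msg_len, xid
    return buf + struct.pack('!HH', type_, code) + data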
class TestOFPErrorExperimenterMsg(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPErrorExperimenterMsg
"""
def test_init(self):
c = OFPErrorExperimenterMsg(_Datapath)
eq_(c.type, 65535)
eq_(c.exp_type, None)
eq_(c.experimenter, None)
eq_(c.data, None)
def _test_parser(self, exp_type, experimenter, data=None):
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_ERROR
msg_len = ofproto.OFP_ERROR_MSG_SIZE
xid = 2495926989
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
# OFP_ERROR_EXPERIMENTER_MSG_PACK_STR = '!HHI'
type_ = 0xffff
fmt = ofproto.OFP_ERROR_EXPERIMENTER_MSG_PACK_STR
buf += pack(fmt, type_, exp_type, experimenter)
if data is not None:
buf += data
res = OFPErrorExperimenterMsg.parser(
object, version, msg_type, msg_len, xid, buf)
eq_(res.version, version)
eq_(res.msg_type, msg_type)
eq_(res.msg_len, msg_len)
eq_(res.xid, xid)
eq_(res.type, type_)
eq_(res.exp_type, exp_type)
eq_(res.experimenter, experimenter)
if data is not None:
eq_(res.data, data)
def test_parser_mid(self):
exp_type = 32768
experimenter = 2147483648
data = b'Error Experimenter Message.'
self._test_parser(exp_type, experimenter, data)
def test_parser_max(self):
exp_type = 65535
experimenter = 4294967295
data = b'Error Experimenter Message.'.ljust(65519)
self._test_parser(exp_type, experimenter, data)
def test_parser_min(self):
exp_type = 0
experimenter = 0
self._test_parser(exp_type, experimenter)
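# A minimal illustrative sketch of the experimenter-error layout parsed
# above: the same 8-byte header followed by '!HHI' (type=0xffff,
# exp_type, experimenter) and the payload.  The header literals are
# assumptions standing in for the ofproto constants.
def _sketch_error_experimenter_wire(exp_type=1, experimenter=1,
                                    data=b'Error Experimenter Message.'):
    import struct
    msg_len = 8 + 8 + len(data)                      # header + '!HHI' + payload
    buf = struct.pack('!BBHI', 0x03, 1, msg_len, 0)
    buf += struct.pack('!HHI', 0xffff, exp_type, experimenter)
    return buf + data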
class TestOFPEchoRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPEchoRequest
"""
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_ECHO_REQUEST
msg_len = ofproto.OFP_HEADER_SIZE
xid = 2495926989
def test_init(self):
c = OFPEchoRequest(_Datapath)
eq_(c.data, None)
def _test_parser(self, data=None):
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, self.version, self.msg_type,
self.msg_len, self.xid)
if data is not None:
buf += data
res = OFPEchoRequest.parser(object, self.version, self.msg_type,
self.msg_len, self.xid, buf)
eq_(res.version, self.version)
eq_(res.msg_type, self.msg_type)
eq_(res.msg_len, self.msg_len)
eq_(res.xid, self.xid)
if data is not None:
eq_(res.data, data)
def test_parser_mid(self):
data = b'Request Message.'
self._test_parser(data)
def test_parser_max(self):
data = b'Request Message.'.ljust(65527)
self._test_parser(data)
def test_parser_min(self):
data = None
self._test_parser(data)
def _test_serialize(self, data):
c = OFPEchoRequest(_Datapath)
c.data = data
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_ECHO_REQUEST, c.msg_type)
eq_(0, c.xid)
fmt = ofproto.OFP_HEADER_PACK_STR
if data is not None:
fmt += str(len(c.data)) + 's'
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_ECHO_REQUEST)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
if data is not None:
eq_(res[4], data)
def test_serialize_mid(self):
data = b'Request Message.'
self._test_serialize(data)
def test_serialize_max(self):
data = b'Request Message.'.ljust(65527)
self._test_serialize(data)
def test_serialize_min(self):
data = None
self._test_serialize(data)
class TestOFPEchoReply(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPEchoReply
"""
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_ECHO_REPLY
msg_len = ofproto.OFP_HEADER_SIZE
xid = 2495926989
def test_init(self):
c = OFPEchoReply(_Datapath)
eq_(c.data, None)
def _test_parser(self, data):
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, self.version, self.msg_type,
self.msg_len, self.xid)
if data is not None:
buf += data
res = OFPEchoReply.parser(object, self.version, self.msg_type,
self.msg_len, self.xid, buf)
eq_(res.version, self.version)
eq_(res.msg_type, self.msg_type)
eq_(res.msg_len, self.msg_len)
eq_(res.xid, self.xid)
if data is not None:
eq_(res.data, data)
def test_parser_mid(self):
data = b'Reply Message.'
self._test_parser(data)
def test_parser_max(self):
data = b'Reply Message.'.ljust(65527)
self._test_parser(data)
def test_parser_min(self):
data = None
self._test_parser(data)
def _test_serialize(self, data):
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, self.version, self.msg_type,
self.msg_len, self.xid) + data
c = OFPEchoReply(_Datapath)
c.data = data
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_ECHO_REPLY, c.msg_type)
eq_(0, c.xid)
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ str(len(c.data)) + 's'
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_ECHO_REPLY)
eq_(res[2], len(buf))
eq_(res[3], 0)
eq_(res[4], data)
def test_serialize_mid(self):
data = b'Reply Message.'
self._test_serialize(data)
def test_serialize_max(self):
data = b'Reply Message.'.ljust(65527)
self._test_serialize(data)
@raises(AssertionError)
def test_serialize_check_data(self):
c = OFPEchoReply(_Datapath)
c.serialize()
class TestOFPExperimenter(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPExperimenter
"""
c = OFPExperimenter(_Datapath)
def _test_parser(self, xid, experimenter, exp_type):
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_EXPERIMENTER
msg_len = ofproto.OFP_EXPERIMENTER_HEADER_SIZE
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
# OFP_EXPERIMENTER_HEADER_PACK_STR
# '!II'...experimenter, exp_type
fmt = ofproto.OFP_EXPERIMENTER_HEADER_PACK_STR
buf += pack(fmt, experimenter, exp_type)
res = OFPExperimenter.parser(object, version, msg_type,
msg_len, xid, buf)
eq_(version, res.version)
eq_(msg_type, res.msg_type)
eq_(msg_len, res.msg_len)
eq_(xid, res.xid)
eq_(experimenter, res.experimenter)
eq_(exp_type, res.exp_type)
def test_parser_mid(self):
xid = 2495926989
experimenter = 2147483648
exp_type = 1
self._test_parser(xid, experimenter, exp_type)
def test_parser_max(self):
xid = 4294967295
experimenter = 4294967295
exp_type = 65535
self._test_parser(xid, experimenter, exp_type)
def test_parser_min(self):
xid = 0
experimenter = 0
exp_type = 0
self._test_parser(xid, experimenter, exp_type)
class TestOFPPort(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPPort
"""
def test_init(self):
# OFP_PORT_PACK_STR
        # '!I4x6s2x16sIIIIIIII'... port_no, pad(4), hw_addr, pad(2),
        #                          name, config, state, curr, advertised,
        #                          supported, peer, curr_speed, max_speed
port_no = 1119692796
hw_addr = 'c0:26:53:c4:29:e2'
name = b'name'.ljust(16)
config = 2226555987
state = 1678244809
curr = 2850556459
advertised = 2025421682
supported = 2120575149
peer = 2757463021
curr_speed = 2641353507
max_speed = 1797291672
fmt = ofproto.OFP_PORT_PACK_STR
c = OFPPort(port_no, hw_addr, name, config, state, curr,
advertised, supported, peer, curr_speed, max_speed)
eq_(port_no, c.port_no)
eq_(hw_addr, c.hw_addr)
eq_(name, c.name)
eq_(config, c.config)
eq_(state, c.state)
eq_(curr, c.curr)
eq_(advertised, c.advertised)
eq_(supported, c.supported)
eq_(peer, c.peer)
eq_(curr_speed, c.curr_speed)
eq_(max_speed, c.max_speed)
def _test_parser(self, port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed):
name = b'name'.ljust(16)
fmt = ofproto.OFP_PORT_PACK_STR
buf = pack(fmt, port_no, addrconv.mac.text_to_bin(hw_addr), name,
config, state, curr,
advertised, supported, peer, curr_speed, max_speed)
res = OFPPort.parser(buf, 0)
eq_(port_no, res.port_no)
eq_(hw_addr, res.hw_addr)
eq_(name, res.name)
eq_(config, res.config)
eq_(state, res.state)
eq_(curr, res.curr)
eq_(advertised, res.advertised)
eq_(supported, res.supported)
eq_(peer, res.peer)
eq_(curr_speed, res.curr_speed)
eq_(max_speed, res.max_speed)
def test_parser_mid(self):
port_no = 1119692796
hw_addr = 'c0:26:53:c4:29:e2'
config = 2226555987
state = 1678244809
curr = 2850556459
advertised = 2025421682
supported = 2120575149
peer = 2757463021
curr_speed = 2641353507
max_speed = 1797291672
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_max(self):
port_no = ofproto.OFPP_ANY
hw_addr = 'ff:ff:ff:ff:ff:ff'
config = 4294967295
state = 4294967295
curr = 4294967295
advertised = 4294967295
supported = 4294967295
peer = 4294967295
curr_speed = 4294967295
max_speed = 4294967295
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_min(self):
port_no = 0
hw_addr = '00:00:00:00:00:00'
config = 0
state = 0
curr = 0
advertised = 0
supported = 0
peer = 0
curr_speed = 0
max_speed = 0
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p1(self):
port_no = ofproto.OFPP_MAX
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_PORT_DOWN
state = ofproto.OFPPS_LINK_DOWN
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_10MB_HD
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p2(self):
port_no = ofproto.OFPP_IN_PORT
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_RECV
state = ofproto.OFPPS_BLOCKED
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_10MB_FD
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p3(self):
port_no = ofproto.OFPP_TABLE
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_FWD
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_100MB_HD
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p4(self):
port_no = ofproto.OFPP_NORMAL
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_100MB_FD
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p5(self):
port_no = ofproto.OFPP_FLOOD
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_1GB_HD
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p6(self):
port_no = ofproto.OFPP_ALL
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_1GB_FD
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p7(self):
port_no = ofproto.OFPP_CONTROLLER
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_10GB_FD
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p8(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_40GB_FD
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p9(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_100GB_FD
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p10(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_1TB_FD
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p11(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_OTHER
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p12(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_COPPER
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p13(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_FIBER
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p14(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_AUTONEG
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p15(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_PAUSE
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p16(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = 'c0:26:53:c4:29:e2'
config = ofproto.OFPPC_NO_PACKET_IN
state = ofproto.OFPPS_LIVE
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_PAUSE_ASYM
self._test_parser(port_no, hw_addr, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
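# A minimal illustrative sketch of the 64-byte port structure parsed
# above: '!I4x6s2x16sIIIIIIII' packs port_no, a binary MAC, a 16-byte
# name and eight 32-bit fields (config through max_speed).  binascii is
# used here as a self-contained stand-in for addrconv.mac.text_to_bin.
def _sketch_port_struct():
    import binascii
    import struct
    fmt = '!I4x6s2x16sIIIIIIII'
    assert struct.calcsize(fmt) == 64                # OFP_PORT_SIZE in OF1.2
    mac_bin = binascii.unhexlify('c02653c429e2')
    buf = struct.pack(fmt, 1, mac_bin, b'name'.ljust(16),
                      0, 0, 0, 0, 0, 0, 0, 0)
    return struct.unpack(fmt, buf)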
class TestOFPFeaturesRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPFeaturesRequest
"""
def test_serialize(self):
c = OFPFeaturesRequest(_Datapath)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_FEATURES_REQUEST, c.msg_type)
eq_(0, c.xid)
fmt = ofproto.OFP_HEADER_PACK_STR
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_FEATURES_REQUEST)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
class TestOFPSwitchFeatures(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPSwitchFeatures
"""
def _test_parser(self, xid, datapath_id, n_buffers,
n_tables, capabilities, reserved, port_cnt=0):
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_FEATURES_REPLY
msg_len = ofproto.OFP_SWITCH_FEATURES_SIZE \
+ ofproto.OFP_PORT_SIZE * port_cnt
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
# OFP_SWITCH_FEATURES_PACK_STR
# '!QIB3xII'...datapath_id, n_buffers, n_tables,
# pad(3), capabilities, reserved
fmt = ofproto.OFP_SWITCH_FEATURES_PACK_STR
buf += pack(fmt, datapath_id, n_buffers, n_tables,
capabilities, reserved)
for i in range(port_cnt):
# OFP_PORT_PACK_STR
            # '!I4x6s2x16sIIIIIIII'... port_no, pad(4), hw_addr, pad(2),
            #                          name, config, state, curr, advertised,
            #                          supported, peer, curr_speed, max_speed
port_no = i
fmt = ofproto.OFP_PORT_PACK_STR
buf += pack(fmt, port_no, b'\x00' * 6, b'\x00' * 16, 0, 0, 0,
0, 0, 0, 0, 0)
res = OFPSwitchFeatures.parser(object, version, msg_type,
msg_len, xid, buf)
eq_(res.version, version)
eq_(res.msg_type, msg_type)
eq_(res.msg_len, msg_len)
eq_(res.xid, xid)
eq_(res.datapath_id, datapath_id)
eq_(res.n_buffers, n_buffers)
eq_(res.n_tables, n_tables)
eq_(res.capabilities, capabilities)
eq_(res._reserved, reserved)
for i in range(port_cnt):
eq_(res.ports[i].port_no, i)
def test_parser_mid(self):
xid = 2495926989
datapath_id = 1270985291017894273
n_buffers = 2148849654
n_tables = 228
capabilities = 1766843586
reserved = 2013714700
port_cnt = 1
self._test_parser(xid, datapath_id, n_buffers, n_tables,
capabilities, reserved, port_cnt)
def test_parser_max(self):
xid = 4294967295
datapath_id = 18446744073709551615
n_buffers = 4294967295
n_tables = 255
capabilities = 4294967295
reserved = 4294967295
port_cnt = 1023
self._test_parser(xid, datapath_id, n_buffers, n_tables,
capabilities, reserved, port_cnt)
def test_parser_min(self):
xid = 0
datapath_id = 0
n_buffers = 0
n_tables = 0
capabilities = 0
reserved = 0
port_cnt = 0
self._test_parser(xid, datapath_id, n_buffers, n_tables,
capabilities, reserved, port_cnt)
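# A minimal illustrative sketch of the features-reply sizing used above:
# a fixed 32-byte body (8-byte header plus '!QIB3xII') grows by one
# 64-byte port struct per port.  The 32 and 64 are literals assumed to
# mirror OFP_SWITCH_FEATURES_SIZE and OFP_PORT_SIZE.
def _sketch_features_reply_len(port_cnt=1023):
    import struct
    assert struct.calcsize('!BBHI') + struct.calcsize('!QIB3xII') == 32
    return 32 + 64 * port_cnt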
class TestOFPGetConfigRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPGetConfigRequest
"""
def test_serialize(self):
c = OFPGetConfigRequest(_Datapath)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_GET_CONFIG_REQUEST, c.msg_type)
eq_(0, c.xid)
fmt = ofproto.OFP_HEADER_PACK_STR
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_GET_CONFIG_REQUEST)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
class TestOFPGetConfigReply(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPGetConfigReply
"""
def _test_parser(self, xid, flags, miss_send_len):
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_GET_CONFIG_REPLY
msg_len = ofproto.OFP_SWITCH_CONFIG_SIZE
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
# OFP_SWITCH_CONFIG_PACK_STR
# '!HH'...flags, miss_send_len
fmt = ofproto.OFP_SWITCH_CONFIG_PACK_STR
buf += pack(fmt, flags, miss_send_len)
res = OFPGetConfigReply.parser(object, version, msg_type,
msg_len, xid, buf)
eq_(version, res.version)
eq_(msg_type, res.msg_type)
eq_(msg_len, res.msg_len)
eq_(xid, res.xid)
eq_(flags, res.flags)
eq_(miss_send_len, res.miss_send_len)
def test_parser_mid(self):
xid = 3423224276
flags = 41186
miss_send_len = 13838
self._test_parser(xid, flags, miss_send_len)
def test_parser_max(self):
xid = 4294967295
flags = 65535
miss_send_len = 65535
self._test_parser(xid, flags, miss_send_len)
def test_parser_min(self):
xid = 0
flags = ofproto.OFPC_FRAG_NORMAL
miss_send_len = 0
self._test_parser(xid, flags, miss_send_len)
def test_parser_p1(self):
xid = 3423224276
flags = ofproto.OFPC_FRAG_DROP
miss_send_len = 13838
self._test_parser(xid, flags, miss_send_len)
def test_parser_p2(self):
xid = 3423224276
flags = ofproto.OFPC_FRAG_REASM
miss_send_len = 13838
self._test_parser(xid, flags, miss_send_len)
def test_parser_p3(self):
xid = 3423224276
flags = ofproto.OFPC_FRAG_MASK
miss_send_len = 13838
self._test_parser(xid, flags, miss_send_len)
def test_parser_p4(self):
xid = 3423224276
flags = ofproto.OFPC_INVALID_TTL_TO_CONTROLLER
miss_send_len = 13838
self._test_parser(xid, flags, miss_send_len)
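# A minimal illustrative sketch of the '!HH' switch-config body
# round-tripped above.  The flag literal mirrors OFPC_FRAG_DROP = 1
# from the OF1.2 spec and is an assumption here.
def _sketch_switch_config(flags=1, miss_send_len=13838):
    import struct
    body = struct.pack('!HH', flags, miss_send_len)
    assert struct.unpack('!HH', body) == (flags, miss_send_len)
    return body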
class TestOFPSetConfig(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPSetConfig
"""
def test_init(self):
# OFP_SWITCH_CONFIG_PACK_STR
# '!HH'...flags, miss_send_len
flags = 41186
miss_send_len = 13838
c = OFPSetConfig(_Datapath, flags, miss_send_len)
eq_(flags, c.flags)
eq_(miss_send_len, c.miss_send_len)
def _test_serialize(self, flags, miss_send_len):
c = OFPSetConfig(_Datapath, flags, miss_send_len)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_SET_CONFIG, c.msg_type)
eq_(0, c.xid)
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ ofproto.OFP_SWITCH_CONFIG_PACK_STR.replace('!', '')
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_SET_CONFIG)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], flags)
eq_(res[5], miss_send_len)
def test_serialize_mid(self):
flags = 41186
miss_send_len = 13838
self._test_serialize(flags, miss_send_len)
def test_serialize_max(self):
flags = 65535
miss_send_len = 65535
self._test_serialize(flags, miss_send_len)
def test_serialize_min(self):
flags = ofproto.OFPC_FRAG_NORMAL
miss_send_len = 0
self._test_serialize(flags, miss_send_len)
def test_serialize_p1(self):
flags = ofproto.OFPC_FRAG_DROP
miss_send_len = 13838
self._test_serialize(flags, miss_send_len)
def test_serialize_p2(self):
flags = ofproto.OFPC_FRAG_REASM
miss_send_len = 13838
self._test_serialize(flags, miss_send_len)
def test_serialize_p3(self):
flags = ofproto.OFPC_FRAG_MASK
miss_send_len = 13838
self._test_serialize(flags, miss_send_len)
def test_serialize_p4(self):
flags = ofproto.OFPC_INVALID_TTL_TO_CONTROLLER
miss_send_len = 13838
self._test_serialize(flags, miss_send_len)
@raises(AssertionError)
def test_serialize_check_flags(self):
flags = None
miss_send_len = 13838
c = OFPSetConfig(_Datapath, flags, miss_send_len)
c.serialize()
@raises(AssertionError)
def test_serialize_check_miss_send_len(self):
flags = 41186
miss_send_len = None
c = OFPSetConfig(_Datapath, flags, miss_send_len)
c.serialize()
class TestOFPPacketIn(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPPacketIn
"""
def _test_parser(self, xid, buffer_id, total_len=0,
reason=0, table_id=0, data=None):
if data is None:
data = b''
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_PACKET_IN
msg_len = ofproto.OFP_PACKET_IN_SIZE + len(data)
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
# OFP_PACKET_IN_PACK_STR
fmt = ofproto.OFP_PACKET_IN_PACK_STR
buf += pack(fmt, buffer_id, total_len, reason, table_id)
# match
buf_match = bytearray()
match = OFPMatch()
match.serialize(buf_match, 0)
buf += six.binary_type(buf_match)
# data
buf += b'\x00' * 2
buf += data
res = OFPPacketIn.parser(object, version, msg_type, msg_len,
xid, buf)
eq_(version, res.version)
eq_(msg_type, res.msg_type)
eq_(msg_len, res.msg_len)
eq_(xid, res.xid)
eq_(buffer_id, res.buffer_id)
eq_(total_len, res.total_len)
eq_(reason, res.reason)
eq_(table_id, res.table_id)
ok_(hasattr(res, 'match'))
eq_(ofproto.OFPMT_OXM, res.match.type)
if data:
eq_(data[:total_len], res.data)
def test_data_is_total_len(self):
xid = 3423224276
buffer_id = 2926809324
reason = 128
table_id = 3
data = b'PacketIn'
total_len = len(data)
self._test_parser(xid, buffer_id, total_len, reason, table_id, data)
def test_data_is_not_total_len(self):
xid = 3423224276
buffer_id = 2926809324
reason = 128
table_id = 3
data = b'PacketIn'
total_len = len(data) - 1
self._test_parser(xid, buffer_id, total_len, reason, table_id, data)
def test_parser_max(self):
        # 65535 (!H max) - 24 (OFP_PACKET_IN_SIZE, i.e. everything but data) = 65511
xid = 4294967295
buffer_id = 4294967295
reason = 255
table_id = 255
data = b'data'.ljust(65511)
total_len = len(data)
self._test_parser(xid, buffer_id, total_len, reason, table_id, data)
def test_parser_min(self):
xid = 0
buffer_id = 0
reason = ofproto.OFPR_NO_MATCH
table_id = 0
total_len = 0
self._test_parser(xid, buffer_id, total_len, reason, table_id)
def test_parser_p1(self):
data = b'data'.ljust(8)
xid = 3423224276
buffer_id = 2926809324
total_len = len(data)
reason = ofproto.OFPR_ACTION
table_id = 3
self._test_parser(xid, buffer_id, total_len, reason, table_id, data)
def test_parser_p2(self):
data = b'data'.ljust(8)
xid = 3423224276
buffer_id = 2926809324
total_len = len(data)
reason = ofproto.OFPR_INVALID_TTL
table_id = 3
self._test_parser(xid, buffer_id, total_len, reason, table_id, data)
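# A minimal illustrative sketch of the packet-in body built in
# _test_parser above: '!IHBB' carries buffer_id, total_len, reason and
# table_id, the empty OXM match serializes to a padded 8-byte TLV, and
# two explicit pad bytes separate the match from the frame data.  The
# literal 1 for OFPMT_OXM is an assumption.
def _sketch_packet_in_body(data=b'PacketIn'):
    import struct
    body = struct.pack('!IHBB', 0, len(data), 0, 0)
    match = struct.pack('!HH4x', 1, 4)               # empty match: type, len
    return body + match + b'\x00' * 2 + data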
class TestOFPFlowRemoved(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPFlowRemoved
"""
def _test_parser(self, xid, cookie, priority,
reason, table_id, duration_sec,
duration_nsec, idle_timeout, hard_timeout,
packet_count, byte_count):
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_FLOW_REMOVED
msg_len = ofproto.OFP_FLOW_REMOVED_SIZE
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
# OFP_FLOW_REMOVED_PACK_STR0
# '!QHBBIIHHQQ' ...cookie, priority, reason, table_id,
# duration_sec, duration_nsec, idle_timeout,
# hard_timeout, packet_count, byte_count
fmt = ofproto.OFP_FLOW_REMOVED_PACK_STR0
buf += pack(fmt, cookie, priority, reason, table_id,
duration_sec, duration_nsec, idle_timeout,
hard_timeout, packet_count, byte_count)
# OFP_MATCH_PACK_STR
match = OFPMatch()
buf_match = bytearray()
match.serialize(buf_match, 0)
buf += six.binary_type(buf_match)
res = OFPFlowRemoved.parser(object, version, msg_type,
msg_len, xid, buf)
eq_(version, res.version)
eq_(msg_type, res.msg_type)
eq_(msg_len, res.msg_len)
eq_(xid, res.xid)
eq_(cookie, res.cookie)
eq_(priority, res.priority)
eq_(reason, res.reason)
eq_(table_id, res.table_id)
eq_(duration_sec, res.duration_sec)
eq_(duration_nsec, res.duration_nsec)
eq_(idle_timeout, res.idle_timeout)
eq_(hard_timeout, res.hard_timeout)
eq_(packet_count, res.packet_count)
eq_(byte_count, res.byte_count)
ok_(hasattr(res, 'match'))
eq_(ofproto.OFPMT_OXM, res.match.type)
def test_parser_mid(self):
xid = 3423224276
cookie = 178378173441633860
priority = 718
reason = 128
table_id = 169
duration_sec = 2250548154
duration_nsec = 2492776995
idle_timeout = 60284
hard_timeout = 60285
packet_count = 6489108735192644493
byte_count = 7334344481123449724
self._test_parser(xid, cookie, priority,
reason, table_id, duration_sec,
duration_nsec, idle_timeout, hard_timeout,
packet_count, byte_count)
def test_parser_max(self):
xid = 4294967295
cookie = 18446744073709551615
priority = 65535
reason = 255
table_id = 255
duration_sec = 4294967295
duration_nsec = 4294967295
idle_timeout = 65535
hard_timeout = 65535
packet_count = 18446744073709551615
byte_count = 18446744073709551615
self._test_parser(xid, cookie, priority,
reason, table_id, duration_sec,
duration_nsec, idle_timeout, hard_timeout,
packet_count, byte_count)
def test_parser_min(self):
xid = 0
cookie = 0
priority = 0
reason = ofproto.OFPRR_IDLE_TIMEOUT
table_id = 0
duration_sec = 0
duration_nsec = 0
idle_timeout = 0
hard_timeout = 0
packet_count = 0
byte_count = 0
self._test_parser(xid, cookie, priority,
reason, table_id, duration_sec,
duration_nsec, idle_timeout, hard_timeout,
packet_count, byte_count)
def test_parser_p1(self):
xid = 3423224276
cookie = 178378173441633860
priority = 718
reason = ofproto.OFPRR_HARD_TIMEOUT
table_id = 169
duration_sec = 2250548154
duration_nsec = 2492776995
idle_timeout = 60284
hard_timeout = 60285
packet_count = 6489108735192644493
byte_count = 7334344481123449724
self._test_parser(xid, cookie, priority,
reason, table_id, duration_sec,
duration_nsec, idle_timeout, hard_timeout,
packet_count, byte_count)
def test_parser_p2(self):
xid = 3423224276
cookie = 178378173441633860
priority = 718
reason = ofproto.OFPRR_DELETE
table_id = 169
duration_sec = 2250548154
duration_nsec = 2492776995
idle_timeout = 60284
hard_timeout = 60285
packet_count = 6489108735192644493
byte_count = 7334344481123449724
self._test_parser(xid, cookie, priority,
reason, table_id, duration_sec,
duration_nsec, idle_timeout, hard_timeout,
packet_count, byte_count)
def test_parser_p3(self):
xid = 3423224276
cookie = 178378173441633860
priority = 718
reason = ofproto.OFPRR_GROUP_DELETE
table_id = 169
duration_sec = 2250548154
duration_nsec = 2492776995
idle_timeout = 60284
hard_timeout = 60285
packet_count = 6489108735192644493
byte_count = 7334344481123449724
self._test_parser(xid, cookie, priority,
reason, table_id, duration_sec,
duration_nsec, idle_timeout, hard_timeout,
packet_count, byte_count)
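# A minimal illustrative sketch of the flow-removed body packed above:
# '!QHBBIIHHQQ' carries cookie, priority, reason, table_id, the two
# duration fields, both timeouts and the two 64-bit counters, 40 bytes
# in all before the trailing match.
def _sketch_flow_removed_body():
    import struct
    fmt = '!QHBBIIHHQQ'
    assert struct.calcsize(fmt) == 40
    return struct.pack(fmt, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)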
class TestOFPPortStatus(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPPortStatus
"""
def _test_parser(self, xid, reason,
port_no, config, state, curr, advertised,
supported, peer, curr_speed, max_speed):
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_PORT_STATUS
msg_len = ofproto.OFP_PORT_STATUS_SIZE
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
# OFP_PORT_STATUS_PACK_STR = '!B7x' + _OFP_PORT_PACK_STR
# '!B7x'...reason, pad(7)
# OFP_PORT_PACK_STR
        # '!I4x6s2x16sIIIIIIII'... port_no, pad(4), hw_addr, pad(2),
        #                          name, config, state, curr, advertised,
        #                          supported, peer, curr_speed, max_speed
hw_addr = '80:ff:9a:e3:72:85'
name = b'name'.ljust(16)
fmt = ofproto.OFP_PORT_STATUS_PACK_STR
buf += pack(fmt, reason, port_no, addrconv.mac.text_to_bin(hw_addr),
name, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
res = OFPPortStatus.parser(object, version, msg_type, msg_len,
xid, buf)
eq_(reason, res.reason)
eq_(port_no, res.desc.port_no)
eq_(hw_addr, res.desc.hw_addr)
eq_(name, res.desc.name)
eq_(config, res.desc.config)
eq_(state, res.desc.state)
eq_(curr, res.desc.curr)
eq_(advertised, res.desc.advertised)
eq_(supported, res.desc.supported)
eq_(peer, res.desc.peer)
eq_(curr_speed, res.desc.curr_speed)
eq_(max_speed, res.desc.max_speed)
def test_parser_mid(self):
xid = 3423224276
reason = 128
port_no = 1119692796
config = 2226555987
state = 1678244809
curr = 2850556459
advertised = 2025421682
supported = 2120575149
peer = 2757463021
curr_speed = 2641353507
max_speed = 1797291672
self._test_parser(xid, reason,
port_no, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_max(self):
xid = 4294967295
reason = 255
port_no = ofproto.OFPP_ANY
config = 4294967295
state = 4294967295
curr = 4294967295
advertised = 4294967295
supported = 4294967295
peer = 4294967295
curr_speed = 4294967295
max_speed = 4294967295
self._test_parser(xid, reason,
port_no, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_min(self):
xid = 0
reason = 0
port_no = 0
config = 0
state = 0
curr = 0
advertised = 0
supported = 0
peer = 0
curr_speed = 0
max_speed = 0
self._test_parser(xid, reason,
port_no, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p1(self):
xid = 3423224276
reason = ofproto.OFPPR_DELETE
port_no = ofproto.OFPP_MAX
config = ofproto.OFPPC_PORT_DOWN
state = ofproto.OFPPS_LINK_DOWN
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_10MB_HD
self._test_parser(xid, reason,
port_no, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
def test_parser_p2(self):
xid = 3423224276
reason = ofproto.OFPPR_MODIFY
port_no = ofproto.OFPP_MAX
config = ofproto.OFPPC_PORT_DOWN
state = ofproto.OFPPS_LINK_DOWN
curr = advertised = supported \
= peer = curr_speed = max_speed \
= ofproto.OFPPF_10MB_HD
self._test_parser(xid, reason,
port_no, config, state, curr, advertised,
supported, peer, curr_speed, max_speed)
class TestOFPPacketOut(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPPacketOut
"""
def _test_init(self, in_port):
buffer_id = 0xffffffff
data = b'Message'
out_port = 0x00002ae0
actions = [OFPActionOutput(out_port, 0)]
c = OFPPacketOut(_Datapath, buffer_id, in_port, actions, data)
eq_(buffer_id, c.buffer_id)
eq_(in_port, c.in_port)
eq_(0, c.actions_len)
eq_(data, c.data)
eq_(actions, c.actions)
def test_init(self):
in_port = 0x00040455
self._test_init(in_port)
@raises(AssertionError)
def test_init_check_in_port(self):
in_port = None
self._test_init(in_port)
def _test_serialize(self, buffer_id, in_port, action_cnt=0, data=None):
actions = []
for i in range(action_cnt):
actions.append(ofproto_v1_2_parser.OFPActionOutput(i, 0))
c = OFPPacketOut(_Datapath, buffer_id, in_port, actions, data)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_PACKET_OUT, c.msg_type)
eq_(0, c.xid)
fmt = ofproto.OFP_HEADER_PACK_STR \
+ ofproto.OFP_PACKET_OUT_PACK_STR[1:] \
+ ofproto.OFP_ACTION_OUTPUT_PACK_STR[1:] * action_cnt
if data is not None:
fmt += str(len(data)) + 's'
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_PACKET_OUT)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], buffer_id)
eq_(res[5], in_port)
eq_(res[6], ofproto.OFP_ACTION_OUTPUT_SIZE * action_cnt)
for i in range(action_cnt):
index = 7 + i * 4
eq_(res[index], ofproto.OFPAT_OUTPUT)
eq_(res[index + 1], ofproto.OFP_ACTION_OUTPUT_SIZE)
eq_(res[index + 2], i)
eq_(res[index + 3], 0)
if data:
eq_(res[-1], data)
def test_serialize_true(self):
buffer_id = 0xffffffff
in_port = 0x00040455
action_cnt = 2
data = b'Message'
self._test_serialize(buffer_id, in_port, action_cnt, data)
def test_serialize_none(self):
buffer_id = 0xffffffff
in_port = 0x00040455
self._test_serialize(buffer_id, in_port)
def test_serialize_max(self):
buffer_id = 0xffffffff
in_port = 4294967295
action_cnt = 1
data = b'Message'.ljust(65495)
self._test_serialize(buffer_id, in_port, action_cnt, data)
def test_serialize_min(self):
buffer_id = 0
in_port = 0
self._test_serialize(buffer_id, in_port)
def test_serialize_p1(self):
buffer_id = 2147483648
in_port = ofproto.OFPP_CONTROLLER
self._test_serialize(buffer_id, in_port)
@raises(AssertionError)
def test_serialize_check_buffer_id(self):
buffer_id = 2147483648
in_port = 1
action_cnt = 0
data = b'DATA'
self._test_serialize(buffer_id, in_port, action_cnt, data)
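# A minimal illustrative sketch of the actions_len field checked above
# (res[6]): packet-out reports the byte length of its action list, and
# each OFPActionOutput serializes to a fixed 16 bytes ('!HHIH6x').
def _sketch_actions_len(action_cnt=2):
    import struct
    assert struct.calcsize('!HHIH6x') == 16          # OFP_ACTION_OUTPUT_SIZE
    return 16 * action_cnt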
class TestOFPFlowMod(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPFlowMod
"""
def test_init(self):
# OFP_FLOW_MOD_PACK_STR0
# '!QQBBHHHIIIH2x'...cookie, cookie_mask, table_id, command,
# idle_timeout, hard_timeout, priority, buffer_id,
# out_port, out_group, flags
cookie = 2127614848199081640
cookie_mask = 2127614848199081641
table_id = 3
command = 0
idle_timeout = 62317
hard_timeout = 7365
priority = 40163
buffer_id = 4037115955
out_port = 65037
out_group = 6606
flags = 135
instructions = [OFPInstructionGotoTable(table_id)]
in_port = 1
match = OFPMatch()
match.set_in_port(in_port)
c = OFPFlowMod(_Datapath, cookie, cookie_mask, table_id, command,
idle_timeout, hard_timeout, priority, buffer_id,
out_port, out_group, flags, match, instructions)
eq_(cookie, c.cookie)
eq_(cookie_mask, c.cookie_mask)
eq_(table_id, c.table_id)
eq_(command, c.command)
eq_(idle_timeout, c.idle_timeout)
eq_(hard_timeout, c.hard_timeout)
eq_(priority, c.priority)
eq_(buffer_id, c.buffer_id)
eq_(out_port, c.out_port)
eq_(out_group, c.out_group)
eq_(flags, c.flags)
eq_(in_port, c.match._flow.in_port)
eq_(instructions[0], c.instructions[0])
def _test_serialize(self, cookie, cookie_mask, table_id,
command, idle_timeout, hard_timeout,
priority, buffer_id, out_port,
out_group, flags, inst_cnt=0):
dl_type = 0x0800
match = OFPMatch()
match.set_dl_type(dl_type)
insts = []
for i in range(inst_cnt):
insts.append(OFPInstructionGotoTable(i))
c = OFPFlowMod(_Datapath, cookie, cookie_mask, table_id, command,
idle_timeout, hard_timeout, priority, buffer_id,
out_port, out_group, flags, match, insts)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_FLOW_MOD, c.msg_type)
eq_(0, c.xid)
fmt = ofproto.OFP_HEADER_PACK_STR \
+ ofproto.OFP_FLOW_MOD_PACK_STR0[1:] \
+ 'HHHBB' \
+ MTEthType.pack_str[1:] + '6x' \
+ ofproto.OFP_INSTRUCTION_GOTO_TABLE_PACK_STR[1:] * inst_cnt
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_FLOW_MOD)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], cookie)
eq_(res[5], cookie_mask)
eq_(res[6], table_id)
eq_(res[7], command)
eq_(res[8], idle_timeout)
eq_(res[9], hard_timeout)
eq_(res[10], priority)
eq_(res[11], buffer_id)
eq_(res[12], out_port)
eq_(res[13], out_group)
eq_(res[14], flags)
        # OFP_MATCH (type, length, class, field/hasmask, oxm length, eth_type)
        eq_(res[15], ofproto.OFPMT_OXM)
        eq_(res[16], 10)  # 4-byte match header + 4-byte OXM header + 2-byte eth_type
eq_(res[17], ofproto.OFPXMC_OPENFLOW_BASIC)
eq_(res[18] >> 1, ofproto.OFPXMT_OFB_ETH_TYPE)
eq_(res[18] & 0b0001, 0)
eq_(res[19], calcsize(MTEthType.pack_str))
eq_(res[20], dl_type)
# insts (type, length, table_id)
for i in range(inst_cnt):
index = 21 + 3 * i
eq_(res[index], ofproto.OFPIT_GOTO_TABLE)
eq_(res[index + 1], ofproto.OFP_INSTRUCTION_GOTO_TABLE_SIZE)
eq_(res[index + 2], i)
def test_serialize_mid(self):
cookie = 2127614848199081640
cookie_mask = 2127614848199081641
table_id = 3
command = 128
idle_timeout = 62317
hard_timeout = 7365
priority = 40163
buffer_id = 4037115955
out_port = 65037
out_group = 6606
flags = 135
inst_cnt = 1
self._test_serialize(cookie, cookie_mask, table_id,
command, idle_timeout, hard_timeout,
priority, buffer_id, out_port,
out_group, flags, inst_cnt)
def test_serialize_max(self):
cookie = 18446744073709551615
cookie_mask = 18446744073709551615
table_id = 255
command = 255
idle_timeout = 65535
hard_timeout = 65535
priority = 65535
buffer_id = 0xffffffff
out_port = 0xffffffff
out_group = 0xffffffff
flags = 65535
inst_cnt = 0xff
self._test_serialize(cookie, cookie_mask, table_id,
command, idle_timeout, hard_timeout,
priority, buffer_id, out_port,
out_group, flags, inst_cnt)
def test_serialize_min(self):
cookie = 0
cookie_mask = 0
table_id = 0
command = ofproto.OFPFC_ADD
idle_timeout = 0
hard_timeout = 0
priority = 0
buffer_id = 0
out_port = 0
out_group = 0
flags = 0
self._test_serialize(cookie, cookie_mask, table_id,
command, idle_timeout, hard_timeout,
priority, buffer_id, out_port,
out_group, flags)
def test_serialize_p1(self):
cookie = 2127614848199081640
cookie_mask = 2127614848199081641
table_id = 3
command = 1
idle_timeout = 62317
hard_timeout = 7365
priority = 40163
buffer_id = 4037115955
out_port = 65037
out_group = 6606
flags = 1 << 0
self._test_serialize(cookie, cookie_mask, table_id,
command, idle_timeout, hard_timeout,
priority, buffer_id, out_port,
out_group, flags)
def test_serialize_p2(self):
cookie = 2127614848199081640
cookie_mask = 2127614848199081641
table_id = 3
command = 2
idle_timeout = 62317
hard_timeout = 7365
priority = 40163
buffer_id = 4037115955
out_port = 65037
out_group = 6606
flags = 1 << 0
self._test_serialize(cookie, cookie_mask, table_id,
command, idle_timeout, hard_timeout,
priority, buffer_id, out_port,
out_group, flags)
def test_serialize_p3(self):
cookie = 2127614848199081640
cookie_mask = 2127614848199081641
table_id = 3
command = 3
idle_timeout = 62317
hard_timeout = 7365
priority = 40163
buffer_id = 4037115955
out_port = 65037
out_group = 6606
flags = 1 << 1
self._test_serialize(cookie, cookie_mask, table_id,
command, idle_timeout, hard_timeout,
priority, buffer_id, out_port,
out_group, flags)
def test_serialize_p4(self):
cookie = 2127614848199081640
cookie_mask = 2127614848199081641
table_id = 3
command = 4
idle_timeout = 62317
hard_timeout = 7365
priority = 40163
buffer_id = 4037115955
out_port = 65037
out_group = 6606
flags = 1 << 2
self._test_serialize(cookie, cookie_mask, table_id,
command, idle_timeout, hard_timeout,
priority, buffer_id, out_port,
out_group, flags)
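# A minimal illustrative sketch of the single-field OXM match asserted
# in _test_serialize above: a 4-byte match header (type=OFPMT_OXM,
# len=10) followed by one eth_type TLV (class 0x8000, field 5 shifted
# past the hasmask bit, 2-byte value) and zero padding to the next
# 8-byte boundary.  The literals mirror OFPMT_OXM = 1,
# OFPXMC_OPENFLOW_BASIC = 0x8000 and OFPXMT_OFB_ETH_TYPE = 5 and are
# assumptions here.
def _sketch_oxm_eth_type(dl_type=0x0800):
    import struct
    tlv = struct.pack('!HBBH', 0x8000, 5 << 1, 2, dl_type)
    match = struct.pack('!HH', 1, 4 + len(tlv)) + tlv
    pad = (8 - len(match) % 8) % 8                   # pads 10 -> 16 bytes
    return match + b'\x00' * pad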
class TestOFPInstructionGotoTable(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPInstructionGotoTable
"""
# OFP_INSTRUCTION_GOTO_TABLE_PACK_STR
# '!HHB3x'...type, len, table_id, pad(3)
type_ = ofproto.OFPIT_GOTO_TABLE
len_ = ofproto.OFP_INSTRUCTION_GOTO_TABLE_SIZE
fmt = ofproto.OFP_INSTRUCTION_GOTO_TABLE_PACK_STR
def test_init(self):
table_id = 3
c = OFPInstructionGotoTable(table_id)
eq_(self.type_, c.type)
eq_(self.len_, c.len)
eq_(table_id, c.table_id)
def _test_parser(self, table_id):
buf = pack(self.fmt, self.type_, self.len_, table_id)
res = OFPInstructionGotoTable.parser(buf, 0)
eq_(res.len, self.len_)
eq_(res.type, self.type_)
eq_(res.table_id, table_id)
def test_parser_mid(self):
self._test_parser(3)
def test_parser_max(self):
self._test_parser(255)
def test_parser_min(self):
self._test_parser(0)
def _test_serialize(self, table_id):
c = OFPInstructionGotoTable(table_id)
buf = bytearray()
c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
eq_(res[2], table_id)
def test_serialize_mid(self):
self._test_serialize(3)
def test_serialize_max(self):
self._test_serialize(255)
def test_serialize_min(self):
self._test_serialize(0)
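# A minimal illustrative sketch of the goto-table instruction TLV
# checked above: '!HHB3x' packs type, len and table_id plus 3 pad bytes
# into a fixed 8-byte instruction.  The literal 1 for OFPIT_GOTO_TABLE
# is an assumption.
def _sketch_goto_table_tlv(table_id=3):
    import struct
    tlv = struct.pack('!HHB3x', 1, 8, table_id)
    assert len(tlv) == 8
    return tlv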
class TestOFPInstructionWriteMetadata(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPInstructionWriteMetadata
"""
# OFP_INSTRUCTION_WRITE_METADATA_PACK_STR
# '!HH4xQQ'...type, len, pad(4), metadata, metadata_mask
type_ = ofproto.OFPIT_WRITE_METADATA
len_ = ofproto.OFP_INSTRUCTION_WRITE_METADATA_SIZE
metadata = 0x1212121212121212
metadata_mask = 0xff00ff00ff00ff00
fmt = ofproto.OFP_INSTRUCTION_WRITE_METADATA_PACK_STR
def test_init(self):
c = OFPInstructionWriteMetadata(self.metadata,
self.metadata_mask)
eq_(self.type_, c.type)
eq_(self.len_, c.len)
eq_(self.metadata, c.metadata)
eq_(self.metadata_mask, c.metadata_mask)
def _test_parser(self, metadata, metadata_mask):
buf = pack(self.fmt, self.type_, self.len_,
metadata, metadata_mask)
res = OFPInstructionWriteMetadata.parser(buf, 0)
eq_(res.len, self.len_)
eq_(res.type, self.type_)
eq_(res.metadata, metadata)
eq_(res.metadata_mask, metadata_mask)
def test_parser_metadata_mid(self):
self._test_parser(self.metadata, self.metadata_mask)
def test_parser_metadata_max(self):
metadata = 0xffffffffffffffff
self._test_parser(metadata, self.metadata_mask)
def test_parser_metadata_min(self):
metadata = 0
self._test_parser(metadata, self.metadata_mask)
def test_parser_metadata_mask_max(self):
metadata_mask = 0xffffffffffffffff
self._test_parser(self.metadata, metadata_mask)
def test_parser_metadata_mask_min(self):
metadata_mask = 0
self._test_parser(self.metadata, metadata_mask)
def _test_serialize(self, metadata, metadata_mask):
c = OFPInstructionWriteMetadata(metadata,
metadata_mask)
buf = bytearray()
c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
eq_(res[2], metadata)
eq_(res[3], metadata_mask)
def test_serialize_metadata_mid(self):
self._test_serialize(self.metadata, self.metadata_mask)
def test_serialize_metadata_max(self):
metadata = 0xffffffffffffffff
self._test_serialize(metadata, self.metadata_mask)
def test_serialize_metadata_min(self):
metadata = 0
self._test_serialize(metadata, self.metadata_mask)
def test_serialize_metadata_mask_max(self):
metadata_mask = 0xffffffffffffffff
self._test_serialize(self.metadata, metadata_mask)
def test_serialize_metadata_mask_min(self):
metadata_mask = 0
self._test_serialize(self.metadata, metadata_mask)
class TestOFPInstructionActions(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPInstructionActions
"""
# OFP_INSTRUCTION_ACTIONS_PACK_STR
# '!HH4x'...type, len, pad(4)
type_ = ofproto.OFPIT_WRITE_ACTIONS
len_ = ofproto.OFP_INSTRUCTION_ACTIONS_SIZE \
+ ofproto.OFP_ACTION_OUTPUT_SIZE
fmt = ofproto.OFP_INSTRUCTION_ACTIONS_PACK_STR
buf = pack(fmt, type_, len_)
# OFP_ACTION (OFP_ACTION_OUTPUT)
port = 0x00002ae0
max_len = ofproto.OFP_ACTION_OUTPUT_SIZE
actions = [OFPActionOutput(port, max_len)]
buf_actions = bytearray()
actions[0].serialize(buf_actions, 0)
buf += six.binary_type(buf_actions)
def test_init(self):
c = OFPInstructionActions(self.type_, self.actions)
eq_(self.type_, c.type)
eq_(self.actions, c.actions)
def _test_parser(self, action_cnt):
# OFP_INSTRUCTION_ACTIONS_PACK_STR
# '!HH4x'...type, len, pad(4)
len_ = ofproto.OFP_INSTRUCTION_ACTIONS_SIZE \
+ (ofproto.OFP_ACTION_OUTPUT_SIZE * action_cnt)
fmt = ofproto.OFP_INSTRUCTION_ACTIONS_PACK_STR
buf = pack(fmt, self.type_, len_)
actions = []
for a in range(action_cnt):
# OFP_ACTION (OFP_ACTION_OUTPUT)
port = a
action = OFPActionOutput(port, self.max_len)
actions.append(action)
buf_actions = bytearray()
actions[a].serialize(buf_actions, 0)
buf += six.binary_type(buf_actions)
res = OFPInstructionActions.parser(buf, 0)
        # len_ = 8 (fixed instruction header) + 16 * action_cnt
eq_(res.len, len_)
eq_(res.type, self.type_)
        # len_ = 8 + 16 * action_cnt must fit the 16-bit length field
        # (<= 65535 bytes), so action_cnt <= 4095
for a in range(action_cnt):
eq_(res.actions[a].type, actions[a].type)
eq_(res.actions[a].len, actions[a].len)
eq_(res.actions[a].port, actions[a].port)
eq_(res.actions[a].max_len, actions[a].max_len)
def test_parser_mid(self):
self._test_parser(2047)
def test_parser_max(self):
self._test_parser(4095)
def test_parser_min(self):
self._test_parser(0)
def _test_serialize(self, action_cnt):
# OFP_INSTRUCTION_ACTIONS_PACK_STR
# '!HH4x'...type, len, pad(4)
len_ = ofproto.OFP_INSTRUCTION_ACTIONS_SIZE \
+ (ofproto.OFP_ACTION_OUTPUT_SIZE * action_cnt)
actions = []
for a in range(action_cnt):
# OFP_ACTION (OFP_ACTION_OUTPUT)
port = a
action = OFPActionOutput(port, self.max_len)
actions.append(action)
c = OFPInstructionActions(self.type_, actions)
buf = bytearray()
c.serialize(buf, 0)
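# build one big-endian format string covering the instruction header
# plus every serialized action, so the whole buffer unpacks in one call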
fmt = '!' \
+ ofproto.OFP_INSTRUCTION_ACTIONS_PACK_STR.replace('!', '')
for a in range(action_cnt):
fmt += ofproto.OFP_ACTION_OUTPUT_PACK_STR.replace('!', '')
res = struct.unpack(fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], len_)
for a in range(action_cnt):
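# res[0:2] hold the instruction type and len; each output action
# then contributes four fields (type, len, port, max_len)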
d = 2 + a * 4
eq_(res[d], actions[a].type)
eq_(res[d + 1], actions[a].len)
eq_(res[d + 2], actions[a].port)
eq_(res[d + 3], actions[a].max_len)
def test_serialize_mid(self):
self._test_serialize(2047)
def test_serialize_max(self):
self._test_serialize(4095)
def test_serialize_min(self):
self._test_serialize(0)
class TestOFPActionHeader(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionHeader
"""
def test_init(self):
# OFP_ACTION_HEADER_PACK_STR
# '!HH4x'...type, len, pad(4)
type_ = ofproto.OFPAT_OUTPUT
len_ = ofproto.OFP_ACTION_HEADER_SIZE
c = OFPActionHeader(type_, len_)
eq_(type_, c.type)
eq_(len_, c.len)
def _test_serialize(self, type_, len_):
# OFP_ACTION_HEADER_PACK_STR
# '!HH4x'...type, len, pad(4)
c = OFPActionHeader(type_, len_)
buf = bytearray()
c.serialize(buf, 0)
fmt = ofproto.OFP_ACTION_HEADER_PACK_STR
res = struct.unpack(fmt, six.binary_type(buf))
eq_(res[0], type_)
eq_(res[1], len_)
def test_serialize_mid(self):
type_ = 11
len_ = 8
self._test_serialize(type_, len_)
def test_serialize_max(self):
type_ = 0xffff
len_ = 0xffff
self._test_serialize(type_, len_)
def test_serialize_min(self):
type_ = 0
len_ = 0
self._test_serialize(type_, len_)
class TestOFPActionOutput(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionOutput
"""
# OFP_ACTION_OUTPUT_PACK_STR
# '!HHIH6x'...type, len, port, max_len, pad(6)
type_ = ofproto.OFPAT_OUTPUT
len_ = ofproto.OFP_ACTION_OUTPUT_SIZE
def test_init(self):
port = 6606
max_len = 1500
c = OFPActionOutput(port, max_len)
eq_(port, c.port)
eq_(max_len, c.max_len)
def _test_parser(self, port, max_len):
fmt = ofproto.OFP_ACTION_OUTPUT_PACK_STR
buf = pack(fmt, self.type_, self.len_, port, max_len)
res = OFPActionOutput.parser(buf, 0)
eq_(res.len, self.len_)
eq_(res.type, self.type_)
eq_(res.port, port)
eq_(res.max_len, max_len)
def test_parser_mid(self):
port = 6606
max_len = 16
self._test_parser(port, max_len)
def test_parser_max(self):
port = 4294967295
max_len = 0xffff
self._test_parser(port, max_len)
def test_parser_min(self):
port = 0
max_len = 0
self._test_parser(port, max_len)
def test_parser_p1(self):
port = 6606
max_len = 0xffe5
self._test_parser(port, max_len)
def _test_serialize(self, port, max_len):
c = OFPActionOutput(port, max_len)
buf = bytearray()
c.serialize(buf, 0)
fmt = ofproto.OFP_ACTION_OUTPUT_PACK_STR
res = struct.unpack(fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
eq_(res[2], port)
eq_(res[3], max_len)
def test_serialize_mid(self):
port = 6606
max_len = 16
self._test_serialize(port, max_len)
def test_serialize_max(self):
port = 4294967295
max_len = 0xffff
self._test_serialize(port, max_len)
def test_serialize_min(self):
port = 0
max_len = 0
self._test_serialize(port, max_len)
def test_serialize_p1(self):
port = 6606
max_len = 0xffe5
self._test_serialize(port, max_len)
class TestOFPActionGroup(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionGroup
"""
# OFP_ACTION_GROUP_PACK_STR
# '!HHI'...type, len, group_id
type_ = ofproto.OFPAT_GROUP
len_ = ofproto.OFP_ACTION_GROUP_SIZE
group_id = 6606
fmt = ofproto.OFP_ACTION_GROUP_PACK_STR
def test_init(self):
c = OFPActionGroup(self.group_id)
eq_(self.group_id, c.group_id)
def _test_parser(self, group_id):
buf = pack(self.fmt, self.type_, self.len_, group_id)
res = OFPActionGroup.parser(buf, 0)
eq_(res.len, self.len_)
eq_(res.type, self.type_)
eq_(res.group_id, group_id)
def test_parser_mid(self):
self._test_parser(self.group_id)
def test_parser_max(self):
self._test_parser(4294967295)
def test_parser_min(self):
self._test_parser(0)
def _test_serialize(self, group_id):
c = OFPActionGroup(group_id)
buf = bytearray()
c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
eq_(res[2], group_id)
def test_serialize_mid(self):
self._test_serialize(self.group_id)
def test_serialize_max(self):
self._test_serialize(4294967295)
def test_serialize_min(self):
self._test_serialize(0)
class TestOFPActionSetQueue(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionSetQueue
"""
# OFP_ACTION_SET_QUEUE_PACK_STR
# '!HHI'...type, len, queue_id
type_ = ofproto.OFPAT_SET_QUEUE
len_ = ofproto.OFP_ACTION_SET_QUEUE_SIZE
queue_id = 6606
fmt = ofproto.OFP_ACTION_SET_QUEUE_PACK_STR
def test_init(self):
c = OFPActionSetQueue(self.queue_id)
eq_(self.queue_id, c.queue_id)
def _test_parser(self, queue_id):
buf = pack(self.fmt, self.type_, self.len_, queue_id)
res = OFPActionSetQueue.parser(buf, 0)
eq_(res.len, self.len_)
eq_(res.type, self.type_)
eq_(res.queue_id, queue_id)
def test_parser_mid(self):
self._test_parser(self.queue_id)
def test_parser_max(self):
self._test_parser(4294967295)
def test_parser_min(self):
self._test_parser(0)
def _test_serialize(self, queue_id):
c = OFPActionSetQueue(queue_id)
buf = bytearray()
c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
eq_(res[2], queue_id)
def test_serialize_mid(self):
self._test_serialize(self.queue_id)
def test_serialize_max(self):
self._test_serialize(4294967295)
def test_serialize_min(self):
self._test_serialize(0)
class TestOFPActionSetMplsTtl(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionSetMplsTtl
"""
# OFP_ACTION_MPLS_TTL_PACK_STR
# '!HHB3x'...type, len, mpls_ttl, pad(3)
type_ = ofproto.OFPAT_SET_MPLS_TTL
len_ = ofproto.OFP_ACTION_MPLS_TTL_SIZE
mpls_ttl = 254
fmt = ofproto.OFP_ACTION_MPLS_TTL_PACK_STR
def test_init(self):
c = OFPActionSetMplsTtl(self.mpls_ttl)
eq_(self.mpls_ttl, c.mpls_ttl)
def _test_parser(self, mpls_ttl):
buf = pack(self.fmt, self.type_, self.len_, mpls_ttl)
res = OFPActionSetMplsTtl.parser(buf, 0)
eq_(res.len, self.len_)
eq_(res.type, self.type_)
eq_(res.mpls_ttl, mpls_ttl)
def test_parser_mid(self):
self._test_parser(self.mpls_ttl)
def test_parser_max(self):
self._test_parser(255)
def test_parser_min(self):
self._test_parser(0)
def _test_serialize(self, mpls_ttl):
c = OFPActionSetMplsTtl(mpls_ttl)
buf = bytearray()
c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
eq_(res[2], mpls_ttl)
def test_serialize_mid(self):
self._test_serialize(self.mpls_ttl)
def test_serialize_max(self):
self._test_serialize(255)
def test_serialize_min(self):
self._test_serialize(0)
class TestOFPActionDecMplsTtl(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionDecMplsTtl
"""
type_ = ofproto.OFPAT_DEC_MPLS_TTL
len_ = ofproto.OFP_ACTION_MPLS_TTL_SIZE
fmt = ofproto.OFP_ACTION_HEADER_PACK_STR
buf = pack(fmt, type_, len_)
c = OFPActionDecMplsTtl()
def test_parser(self):
res = self.c.parser(self.buf, 0)
eq_(res.len, self.len_)
eq_(res.type, self.type_)
def test_serialize(self):
buf = bytearray()
self.c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
class TestOFPActionSetNwTtl(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionSetNwTtl
"""
# OFP_ACTION_NW_TTL_PACK_STR
# '!HHB3x'...type, len, nw_ttl, pad(3)
type_ = ofproto.OFPAT_SET_NW_TTL
len_ = ofproto.OFP_ACTION_NW_TTL_SIZE
nw_ttl = 240
fmt = ofproto.OFP_ACTION_NW_TTL_PACK_STR
def test_init(self):
c = OFPActionSetNwTtl(self.nw_ttl)
eq_(self.nw_ttl, c.nw_ttl)
def _test_parser(self, nw_ttl):
buf = pack(self.fmt, self.type_, self.len_, nw_ttl)
res = OFPActionSetNwTtl.parser(buf, 0)
eq_(res.type, self.type_)
eq_(res.len, self.len_)
eq_(res.nw_ttl, nw_ttl)
def test_parser_mid(self):
self._test_parser(self.nw_ttl)
def test_parser_max(self):
self._test_parser(255)
def test_parser_min(self):
self._test_parser(0)
def _test_serialize(self, nw_ttl):
c = OFPActionSetNwTtl(nw_ttl)
buf = bytearray()
c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
eq_(res[2], nw_ttl)
def test_serialize_mid(self):
self._test_serialize(self.nw_ttl)
def test_serialize_max(self):
self._test_serialize(255)
def test_serialize_min(self):
self._test_serialize(0)
class TestOFPActionDecNwTtl(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionDecNwTtl
"""
type_ = ofproto.OFPAT_DEC_NW_TTL
len_ = ofproto.OFP_ACTION_NW_TTL_SIZE
fmt = ofproto.OFP_ACTION_HEADER_PACK_STR
buf = pack(fmt, type_, len_)
c = OFPActionDecNwTtl()
def test_parser(self):
res = self.c.parser(self.buf, 0)
eq_(res.len, self.len_)
eq_(res.type, self.type_)
def test_serialize(self):
buf = bytearray()
self.c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
class TestOFPActionCopyTtlOut(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionCopyTtlOut
"""
type_ = ofproto.OFPAT_COPY_TTL_OUT
len_ = ofproto.OFP_ACTION_HEADER_SIZE
fmt = ofproto.OFP_ACTION_HEADER_PACK_STR
buf = pack(fmt, type_, len_)
c = OFPActionCopyTtlOut()
def test_parser(self):
res = self.c.parser(self.buf, 0)
eq_(res.len, self.len_)
eq_(res.type, self.type_)
def test_serialize(self):
buf = bytearray()
self.c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
class TestOFPActionCopyTtlIn(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionCopyTtlIn
"""
# OFP_ACTION_HEADER_PACK_STR
# '!HH4x'...type, len, pad(4)
type_ = ofproto.OFPAT_COPY_TTL_IN
len_ = ofproto.OFP_ACTION_HEADER_SIZE
fmt = ofproto.OFP_ACTION_HEADER_PACK_STR
buf = pack(fmt, type_, len_)
c = OFPActionCopyTtlIn()
def test_parser(self):
res = self.c.parser(self.buf, 0)
eq_(res.len, self.len_)
eq_(res.type, self.type_)
def test_serialize(self):
buf = bytearray()
self.c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
class TestOFPActionPushVlan(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionPushVlan
"""
# OFP_ACTION_PUSH_PACK_STR
# '!HHH2x'...type, len, ethertype, pad(2)
type_ = ofproto.OFPAT_PUSH_VLAN
len_ = ofproto.OFP_ACTION_PUSH_SIZE
fmt = ofproto.OFP_ACTION_PUSH_PACK_STR
def test_init(self):
ethertype = 0x8100
c = OFPActionPushVlan(ethertype)
eq_(ethertype, c.ethertype)
def _test_parser(self, ethertype):
buf = pack(self.fmt, self.type_, self.len_, ethertype)
res = OFPActionPushVlan.parser(buf, 0)
eq_(res.type, self.type_)
eq_(res.len, self.len_)
eq_(res.ethertype, ethertype)
def test_parser_mid(self):
self._test_parser(0x8100)
def test_parser_max(self):
self._test_parser(0xffff)
def test_parser_min(self):
self._test_parser(0)
def _test_serialize(self, ethertype):
c = OFPActionPushVlan(ethertype)
buf = bytearray()
c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
eq_(res[2], ethertype)
def test_serialize_mid(self):
self._test_serialize(0x8100)
def test_serialize_max(self):
self._test_serialize(0xffff)
def test_serialize_min(self):
self._test_serialize(0)
class TestOFPActionPushMpls(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionPushMpls
"""
# OFP_ACTION_PUSH_PACK_STR
# '!HHH2x'...type, len, ethertype, pad(2)
type_ = ofproto.OFPAT_PUSH_MPLS
len_ = ofproto.OFP_ACTION_PUSH_SIZE
fmt = ofproto.OFP_ACTION_PUSH_PACK_STR
def test_init(self):
ethertype = 0x8100
c = OFPActionPushMpls(ethertype)
eq_(ethertype, c.ethertype)
def _test_parser(self, ethertype):
buf = pack(self.fmt, self.type_, self.len_, ethertype)
res = OFPActionPushMpls.parser(buf, 0)
eq_(res.type, self.type_)
eq_(res.len, self.len_)
eq_(res.ethertype, ethertype)
def test_parser_mid(self):
self._test_parser(0x8100)
def test_parser_max(self):
self._test_parser(0xffff)
def test_parser_min(self):
self._test_parser(0)
def _test_serialize(self, ethertype):
c = OFPActionPushMpls(ethertype)
buf = bytearray()
c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
eq_(res[2], ethertype)
def test_serialize_mid(self):
self._test_serialize(0x8100)
def test_serialize_max(self):
self._test_serialize(0xffff)
def test_serialize_min(self):
self._test_serialize(0)
class TestOFPActionPopVlan(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionPopVlan
"""
# OFP_ACTION_HEADER_PACK_STR
# '!HH4x'...type, len, pad(4)
type_ = ofproto.OFPAT_POP_VLAN
len_ = ofproto.OFP_ACTION_HEADER_SIZE
fmt = ofproto.OFP_ACTION_HEADER_PACK_STR
buf = pack(fmt, type_, len_)
c = OFPActionPopVlan()
def test_parser(self):
res = self.c.parser(self.buf, 0)
eq_(self.type_, res.type)
eq_(self.len_, res.len)
def test_serialize(self):
buf = bytearray()
self.c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
class TestOFPActionPopMpls(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionPopMpls
"""
# OFP_ACTION_POP_MPLS_PACK_STR
# '!HHH2x'...type, len, ethertype, pad(2)
type_ = ofproto.OFPAT_POP_MPLS
len_ = ofproto.OFP_ACTION_POP_MPLS_SIZE
fmt = ofproto.OFP_ACTION_POP_MPLS_PACK_STR
def test_init(self):
ethertype = 0x8100
c = OFPActionPopMpls(ethertype)
eq_(ethertype, c.ethertype)
def _test_parser(self, ethertype):
buf = pack(self.fmt, self.type_, self.len_, ethertype)
res = OFPActionPopMpls.parser(buf, 0)
eq_(res.type, self.type_)
eq_(res.len, self.len_)
eq_(res.ethertype, ethertype)
def test_parser_mid(self):
self._test_parser(0x8100)
def test_parser_max(self):
self._test_parser(0xffff)
def test_parser_min(self):
self._test_parser(0)
def _test_serialize(self, ethertype):
c = OFPActionPopMpls(ethertype)
buf = bytearray()
c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
eq_(res[2], ethertype)
def test_serialize_mid(self):
self._test_serialize(0x8100)
def test_serialize_max(self):
self._test_serialize(0xffff)
def test_serialize_min(self):
self._test_serialize(0)
class TestOFPActionSetField(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionSetField
"""
type_ = ofproto.OFPAT_SET_FIELD
header = ofproto.OXM_OF_IN_PORT
in_port = 6606
field = MTInPort(header, in_port)
length = ofproto.OFP_ACTION_SET_FIELD_SIZE + field.oxm_len()
len_ = utils.round_up(length, 8)
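# a set-field action is zero-padded out to a multiple of 8 bytes,
# hence the round-up here and the trailing '4x' pad in the format below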
fmt = '!HHII4x'
buf = pack(fmt, type_, len_, header, in_port)
c = OFPActionSetField(field)
def test_init(self):
eq_(self.field, self.c.field)
def test_parser(self):
res = self.c.parser(self.buf, 0)
eq_(res.type, self.type_)
eq_(res.len, self.len_)
eq_(res.field.header, self.header)
eq_(res.field.value, self.in_port)
def test_serialize(self):
buf = bytearray()
self.c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
eq_(res[2], self.header)
eq_(res[3], self.in_port)
class TestOFPActionExperimenter(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPActionExperimenter
"""
# OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR v1.2
# '!HHI'...type, len, experimenter
type_ = ofproto.OFPAT_EXPERIMENTER
len_ = ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
fmt = ofproto.OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR
def test_init(self):
experimenter = 4294967295
c = OFPActionExperimenter(experimenter)
eq_(experimenter, c.experimenter)
def _test_parser(self, experimenter):
buf = pack(self.fmt, self.type_, self.len_, experimenter)
res = OFPActionExperimenter.parser(buf, 0)
eq_(res.type, self.type_)
eq_(res.len, self.len_)
eq_(res.experimenter, experimenter)
def test_parser_mid(self):
experimenter = 2147483648
self._test_parser(experimenter)
def test_parser_max(self):
experimenter = 4294967295
self._test_parser(experimenter)
def test_parser_min(self):
experimenter = 0
self._test_parser(experimenter)
def _test_serialize(self, experimenter):
c = OFPActionExperimenter(experimenter)
buf = bytearray()
c.serialize(buf, 0)
res = struct.unpack(self.fmt, six.binary_type(buf))
eq_(res[0], self.type_)
eq_(res[1], self.len_)
eq_(res[2], experimenter)
def test_serialize_mid(self):
experimenter = 2147483648
self._test_serialize(experimenter)
def test_serialize_max(self):
experimenter = 4294967295
self._test_serialize(experimenter)
def test_serialize_min(self):
experimenter = 0
self._test_serialize(experimenter)
class TestOFPBucket(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPBucket
"""
def test_init(self):
# OFP_BUCKET_PACK_STR
# '!HHII4x'...len, weight, watch_port, watch_group, pad(4)
weight = 4386
watch_port = 6606
watch_group = 3
# OFP_ACTION (OFP_ACTION_OUTPUT)
port = 3
max_len = 1500
actions = [OFPActionOutput(port, max_len)]
c = OFPBucket(weight, watch_port, watch_group, actions)
eq_(weight, c.weight)
eq_(watch_port, c.watch_port)
eq_(watch_group, c.watch_group)
eq_(1, len(c.actions))
eq_(port, c.actions[0].port)
eq_(max_len, c.actions[0].max_len)
def _test_parser(self, weight, watch_port, watch_group, action_cnt):
# OFP_BUCKET_PACK_STR
# '!HHII4x'...len, weight, watch_port, watch_group, pad(4)
len_ = ofproto.OFP_BUCKET_SIZE \
+ (ofproto.OFP_ACTION_OUTPUT_SIZE * action_cnt)
fmt = ofproto.OFP_BUCKET_PACK_STR
buf = pack(fmt, len_, weight, watch_port, watch_group)
actions = []
for a in range(action_cnt):
# OFP_ACTION (OFP_ACTION_OUTPUT)
port = a
max_len = ofproto.OFP_ACTION_OUTPUT_SIZE
action = OFPActionOutput(port, max_len)
actions.append(action)
buf_actions = bytearray()
actions[a].serialize(buf_actions, 0)
buf += six.binary_type(buf_actions)
res = OFPBucket.parser(buf, 0)
# the fixed bucket header is 16 bytes ('!HHII4x')
eq_(weight, res.weight)
eq_(watch_port, res.watch_port)
eq_(watch_group, res.watch_group)
# 16 + 16 * action_cnt < 65535 byte
# action_cnt <= 4094
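# (the 16-bit len field caps the bucket at 65535 bytes; after the
# 16-byte bucket header, (65535 - 16) // 16 = 4094 output actions fit)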
for a in range(action_cnt):
eq_(actions[a].type, res.actions[a].type)
eq_(actions[a].len, res.actions[a].len)
eq_(actions[a].port, res.actions[a].port)
eq_(actions[a].max_len, res.actions[a].max_len)
def test_parser_mid(self):
weight = 4386
watch_port = 6606
watch_group = 3
action_cnt = 2047
self._test_parser(weight, watch_port,
watch_group, action_cnt)
def test_parser_max(self):
weight = 65535
watch_port = 4294967295
watch_group = 4294967295
action_cnt = 4094
self._test_parser(weight, watch_port,
watch_group, action_cnt)
def test_parser_min(self):
weight = 0
watch_port = 0
watch_group = 0
action_cnt = 0
self._test_parser(weight, watch_port,
watch_group, action_cnt)
def _test_serialize(self, weight, watch_port, watch_group,
action_cnt):
# OFP_BUCKET_PACK_STR
# '!HHII4x'...len, weight, watch_port, watch_group, pad(4)
len_ = ofproto.OFP_BUCKET_SIZE \
+ (ofproto.OFP_ACTION_OUTPUT_SIZE * action_cnt)
actions = []
for a in range(action_cnt):
# OFP_ACTION (OFP_ACTION_OUTPUT)
port = a
max_len = ofproto.OFP_ACTION_OUTPUT_SIZE
action = OFPActionOutput(port, max_len)
actions.append(action)
c = OFPBucket(weight, watch_port, watch_group, actions)
buf = bytearray()
c.serialize(buf, 0)
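# unpack the bucket header and every trailing action with one
# concatenated big-endian format string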
fmt = ofproto.OFP_BUCKET_PACK_STR
for a in range(action_cnt):
fmt += ofproto.OFP_ACTION_OUTPUT_PACK_STR[1:]
res = struct.unpack(fmt, six.binary_type(buf))
eq_(res[0], len_)
eq_(res[1], weight)
eq_(res[2], watch_port)
eq_(res[3], watch_group)
for a in range(action_cnt):
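# res[0:4] hold the bucket header (len, weight, watch_port,
# watch_group); each output action then adds four fields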
d = 4 + a * 4
eq_(res[d], actions[a].type)
eq_(res[d + 1], actions[a].len)
eq_(res[d + 2], actions[a].port)
eq_(res[d + 3], actions[a].max_len)
def test_serialize_mid(self):
weight = 4386
watch_port = 6606
watch_group = 3
action_cnt = 2047
self._test_serialize(weight, watch_port,
watch_group, action_cnt)
def test_serialize_max(self):
weight = 65535
watch_port = 4294967295
watch_group = 4294967295
action_cnt = 4094
self._test_serialize(weight, watch_port,
watch_group, action_cnt)
def test_serialize_min(self):
weight = 0
watch_port = 0
watch_group = 0
action_cnt = 0
self._test_serialize(weight, watch_port,
watch_group, action_cnt)
class TestOFPGroupMod(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPGroupMod
"""
def test_init(self):
# OFP_GROUP_MOD_PACK_STR
# '!HBxI'...command, type, pad(1), group_id
command = ofproto.OFPFC_ADD
type_ = ofproto.OFPGT_SELECT
group_id = 6606
# OFP_BUCKET
weight = 4386
watch_port = 8006
watch_group = 3
# OFP_ACTION (OFP_ACTION_OUTPUT)
port = 10
max_len = 2000
actions = [OFPActionOutput(port, max_len)]
buckets = [OFPBucket(weight, watch_port, watch_group, actions)]
c = OFPGroupMod(_Datapath, command, type_, group_id, buckets)
eq_(command, c.command)
eq_(type_, c.type)
eq_(group_id, c.group_id)
eq_(1, len(c.buckets))
eq_(1, len(c.buckets[0].actions))
eq_(port, c.buckets[0].actions[0].port)
eq_(max_len, c.buckets[0].actions[0].max_len)
def _test_serialize(self, command, type_, group_id, bucket_cnt):
len_ = ofproto.OFP_BUCKET_SIZE \
+ ofproto.OFP_ACTION_OUTPUT_SIZE
buckets = []
for b in range(bucket_cnt):
# OFP_BUCKET
weight = watch_port = watch_group = port = b
actions = [OFPActionOutput(port, 0)]
bucket = OFPBucket(weight, watch_port, watch_group, actions)
buckets.append(bucket)
c = OFPGroupMod(_Datapath, command, type_, group_id, buckets)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_GROUP_MOD, c.msg_type)
eq_(0, c.xid)
eq_(len(c.buf), c.msg_len)
# 16 byte
fmt = ofproto.OFP_HEADER_PACK_STR \
+ ofproto.OFP_GROUP_MOD_PACK_STR[1:]
# 16 + (16 + 16) * bucket_cnt < 65535 byte
# bucket_cnt <= 2047
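# (the group-mod message is 16 bytes and each bucket-plus-action pair
# adds 32, so (65535 - 16) // 32 = 2047 buckets fit in one message)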
for b in range(bucket_cnt):
fmt += ofproto.OFP_BUCKET_PACK_STR[1:] \
+ ofproto.OFP_ACTION_OUTPUT_PACK_STR[1:]
res = struct.unpack(fmt, six.binary_type(c.buf))
msg_len = ofproto.OFP_GROUP_MOD_SIZE \
+ (len_ * bucket_cnt)
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_GROUP_MOD)
eq_(res[2], msg_len)
eq_(res[3], 0)
eq_(res[4], command)
eq_(res[5], type_)
eq_(res[6], group_id)
for d in range(bucket_cnt):
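# res[0:7] hold the OFP header (4 fields) plus command, type and
# group_id; each bucket-plus-action pair then adds eight fields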
e = 7 + d * 8
eq_(res[e + 1], buckets[d].weight)
eq_(res[e + 2], buckets[d].watch_port)
eq_(res[e + 3], buckets[d].watch_group)
eq_(res[e + 4], buckets[d].actions[0].type)
eq_(res[e + 5], buckets[d].actions[0].len)
eq_(res[e + 6], buckets[d].actions[0].port)
eq_(res[e + 7], buckets[d].actions[0].max_len)
def test_serialize_mid(self):
command = 32768
type_ = 128
group_id = 6606
bucket_cnt = 1023
self._test_serialize(command, type_, group_id, bucket_cnt)
def test_serialize_max(self):
command = 65535
type_ = 255
group_id = 4294967295
bucket_cnt = 2047
self._test_serialize(command, type_, group_id, bucket_cnt)
def test_serialize_min(self):
command = 0
type_ = 0
group_id = 0
bucket_cnt = 0
self._test_serialize(command, type_, group_id, bucket_cnt)
def test_serialize_p1(self):
command = 1
type_ = 1
group_id = 6606
bucket_cnt = 1023
self._test_serialize(command, type_, group_id, bucket_cnt)
def test_serialize_p2(self):
command = 1
type_ = 2
group_id = 6606
bucket_cnt = 1023
self._test_serialize(command, type_, group_id, bucket_cnt)
def test_serialize_p3(self):
command = 2
type_ = 3
group_id = 6606
bucket_cnt = 1023
self._test_serialize(command, type_, group_id, bucket_cnt)
class TestOFPPortMod(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPPortMod
"""
# OFP_PORT_MOD_PACK_STR v1.2
# '!I4x6s2xIII4x'...port_no, pad(4), hw_addr, pad(2),
# config, mask, advertise, pad(4)
port_no = 1119692796
hw_addr = 'e8:fe:5e:a9:68:6c'
config = 2226555987
mask = 1678244809
advertise = 2025421682
def test_init(self):
c = OFPPortMod(_Datapath, self.port_no, self.hw_addr,
self.config, self.mask, self.advertise)
eq_(self.port_no, c.port_no)
eq_(self.hw_addr, c.hw_addr)
eq_(self.config, c.config)
eq_(self.mask, c.mask)
eq_(self.advertise, c.advertise)
def _test_serialize(self, port_no, hw_addr, config, mask, advertise):
c = OFPPortMod(_Datapath, port_no, hw_addr, config,
mask, advertise)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_PORT_MOD, c.msg_type)
eq_(0, c.xid)
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ ofproto.OFP_PORT_MOD_PACK_STR.replace('!', '')
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_PORT_MOD)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], port_no)
eq_(res[5], addrconv.mac.text_to_bin(hw_addr))
eq_(res[6], config)
eq_(res[7], mask)
eq_(res[8], advertise)
def test_serialize_mid(self):
self._test_serialize(self.port_no, self.hw_addr,
self.config, self.mask, self.advertise)
def test_serialize_max(self):
port_no = ofproto.OFPP_ANY
hw_addr = 'ff:ff:ff:ff:ff:ff'
config = 0xffffffff
mask = 0xffffffff
advertise = 0xffffffff
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_min(self):
port_no = 0
hw_addr = '00:00:00:00:00:00'
config = 0
mask = 0
advertise = 0
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p1(self):
port_no = ofproto.OFPP_MAX
hw_addr = self.hw_addr
config = ofproto.OFPPC_PORT_DOWN
mask = ofproto.OFPPC_PORT_DOWN
advertise = ofproto.OFPPF_10MB_HD
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p2(self):
port_no = ofproto.OFPP_IN_PORT
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_RECV
mask = ofproto.OFPPC_NO_RECV
advertise = ofproto.OFPPF_10MB_FD
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p3(self):
port_no = ofproto.OFPP_TABLE
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_FWD
mask = ofproto.OFPPC_NO_FWD
advertise = ofproto.OFPPF_100MB_HD
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p4(self):
port_no = ofproto.OFPP_NORMAL
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_100MB_FD
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p5(self):
port_no = ofproto.OFPP_FLOOD
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_1GB_HD
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p6(self):
port_no = ofproto.OFPP_ALL
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_1GB_FD
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p7(self):
port_no = ofproto.OFPP_CONTROLLER
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_10GB_FD
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p8(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_40GB_FD
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p9(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_100GB_FD
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p10(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_1TB_FD
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p11(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_OTHER
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p12(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_COPPER
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p13(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_FIBER
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p14(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_AUTONEG
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p15(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_PAUSE
self._test_serialize(port_no, hw_addr, config, mask, advertise)
def test_serialize_p16(self):
port_no = ofproto.OFPP_LOCAL
hw_addr = self.hw_addr
config = ofproto.OFPPC_NO_PACKET_IN
mask = ofproto.OFPPC_NO_PACKET_IN
advertise = ofproto.OFPPF_PAUSE_ASYM
self._test_serialize(port_no, hw_addr, config, mask, advertise)
class TestOFPTableMod(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPTableMod
"""
# OFP_TABLE_MOD_PACK_STR v1.2
# '!B3xI'...table_id, pad(3), config
table_id = 3
config = 2226555987
def test_init(self):
c = OFPTableMod(_Datapath, self.table_id, self.config)
eq_(self.table_id, c.table_id)
eq_(self.config, c.config)
def _test_serialize(self, table_id, config):
c = OFPTableMod(_Datapath, table_id, config)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_TABLE_MOD, c.msg_type)
eq_(0, c.xid)
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ ofproto.OFP_TABLE_MOD_PACK_STR.replace('!', '')
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_TABLE_MOD)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], table_id)
eq_(res[5], config)
def test_serialize_mid(self):
self._test_serialize(self.table_id, self.config)
def test_serialize_max(self):
table_id = ofproto.OFPTT_ALL
config = 0xffffffff
self._test_serialize(table_id, config)
def test_serialize_min(self):
table_id = 0
config = 0
self._test_serialize(table_id, config)
def test_serialize_p1(self):
table_id = ofproto.OFPTT_MAX
config = ofproto.OFPTC_TABLE_MISS_CONTINUE
self._test_serialize(table_id, config)
def test_serialize_p2(self):
table_id = ofproto.OFPTT_MAX
config = ofproto.OFPTC_TABLE_MISS_DROP
self._test_serialize(table_id, config)
def test_serialize_p3(self):
table_id = ofproto.OFPTT_MAX
config = ofproto.OFPTC_TABLE_MISS_MASK
self._test_serialize(table_id, config)
class TestOFPStatsRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPStatsRequest
"""
type_ = ofproto.OFPST_DESC
c = OFPStatsRequest(_Datapath, type_)
def test_init(self):
eq_(self.type_, self.c.type)
eq_(0, self.c.flags)
def test_serialize_body(self):
len_ = ofproto.OFP_HEADER_SIZE \
+ ofproto.OFP_STATS_REQUEST_SIZE
self.c.buf = bytearray(len_)
self.c._serialize_body()
fmt = ofproto.OFP_STATS_REQUEST_PACK_STR
res = struct.unpack_from(fmt, six.binary_type(self.c.buf),
ofproto.OFP_HEADER_SIZE)
eq_(res[0], self.type_)
eq_(res[1], 0)
class TestOFPStatsReply(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPStatsReply
"""
c = OFPStatsReply(_Datapath)
def test_parser_single_struct_true(self):
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_STATS_REPLY
msg_len = ofproto.OFP_STATS_REPLY_SIZE \
+ ofproto.OFP_AGGREGATE_STATS_REPLY_SIZE
xid = 2495926989
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
# OFP_STATS_REPLY_PACK_STR
# '!HH4x'...type, flags, pad(4)
type_ = ofproto.OFPST_AGGREGATE
flags = 41802
fmt = ofproto.OFP_STATS_REPLY_PACK_STR
buf += pack(fmt, type_, flags)
# OFP_AGGREGATE_STATS_REPLY_PACK_STR
packet_count = 5142202600015232219
byte_count = 2659740543924820419
flow_count = 1344694860
fmt = ofproto.OFP_AGGREGATE_STATS_REPLY_PACK_STR
buf += pack(fmt, packet_count, byte_count, flow_count)
res = self.c.parser(object, version, msg_type, msg_len, xid, buf)
eq_(version, res.version)
eq_(msg_type, res.msg_type)
eq_(msg_len, res.msg_len)
eq_(xid, res.xid)
eq_(type_, res.type)
eq_(flags, res.flags)
eq_(packet_count, res.body.packet_count)
eq_(byte_count, res.body.byte_count)
eq_(flow_count, res.body.flow_count)
def test_parser_single_struct_false(self):
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_STATS_REPLY
msg_len = ofproto.OFP_STATS_REPLY_SIZE \
+ ofproto.OFP_QUEUE_STATS_SIZE
xid = 2495926989
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
# OFP_STATS_REPLY_PACK_STR
# '!HH4x'...type, flags, pad(4)
type_ = ofproto.OFPST_QUEUE
flags = 11884
fmt = ofproto.OFP_STATS_REPLY_PACK_STR
buf += pack(fmt, type_, flags)
# OFP_QUEUE_STATS_PACK_STR
port_no = 41186
queue_id = 6606
tx_bytes = 8638420181865882538
tx_packets = 2856480458895760962
tx_errors = 6283093430376743019
fmt = ofproto.OFP_QUEUE_STATS_PACK_STR
buf += pack(fmt, port_no, queue_id, tx_bytes, tx_packets, tx_errors)
res = self.c.parser(object, version, msg_type, msg_len, xid, buf)
eq_(version, res.version)
eq_(msg_type, res.msg_type)
eq_(msg_len, res.msg_len)
eq_(xid, res.xid)
eq_(type_, res.type)
eq_(flags, res.flags)
eq_(port_no, res.body[0].port_no)
eq_(queue_id, res.body[0].queue_id)
eq_(tx_bytes, res.body[0].tx_bytes)
eq_(tx_packets, res.body[0].tx_packets)
eq_(tx_errors, res.body[0].tx_errors)
def test_parser_max(self):
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_STATS_REPLY
msg_len = ofproto.OFP_STATS_REPLY_SIZE
xid = 0xffffffff
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
# OFP_STATS_REPLY_PACK_STR
# '!HH4x'...type, flags, pad(4)
type_ = ofproto.OFPST_QUEUE
flags = 0xffff
fmt = ofproto.OFP_STATS_REPLY_PACK_STR
buf += pack(fmt, type_, flags)
res = self.c.parser(object, version, msg_type, msg_len, xid, buf)
eq_(version, res.version)
eq_(msg_type, res.msg_type)
eq_(msg_len, res.msg_len)
eq_(xid, res.xid)
eq_(type_, res.type)
eq_(flags, res.flags)
def test_parser_min(self):
# OFP_HEADER_PACK_STR
# '!BBHI'...version, msg_type, msg_len, xid
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_STATS_REPLY
msg_len = ofproto.OFP_STATS_REPLY_SIZE
xid = 0
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
# OFP_STATS_REPLY_PACK_STR
# '!HH4x'...type, flags, pad(4)
type_ = ofproto.OFPST_QUEUE
flags = 0
fmt = ofproto.OFP_STATS_REPLY_PACK_STR
buf += pack(fmt, type_, flags)
res = self.c.parser(object, version, msg_type, msg_len, xid, buf)
eq_(version, res.version)
eq_(msg_type, res.msg_type)
eq_(msg_len, res.msg_len)
eq_(xid, res.xid)
eq_(type_, res.type)
eq_(flags, res.flags)
class TestOFPDescStatsRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPDescStatsRequest
"""
def test_serialize(self):
c = OFPDescStatsRequest(_Datapath)
c.serialize()
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ ofproto.OFP_STATS_REQUEST_PACK_STR.replace('!', '')
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_STATS_REQUEST)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], ofproto.OFPST_DESC)
eq_(res[5], 0)
class TestOFPDescStats(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPDescStats
"""
# OFP_DESC_STATS_PACK_STR
# '!256s256s256s32s256s'...mfr_desc, hw_desc, sw_desc, serial_num, dp_desc
mfr_desc = b'mfr_desc'.ljust(256)
hw_desc = b'hw_desc'.ljust(256)
sw_desc = b'sw_desc'.ljust(256)
serial_num = b'serial_num'.ljust(32)
dp_desc = b'dp_desc'.ljust(256)
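# the struct layout uses fixed-width string fields, so each value is
# space-padded to its field size and the raw concatenation below is
# already a valid OFP_DESC_STATS body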
buf = mfr_desc \
+ hw_desc \
+ sw_desc \
+ serial_num \
+ dp_desc
c = OFPDescStats(mfr_desc, hw_desc, sw_desc, serial_num, dp_desc)
def test_init(self):
eq_(self.mfr_desc, self.c.mfr_desc)
eq_(self.hw_desc, self.c.hw_desc)
eq_(self.sw_desc, self.c.sw_desc)
eq_(self.serial_num, self.c.serial_num)
eq_(self.dp_desc, self.c.dp_desc)
def test_parser(self):
res = self.c.parser(self.buf, 0)
eq_(self.mfr_desc, res.mfr_desc)
eq_(self.hw_desc, res.hw_desc)
eq_(self.sw_desc, res.sw_desc)
eq_(self.serial_num, res.serial_num)
eq_(self.dp_desc, res.dp_desc)
class TestOFPFlowStatsRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPFlowStatsRequest
"""
# OFP_FLOW_STATS_REQUEST_PACK_STR
# '!B3xII4xQQ'...table_id, pad(3), out_port, out_group, pad(4),
# cookie, cookie_mask
table_id = 3
out_port = 65037
out_group = 6606
cookie = 2127614848199081640
cookie_mask = 2127614848199081641
def test_init(self):
match = OFPMatch()
in_port = 3
match.set_in_port(in_port)
c = OFPFlowStatsRequest(_Datapath, self.table_id, self.out_port,
self.out_group, self.cookie, self.cookie_mask,
match)
eq_(self.table_id, c.table_id)
eq_(self.out_port, c.out_port)
eq_(self.out_group, c.out_group)
eq_(self.cookie, c.cookie)
eq_(self.cookie_mask, c.cookie_mask)
eq_(in_port, c.match._flow.in_port)
def _test_serialize(self, table_id, out_port, out_group,
cookie, cookie_mask):
match = OFPMatch()
dl_type = 0x800
match.set_dl_type(dl_type)
c = OFPFlowStatsRequest(_Datapath, table_id, out_port,
out_group, cookie, cookie_mask, match)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_STATS_REQUEST, c.msg_type)
eq_(0, c.xid)
fmt = ofproto.OFP_HEADER_PACK_STR \
+ ofproto.OFP_STATS_REQUEST_PACK_STR[1:] \
+ ofproto.OFP_FLOW_STATS_REQUEST_PACK_STR[1:] \
+ 'HHHBB' \
+ MTEthType.pack_str[1:] + '6x'
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_STATS_REQUEST)
size = ofproto.OFP_STATS_REQUEST_SIZE \
+ ofproto.OFP_FLOW_STATS_REQUEST_SIZE \
+ calcsize(MTEthType.pack_str + '6x')
eq_(res[2], size)
eq_(res[3], 0)
eq_(res[4], ofproto.OFPST_FLOW)
eq_(res[5], 0)
eq_(res[6], table_id)
eq_(res[7], out_port)
eq_(res[8], out_group)
eq_(res[9], cookie)
eq_(res[10], cookie_mask)
# match
eq_(res[11], ofproto.OFPMT_OXM)
eq_(res[12], 10)
eq_(res[13], ofproto.OFPXMC_OPENFLOW_BASIC)
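# the OXM field byte packs the field type in its upper seven bits
# and the hasmask flag in the lowest bit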
eq_(res[14] >> 1, ofproto.OFPXMT_OFB_ETH_TYPE)
eq_(res[14] & 0b0001, 0)
eq_(res[15], calcsize(MTEthType.pack_str))
eq_(res[16], dl_type)
def test_serialize_mid(self):
self._test_serialize(self.table_id, self.out_port, self.out_group,
self.cookie, self.cookie_mask)
def test_serialize_max(self):
table_id = 0xff
out_port = 0xffff
out_group = 0xffff
cookie = 0xffffffff
cookie_mask = 0xffffffff
self._test_serialize(table_id, out_port, out_group,
cookie, cookie_mask)
def test_serialize_min(self):
table_id = 0
out_port = 0
out_group = 0
cookie = 0
cookie_mask = 0
self._test_serialize(table_id, out_port, out_group,
cookie, cookie_mask)
def test_serialize_p1(self):
table_id = ofproto.OFPTT_MAX
self._test_serialize(table_id, self.out_port, self.out_group,
self.cookie, self.cookie_mask)
class TestOFPFlowStats(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPFlowStats
"""
def test_init(self):
length = ofproto.OFP_FLOW_STATS_SIZE
table_id = 81
duration_sec = 2484712402
duration_nsec = 3999715196
priority = 57792
idle_timeout = 36368
hard_timeout = 54425
cookie = 793171083674290912
packet_count = 5142202600015232219
byte_count = 2659740543924820419
match = OFPMatch()
in_port = 2
match.set_in_port(in_port)
goto_table = 3
instructions = [OFPInstructionGotoTable(goto_table)]
c = OFPFlowStats(table_id, duration_sec, duration_nsec,
priority, idle_timeout, hard_timeout, cookie,
packet_count, byte_count, match, instructions)
eq_(table_id, c.table_id)
eq_(duration_sec, c.duration_sec)
eq_(duration_nsec, c.duration_nsec)
eq_(priority, c.priority)
eq_(idle_timeout, c.idle_timeout)
eq_(hard_timeout, c.hard_timeout)
eq_(cookie, c.cookie)
eq_(packet_count, c.packet_count)
eq_(byte_count, c.byte_count)
eq_(in_port, c.match._flow.in_port)
eq_(goto_table, c.instructions[0].table_id)
def _test_parser(self, table_id, duration_sec, duration_nsec,
priority, idle_timeout, hard_timeout, cookie,
packet_count, byte_count, inst_cnt=0):
length = ofproto.OFP_FLOW_STATS_SIZE \
+ calcsize(MTEthType.pack_str[1:] + '6x') \
+ ofproto.OFP_INSTRUCTION_GOTO_TABLE_SIZE * inst_cnt
# OFP_FLOW_STATS_PACK_STR
buf = pack(ofproto.OFP_FLOW_STATS_PACK_STR,
length, table_id, duration_sec, duration_nsec,
priority, idle_timeout, hard_timeout, cookie,
packet_count, byte_count)
# match
match = OFPMatch()
dl_type = 0x0800
match.set_dl_type(dl_type)
match_buf = bytearray()
match.serialize(match_buf, 0)
buf += six.binary_type(match_buf)
# instructions
# 56 + 8 + 8 * inst_cnt <= 65535
# inst_cnt <= 8183
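# (the 56-byte flow stats header plus the 8-byte match leaves
# (65535 - 64) // 8 = 8183 goto-table instructions)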
for i in range(inst_cnt):
inst = OFPInstructionGotoTable(1)
inst_buf = bytearray()
inst.serialize(inst_buf, 0)
buf += six.binary_type(inst_buf)
# parse
res = OFPFlowStats.parser(buf, 0)
eq_(length, res.length)
eq_(table_id, res.table_id)
eq_(duration_sec, res.duration_sec)
eq_(duration_nsec, res.duration_nsec)
eq_(priority, res.priority)
eq_(idle_timeout, res.idle_timeout)
eq_(hard_timeout, res.hard_timeout)
eq_(cookie, res.cookie)
eq_(packet_count, res.packet_count)
eq_(byte_count, res.byte_count)
eq_(dl_type, res.match.fields[0].value)
for i in range(inst_cnt):
eq_(1, res.instructions[i].table_id)
def test_parser_mid(self):
table_id = 81
duration_sec = 2484712402
duration_nsec = 3999715196
priority = 57792
idle_timeout = 36368
hard_timeout = 54425
cookie = 793171083674290912
packet_count = 5142202600015232219
byte_count = 2659740543924820419
inst_cnt = 2
self._test_parser(table_id, duration_sec, duration_nsec,
priority, idle_timeout, hard_timeout, cookie,
packet_count, byte_count, inst_cnt)
def test_parser_max(self):
table_id = 0xff
duration_sec = 0xffff
duration_nsec = 0xffff
priority = 0xffff
idle_timeout = 0xff
hard_timeout = 0xff
cookie = 0xffffffffffffffff
packet_count = 0xffffffffffffffff
byte_count = 0xffffffffffffffff
inst_cnt = 8183
self._test_parser(table_id, duration_sec, duration_nsec,
priority, idle_timeout, hard_timeout, cookie,
packet_count, byte_count, inst_cnt)
def test_parser_min(self):
self._test_parser(0, 0, 0, 0, 0, 0, 0, 0, 0)
class TestOFPAggregateStatsRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPAggregateStatsRequest
"""
# OFP_AGGREGATE_STATS_REQUEST_PACK_STR
# '!B3xII4xQQ'...table_id, pad(3), out_port, out_group, pad(4),
# cookie, cookie_mask
table_id = 3
out_port = 65037
out_group = 6606
cookie = 2127614848199081640
cookie_mask = 2127614848199081641
def test_init(self):
match = OFPMatch()
dl_type = 0x800
match.set_dl_type(dl_type)
c = OFPAggregateStatsRequest(_Datapath, self.table_id,
self.out_port, self.out_group,
self.cookie, self.cookie_mask,
match)
eq_(self.table_id, c.table_id)
eq_(self.out_port, c.out_port)
eq_(self.out_group, c.out_group)
eq_(self.cookie, c.cookie)
eq_(self.cookie_mask, c.cookie_mask)
eq_(dl_type, c.match._flow.dl_type)
def _test_serialize(self, table_id, out_port, out_group,
cookie, cookie_mask):
match = OFPMatch()
dl_type = 0x800
match.set_dl_type(dl_type)
c = OFPAggregateStatsRequest(_Datapath, table_id,
out_port, out_group, cookie,
cookie_mask, match)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_STATS_REQUEST, c.msg_type)
eq_(0, c.xid)
fmt = ofproto.OFP_HEADER_PACK_STR \
+ ofproto.OFP_STATS_REQUEST_PACK_STR[1:] \
+ ofproto.OFP_AGGREGATE_STATS_REQUEST_PACK_STR[1:] \
+ 'HHHBB' \
+ MTEthType.pack_str[1:] + '6x'
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_STATS_REQUEST)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], ofproto.OFPST_AGGREGATE)
eq_(res[5], 0)
eq_(res[6], table_id)
eq_(res[7], out_port)
eq_(res[8], out_group)
eq_(res[9], cookie)
eq_(res[10], cookie_mask)
# match
eq_(res[11], ofproto.OFPMT_OXM)
eq_(res[12], 10)
eq_(res[13], ofproto.OFPXMC_OPENFLOW_BASIC)
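# as above, the OXM field byte carries the field type in its upper
# seven bits and the hasmask flag in the lowest bit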
eq_(res[14] >> 1, ofproto.OFPXMT_OFB_ETH_TYPE)
eq_(res[14] & 0b0001, 0)
eq_(res[15], calcsize(MTEthType.pack_str))
eq_(res[16], dl_type)
def test_serialize_mid(self):
self._test_serialize(self.table_id, self.out_port, self.out_group,
self.cookie, self.cookie_mask)
def test_serialize_max(self):
table_id = 0xff
out_port = 0xffffffff
out_group = 0xffffffff
cookie = 0xffffffff
cookie_mask = 0xffffffff
self._test_serialize(table_id, out_port, out_group,
cookie, cookie_mask)
def test_serialize_min(self):
table_id = 0
out_port = 0
out_group = 0
cookie = 0
cookie_mask = 0
self._test_serialize(table_id, out_port, out_group,
cookie, cookie_mask)
def test_serialize_p1(self):
table_id = ofproto.OFPTT_MAX
self._test_serialize(table_id, self.out_port, self.out_group,
self.cookie, self.cookie_mask)
class TestOFPAggregateStatsReply(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPAggregateStatsReply
"""
# OFP_AGGREGATE_STATS_REPLY_PACK_STR
# '!QQI4x'...packet_count, byte_count, flow_count, pad(4)
packet_count = 5142202600015232219
byte_count = 2659740543924820419
flow_count = 1344694860
def test_init(self):
c = OFPAggregateStatsReply(self.packet_count, self.byte_count,
self.flow_count)
eq_(c.packet_count, self.packet_count)
eq_(c.byte_count, self.byte_count)
eq_(c.flow_count, self.flow_count)
def _test_parser(self, packet_count, byte_count, flow_count):
fmt = ofproto.OFP_AGGREGATE_STATS_REPLY_PACK_STR
buf = pack(fmt, packet_count, byte_count, flow_count)
res = OFPAggregateStatsReply.parser(buf, 0)
eq_(packet_count, res.packet_count)
eq_(byte_count, res.byte_count)
eq_(flow_count, res.flow_count)
def test_parser_mid(self):
self._test_parser(self.packet_count, self.byte_count,
self.flow_count)
def test_parser_max(self):
packet_count = 18446744073709551615
byte_count = 18446744073709551615
flow_count = 4294967295
self._test_parser(packet_count, byte_count,
flow_count)
def test_parser_min(self):
packet_count = 0
byte_count = 0
flow_count = 0
self._test_parser(packet_count, byte_count,
flow_count)
class TestOFPTableStatsRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPTableStatsRequest
"""
def test_serialize(self):
c = OFPTableStatsRequest(_Datapath)
c.serialize()
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ ofproto.OFP_STATS_REQUEST_PACK_STR.replace('!', '')
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_STATS_REQUEST)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], ofproto.OFPST_TABLE)
eq_(res[5], 0)
class TestOFPTableStats(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPTableStats
"""
def test_init(self):
table_id = 91
name = 'name'
match = 1270985291017894273
wildcards = 3316608530
write_actions = 2484712402
apply_actions = 3999715196
write_setfields = 5142202600015232219
apply_setfields = 2659740543924820419
metadata_match = 2127614848199081640
metadata_write = 2127614848199081641
instructions = 1119692796
config = 2226555987
max_entries = 2506913869
active_count = 2024581150
lookup_count = 4620020561814017052
matched_count = 2825167325263435621
res = OFPTableStats(table_id, name, match, wildcards, write_actions,
apply_actions, write_setfields, apply_setfields,
metadata_match, metadata_write, instructions,
config, max_entries, active_count, lookup_count,
matched_count)
eq_(table_id, res.table_id)
eq_(name, res.name)
eq_(match, res.match)
eq_(wildcards, res.wildcards)
eq_(write_actions, res.write_actions)
eq_(apply_actions, res.apply_actions)
eq_(write_setfields, res.write_setfields)
eq_(apply_setfields, res.apply_setfields)
eq_(metadata_match, res.metadata_match)
eq_(metadata_write, res.metadata_write)
eq_(instructions, res.instructions)
eq_(config, res.config)
eq_(max_entries, res.max_entries)
eq_(active_count, res.active_count)
eq_(lookup_count, res.lookup_count)
eq_(matched_count, res.matched_count)
def _test_parser(self, table_id, name, match, wildcards, write_actions,
apply_actions, write_setfields, apply_setfields,
metadata_match, metadata_write, instructions, config,
max_entries, active_count, lookup_count, matched_count):
# OFP_TABLE_STATS_PACK_STR
# '!B7x32sQQIIQQQQIIIIQQ'
# ...table_id, name, match, wildcards, write_actions, apply_actions,
# write_setfields, apply_setfields, metadata_match, metadata_write,
# instructions, config, max_entries,
# active_count, lookup_count, matched_count
fmt = ofproto.OFP_TABLE_STATS_PACK_STR
buf = pack(fmt, table_id, name,
match, wildcards, write_actions,
apply_actions, write_setfields, apply_setfields,
metadata_match, metadata_write, instructions, config,
max_entries, active_count, lookup_count, matched_count)
res = OFPTableStats.parser(buf, 0)
eq_(table_id, res.table_id)
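# the parser returns the raw 32-byte name field, so NUL padding is
# stripped before comparing against the expected value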
eq_(name, res.name.replace(b'\x00', b''))
eq_(match, res.match)
eq_(wildcards, res.wildcards)
eq_(write_actions, res.write_actions)
eq_(apply_actions, res.apply_actions)
eq_(write_setfields, res.write_setfields)
eq_(apply_setfields, res.apply_setfields)
eq_(metadata_match, res.metadata_match)
eq_(metadata_write, res.metadata_write)
eq_(instructions, res.instructions)
eq_(config, res.config)
eq_(max_entries, res.max_entries)
eq_(active_count, res.active_count)
eq_(lookup_count, res.lookup_count)
eq_(matched_count, res.matched_count)
def test_parser_mid(self):
table_id = 91
name = b'name'
match = 1270985291017894273
wildcards = 3316608530
write_actions = 2484712402
apply_actions = 3999715196
write_setfields = 5142202600015232219
apply_setfields = 2659740543924820419
metadata_match = 2127614848199081640
metadata_write = 2127614848199081641
instructions = 1119692796
config = 2226555987
max_entries = 2506913869
active_count = 2024581150
lookup_count = 4620020561814017052
matched_count = 2825167325263435621
self._test_parser(table_id, name, match, wildcards, write_actions,
apply_actions, write_setfields, apply_setfields,
metadata_match, metadata_write, instructions, config,
max_entries, active_count, lookup_count,
matched_count)
def test_parser_max(self):
# '!B7x32sQQIIQQQQIIIIQQ'
table_id = 0xff
name = b'a' * 32
match = 0xffffffffffffffff
wildcards = 0xffffffffffffffff
write_actions = 0xffffffff
apply_actions = 0xffffffff
write_setfields = 0xffffffffffffffff
apply_setfields = 0xffffffffffffffff
metadata_match = 0xffffffffffffffff
metadata_write = 0xffffffffffffffff
instructions = 0xffffffff
config = 0xffffffff
max_entries = 0xffffffff
active_count = 0xffffffff
lookup_count = 0xffffffffffffffff
matched_count = 0xffffffffffffffff
self._test_parser(table_id, name, match, wildcards, write_actions,
apply_actions, write_setfields, apply_setfields,
metadata_match, metadata_write, instructions, config,
max_entries, active_count, lookup_count,
matched_count)
def test_parser_min(self):
table_id = 0
name = b''
match = 0
wildcards = 0
write_actions = 0
apply_actions = 0
write_setfields = 0
apply_setfields = 0
metadata_match = 0
metadata_write = 0
instructions = 0
config = 0
max_entries = 0
active_count = 0
lookup_count = 0
matched_count = 0
self._test_parser(table_id, name, match, wildcards, write_actions,
apply_actions, write_setfields, apply_setfields,
metadata_match, metadata_write, instructions, config,
max_entries, active_count, lookup_count,
matched_count)
def _test_parser_p(self, ofpxmt, ofpit, ofptc):
table_id = 91
name = b'name'
match = ofpxmt
wildcards = ofpxmt
write_actions = 2484712402
apply_actions = 3999715196
write_setfields = ofpxmt
apply_setfields = ofpxmt
metadata_match = 2127614848199081640
metadata_write = 2127614848199081641
instructions = ofpit
config = ofptc
max_entries = 2506913869
active_count = 2024581150
lookup_count = 4620020561814017052
matched_count = 2825167325263435621
self._test_parser(table_id, name, match, wildcards, write_actions,
apply_actions, write_setfields, apply_setfields,
metadata_match, metadata_write, instructions, config,
max_entries, active_count, lookup_count,
matched_count)
def test_parser_p1(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IN_PORT,
ofproto.OFPIT_GOTO_TABLE,
ofproto.OFPTC_TABLE_MISS_CONTINUE)
def test_parser_p2(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IN_PHY_PORT,
ofproto.OFPIT_WRITE_METADATA,
ofproto.OFPTC_TABLE_MISS_DROP)
def test_parser_p3(self):
self._test_parser_p(ofproto.OFPXMT_OFB_METADATA,
ofproto.OFPIT_WRITE_ACTIONS,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p4(self):
self._test_parser_p(ofproto.OFPXMT_OFB_ETH_DST,
ofproto.OFPIT_APPLY_ACTIONS,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p5(self):
self._test_parser_p(ofproto.OFPXMT_OFB_ETH_SRC,
ofproto.OFPIT_CLEAR_ACTIONS,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p6(self):
self._test_parser_p(ofproto.OFPXMT_OFB_ETH_TYPE,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p7(self):
self._test_parser_p(ofproto.OFPXMT_OFB_VLAN_VID,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p8(self):
self._test_parser_p(ofproto.OFPXMT_OFB_VLAN_PCP,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p9(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IP_DSCP,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p10(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IP_ECN,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p11(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IP_PROTO,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p12(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IPV4_SRC,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p13(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IPV4_DST,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p14(self):
self._test_parser_p(ofproto.OFPXMT_OFB_TCP_SRC,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p15(self):
self._test_parser_p(ofproto.OFPXMT_OFB_TCP_DST,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p16(self):
self._test_parser_p(ofproto.OFPXMT_OFB_UDP_SRC,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p17(self):
self._test_parser_p(ofproto.OFPXMT_OFB_UDP_DST,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p18(self):
self._test_parser_p(ofproto.OFPXMT_OFB_SCTP_SRC,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p19(self):
self._test_parser_p(ofproto.OFPXMT_OFB_SCTP_DST,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p20(self):
self._test_parser_p(ofproto.OFPXMT_OFB_ICMPV4_TYPE,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p21(self):
self._test_parser_p(ofproto.OFPXMT_OFB_ICMPV4_CODE,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p22(self):
self._test_parser_p(ofproto.OFPXMT_OFB_ARP_OP,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p23(self):
self._test_parser_p(ofproto.OFPXMT_OFB_ARP_SPA,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p24(self):
self._test_parser_p(ofproto.OFPXMT_OFB_ARP_TPA,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p25(self):
self._test_parser_p(ofproto.OFPXMT_OFB_ARP_SHA,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p26(self):
self._test_parser_p(ofproto.OFPXMT_OFB_ARP_THA,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p27(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IPV6_SRC,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p28(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IPV6_DST,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p29(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IPV6_FLABEL,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p30(self):
self._test_parser_p(ofproto.OFPXMT_OFB_ICMPV6_TYPE,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p31(self):
self._test_parser_p(ofproto.OFPXMT_OFB_ICMPV6_CODE,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p32(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IPV6_ND_TARGET,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p33(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IPV6_ND_SLL,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p34(self):
self._test_parser_p(ofproto.OFPXMT_OFB_IPV6_ND_TLL,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p35(self):
self._test_parser_p(ofproto.OFPXMT_OFB_MPLS_LABEL,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
def test_parser_p36(self):
self._test_parser_p(ofproto.OFPXMT_OFB_MPLS_TC,
ofproto.OFPIT_EXPERIMENTER,
ofproto.OFPTC_TABLE_MISS_MASK)
class TestOFPPortStatsRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPPortStatsRequest
"""
# OFP_PORT_STATS_REQUEST_PACK_STR
# '!I4x'...port_no, pad(4)
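    # e.g. pack('!I4x', 41186) -> b'\x00\x00\xa0\xe2' plus four zero padding
    # bytes (eight bytes of request body in total)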
port_no = 41186
def test_init(self):
c = OFPPortStatsRequest(_Datapath, self.port_no)
eq_(self.port_no, c.port_no)
def _test_serialize(self, port_no):
c = OFPPortStatsRequest(_Datapath, port_no)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_STATS_REQUEST, c.msg_type)
eq_(0, c.xid)
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ ofproto.OFP_STATS_REQUEST_PACK_STR.replace('!', '') \
+ ofproto.OFP_PORT_STATS_REQUEST_PACK_STR.replace('!', '')
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_STATS_REQUEST)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], ofproto.OFPST_PORT)
eq_(res[5], 0)
eq_(res[6], port_no)
def test_serialize_mid(self):
self._test_serialize(self.port_no)
def test_serialize_max(self):
self._test_serialize(ofproto.OFPP_ANY)
def test_serialize_min(self):
self._test_serialize(0)
def test_serialize_p1(self):
self._test_serialize(ofproto.OFPP_MAX)
def test_serialize_p2(self):
self._test_serialize(ofproto.OFPP_IN_PORT)
def test_serialize_p3(self):
self._test_serialize(ofproto.OFPP_TABLE)
def test_serialize_p4(self):
self._test_serialize(ofproto.OFPP_NORMAL)
def test_serialize_p5(self):
self._test_serialize(ofproto.OFPP_FLOOD)
def test_serialize_p6(self):
self._test_serialize(ofproto.OFPP_ALL)
def test_serialize_p7(self):
self._test_serialize(ofproto.OFPP_CONTROLLER)
def test_serialize_p8(self):
self._test_serialize(ofproto.OFPP_LOCAL)
class TestOFPPortStats(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPPortStats
"""
def test_init(self):
port_no = 6606
rx_packets = 5999980397101236279
tx_packets = 2856480458895760962
rx_bytes = 6170274950576278921
tx_bytes = 8638420181865882538
rx_dropped = 6982303461569875546
tx_dropped = 661287462113808071
rx_errors = 3422231811478788365
tx_errors = 6283093430376743019
rx_frame_err = 876072919806406283
rx_over_err = 6525873760178941600
rx_crc_err = 8303073210207070535
collisions = 3409801584220270201
res = OFPPortStats(port_no, rx_packets, tx_packets,
rx_bytes, tx_bytes, rx_dropped, tx_dropped,
rx_errors, tx_errors, rx_frame_err,
rx_over_err, rx_crc_err, collisions)
eq_(port_no, res.port_no)
eq_(rx_packets, res.rx_packets)
eq_(tx_packets, res.tx_packets)
eq_(rx_bytes, res.rx_bytes)
eq_(tx_bytes, res.tx_bytes)
eq_(rx_dropped, res.rx_dropped)
eq_(tx_dropped, res.tx_dropped)
eq_(rx_errors, res.rx_errors)
eq_(tx_errors, res.tx_errors)
eq_(rx_frame_err, res.rx_frame_err)
eq_(rx_over_err, res.rx_over_err)
eq_(rx_crc_err, res.rx_crc_err)
eq_(collisions, res.collisions)
def _test_parser(self, port_no, rx_packets, tx_packets,
rx_bytes, tx_bytes, rx_dropped, tx_dropped,
rx_errors, tx_errors, rx_frame_err,
rx_over_err, rx_crc_err, collisions):
        # OFP_PORT_STATS_PACK_STR = '!I4xQQQQQQQQQQQQ'
fmt = ofproto.OFP_PORT_STATS_PACK_STR
buf = pack(fmt, port_no, rx_packets, tx_packets, rx_bytes, tx_bytes,
rx_dropped, tx_dropped, rx_errors, tx_errors, rx_frame_err,
rx_over_err, rx_crc_err, collisions)
res = OFPPortStats.parser(buf, 0)
eq_(port_no, res.port_no)
eq_(rx_packets, res.rx_packets)
eq_(tx_packets, res.tx_packets)
eq_(rx_bytes, res.rx_bytes)
eq_(tx_bytes, res.tx_bytes)
eq_(rx_dropped, res.rx_dropped)
eq_(tx_dropped, res.tx_dropped)
eq_(rx_errors, res.rx_errors)
eq_(tx_errors, res.tx_errors)
eq_(rx_frame_err, res.rx_frame_err)
eq_(rx_over_err, res.rx_over_err)
eq_(rx_crc_err, res.rx_crc_err)
eq_(collisions, res.collisions)
def test_parser_mid(self):
port_no = 6606
rx_packets = 5999980397101236279
tx_packets = 2856480458895760962
rx_bytes = 6170274950576278921
tx_bytes = 8638420181865882538
rx_dropped = 6982303461569875546
tx_dropped = 661287462113808071
rx_errors = 3422231811478788365
tx_errors = 6283093430376743019
rx_frame_err = 876072919806406283
rx_over_err = 6525873760178941600
rx_crc_err = 8303073210207070535
collisions = 3409801584220270201
self._test_parser(port_no, rx_packets, tx_packets, rx_bytes, tx_bytes,
rx_dropped, tx_dropped, rx_errors, tx_errors,
rx_frame_err, rx_over_err, rx_crc_err, collisions)
def test_parser_max(self):
port_no = 0xffffffff
rx_packets = 0xffffffffffffffff
tx_packets = 0xffffffffffffffff
rx_bytes = 0xffffffffffffffff
tx_bytes = 0xffffffffffffffff
rx_dropped = 0xffffffffffffffff
tx_dropped = 0xffffffffffffffff
rx_errors = 0xffffffffffffffff
tx_errors = 0xffffffffffffffff
rx_frame_err = 0xffffffffffffffff
rx_over_err = 0xffffffffffffffff
rx_crc_err = 0xffffffffffffffff
collisions = 0xffffffffffffffff
self._test_parser(port_no, rx_packets, tx_packets, rx_bytes, tx_bytes,
rx_dropped, tx_dropped, rx_errors, tx_errors,
rx_frame_err, rx_over_err, rx_crc_err, collisions)
def test_parser_min(self):
port_no = 0
rx_packets = 0
tx_packets = 0
rx_bytes = 0
tx_bytes = 0
rx_dropped = 0
tx_dropped = 0
rx_errors = 0
tx_errors = 0
rx_frame_err = 0
rx_over_err = 0
rx_crc_err = 0
collisions = 0
self._test_parser(port_no, rx_packets, tx_packets, rx_bytes, tx_bytes,
rx_dropped, tx_dropped, rx_errors, tx_errors,
rx_frame_err, rx_over_err, rx_crc_err, collisions)
def _test_parser_p(self, port_no):
rx_packets = 5999980397101236279
tx_packets = 2856480458895760962
rx_bytes = 6170274950576278921
tx_bytes = 8638420181865882538
rx_dropped = 6982303461569875546
tx_dropped = 661287462113808071
rx_errors = 3422231811478788365
tx_errors = 6283093430376743019
rx_frame_err = 876072919806406283
rx_over_err = 6525873760178941600
rx_crc_err = 8303073210207070535
collisions = 3409801584220270201
self._test_parser(port_no, rx_packets, tx_packets, rx_bytes, tx_bytes,
rx_dropped, tx_dropped, rx_errors, tx_errors,
rx_frame_err, rx_over_err, rx_crc_err, collisions)
def test_parser_p1(self):
self._test_parser_p(ofproto.OFPP_MAX)
def test_parser_p2(self):
self._test_parser_p(ofproto.OFPP_IN_PORT)
def test_parser_p3(self):
self._test_parser_p(ofproto.OFPP_TABLE)
def test_parser_p4(self):
self._test_parser_p(ofproto.OFPP_NORMAL)
def test_parser_p5(self):
self._test_parser_p(ofproto.OFPP_FLOOD)
def test_parser_p6(self):
self._test_parser_p(ofproto.OFPP_ALL)
def test_parser_p7(self):
self._test_parser_p(ofproto.OFPP_CONTROLLER)
def test_parser_p8(self):
self._test_parser_p(ofproto.OFPP_LOCAL)
class TestOFPQueueStatsRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPQueueStatsRequest
"""
# OFP_QUEUE_STATS_REQUEST_PACK_STR
# '!II'...port_no, queue_id
port_no = 41186
queue_id = 6606
def test_init(self):
c = OFPQueueStatsRequest(_Datapath, self.port_no, self.queue_id)
eq_(self.port_no, c.port_no)
eq_(self.queue_id, c.queue_id)
def _test_serialize(self, port_no, queue_id):
c = OFPQueueStatsRequest(_Datapath, port_no, queue_id)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_STATS_REQUEST, c.msg_type)
eq_(0, c.xid)
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ ofproto.OFP_STATS_REQUEST_PACK_STR.replace('!', '') \
+ ofproto.OFP_QUEUE_STATS_REQUEST_PACK_STR.replace('!', '')
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_STATS_REQUEST)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], ofproto.OFPST_QUEUE)
eq_(res[5], 0)
eq_(res[6], port_no)
eq_(res[7], queue_id)
def test_serialize_mid(self):
self._test_serialize(self.port_no, self.queue_id)
def test_serialize_max(self):
self._test_serialize(0xffffffff, 0xffffffff)
def test_serialize_min(self):
self._test_serialize(0, 0)
def test_serialize_p1(self):
self._test_serialize(ofproto.OFPP_MAX, self.queue_id)
def test_serialize_p2(self):
self._test_serialize(ofproto.OFPP_IN_PORT, self.queue_id)
def test_serialize_p3(self):
self._test_serialize(ofproto.OFPP_NORMAL, self.queue_id)
def test_serialize_p4(self):
self._test_serialize(ofproto.OFPP_TABLE, self.queue_id)
def test_serialize_p5(self):
self._test_serialize(ofproto.OFPP_FLOOD, self.queue_id)
def test_serialize_p6(self):
self._test_serialize(ofproto.OFPP_ALL, self.queue_id)
def test_serialize_p7(self):
self._test_serialize(ofproto.OFPP_CONTROLLER, self.queue_id)
def test_serialize_p8(self):
self._test_serialize(ofproto.OFPP_LOCAL, self.queue_id)
class TestOFPQueueStats(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPQueueStats
"""
def test_init(self):
port_no = 41186
queue_id = 6606
tx_bytes = 8638420181865882538
tx_packets = 2856480458895760962
tx_errors = 6283093430376743019
res = OFPQueueStats(port_no, queue_id, tx_bytes,
tx_packets, tx_errors)
eq_(port_no, res.port_no)
eq_(queue_id, res.queue_id)
eq_(tx_bytes, res.tx_bytes)
eq_(tx_packets, res.tx_packets)
eq_(tx_errors, res.tx_errors)
def _test_parser(self, port_no, queue_id, tx_bytes,
tx_packets, tx_errors):
# OFP_QUEUE_STATS_PACK_STR = '!IIQQQ'
fmt = ofproto.OFP_QUEUE_STATS_PACK_STR
buf = pack(fmt, port_no, queue_id, tx_bytes, tx_packets, tx_errors)
res = OFPQueueStats.parser(buf, 0)
eq_(port_no, res.port_no)
eq_(queue_id, res.queue_id)
eq_(tx_bytes, res.tx_bytes)
eq_(tx_packets, res.tx_packets)
eq_(tx_errors, res.tx_errors)
def test_parser_mid(self):
port_no = 41186
queue_id = 6606
tx_bytes = 8638420181865882538
tx_packets = 2856480458895760962
tx_errors = 6283093430376743019
self._test_parser(port_no, queue_id, tx_bytes,
tx_packets, tx_errors)
def test_parser_max(self):
port_no = 0xffffffff
queue_id = 0xffffffff
tx_bytes = 0xffffffffffffffff
tx_packets = 0xffffffffffffffff
tx_errors = 0xffffffffffffffff
self._test_parser(port_no, queue_id, tx_bytes,
tx_packets, tx_errors)
def test_parser_min(self):
port_no = 0
queue_id = 0
tx_bytes = 0
tx_packets = 0
tx_errors = 0
self._test_parser(port_no, queue_id, tx_bytes,
tx_packets, tx_errors)
def _test_parser_p(self, port_no):
queue_id = 6606
tx_bytes = 8638420181865882538
tx_packets = 2856480458895760962
tx_errors = 6283093430376743019
self._test_parser(port_no, queue_id, tx_bytes,
tx_packets, tx_errors)
def test_parser_p1(self):
self._test_parser_p(ofproto.OFPP_MAX)
def test_parser_p2(self):
self._test_parser_p(ofproto.OFPP_IN_PORT)
def test_parser_p3(self):
self._test_parser_p(ofproto.OFPP_TABLE)
def test_parser_p4(self):
self._test_parser_p(ofproto.OFPP_NORMAL)
def test_parser_p5(self):
self._test_parser_p(ofproto.OFPP_FLOOD)
def test_parser_p6(self):
self._test_parser_p(ofproto.OFPP_ALL)
def test_parser_p7(self):
self._test_parser_p(ofproto.OFPP_CONTROLLER)
def test_parser_p8(self):
self._test_parser_p(ofproto.OFPP_LOCAL)
class TestOFPBucketCounter(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPBucketCounter
"""
# OFP_BUCKET_COUNTER_PACK_STR = '!QQ'
packet_count = 6489108735192644493
byte_count = 7334344481123449724
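    # i.e. two big-endian unsigned 64-bit counters packed back to back,
    # 16 bytes per bucket counter entry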
def test_init(self):
c = OFPBucketCounter(self.packet_count, self.byte_count)
eq_(self.packet_count, c.packet_count)
eq_(self.byte_count, c.byte_count)
def _test_parser(self, packet_count, byte_count):
fmt = ofproto.OFP_BUCKET_COUNTER_PACK_STR
buf = pack(fmt, packet_count, byte_count)
res = OFPBucketCounter.parser(buf, 0)
eq_(packet_count, res.packet_count)
eq_(byte_count, res.byte_count)
def test_parser_mid(self):
self._test_parser(self.packet_count, self.byte_count)
def test_parser_max(self):
packet_count = 18446744073709551615
byte_count = 18446744073709551615
self._test_parser(packet_count, byte_count)
def test_parser_min(self):
packet_count = 0
byte_count = 0
self._test_parser(packet_count, byte_count)
class TestOFPGroupStatsRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPGroupStatsRequest
"""
# OFP_GROUP_STATS_REQUEST_PACK_STR
# '!I4x'...group_id, pad(4)
group_id = 6606
def test_init(self):
c = OFPGroupStatsRequest(_Datapath, self.group_id)
eq_(self.group_id, c.group_id)
def _test_serialize(self, group_id):
c = OFPGroupStatsRequest(_Datapath, group_id)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_STATS_REQUEST, c.msg_type)
eq_(0, c.xid)
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ ofproto.OFP_STATS_REQUEST_PACK_STR.replace('!', '') \
+ ofproto.OFP_GROUP_STATS_REQUEST_PACK_STR.replace('!', '')
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_STATS_REQUEST)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], ofproto.OFPST_GROUP)
eq_(res[5], 0)
eq_(res[6], group_id)
def test_serialize_mid(self):
self._test_serialize(self.group_id)
def test_serialize_max(self):
self._test_serialize(0xffffffff)
def test_serialize_min(self):
self._test_serialize(0)
def test_serialize_p1(self):
self._test_serialize(ofproto.OFPG_MAX)
def test_serialize_p2(self):
self._test_serialize(ofproto.OFPG_ALL)
class TestOFPGroupStats(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPGroupStats
"""
# OFP_GROUP_STATS_PACK_STR = '!H2xII4xQQ'
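    # layout: length(2) + pad(2) + group_id(4) + ref_count(4) + pad(4)
    #         + packet_count(8) + byte_count(8) = 32 bytes of fixed header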
length = ofproto.OFP_GROUP_STATS_SIZE \
+ ofproto.OFP_BUCKET_COUNTER_SIZE
group_id = 6606
ref_count = 2102
packet_count = 6489108735192644493
byte_count = 7334344481123449724
# OFP_BUCKET_COUNTER_PACK_STR = '!QQ'
buck_packet_count = 3519264449364891087
buck_byte_count = 3123449724733434448
bucket_counters = [OFPBucketCounter(buck_packet_count, buck_byte_count)]
buf_bucket_counters = pack(ofproto.OFP_BUCKET_COUNTER_PACK_STR,
buck_packet_count, buck_byte_count)
fmt = ofproto.OFP_GROUP_STATS_PACK_STR
buf = pack(fmt, length, group_id, ref_count, packet_count, byte_count) \
+ buf_bucket_counters
def test_init(self):
c = OFPGroupStats(self.group_id, self.ref_count,
self.packet_count, self.byte_count,
self.bucket_counters)
eq_(self.group_id, c.group_id)
eq_(self.ref_count, c.ref_count)
eq_(self.packet_count, c.packet_count)
eq_(self.byte_count, c.byte_count)
eq_(self.bucket_counters, c.bucket_counters)
def _test_parser(self, group_id, ref_count, packet_count,
byte_count, bucket_counter_cnt):
# OFP_GROUP_STATS_PACK_STR = '!H2xII4xQQ'
length = ofproto.OFP_GROUP_STATS_SIZE \
+ (ofproto.OFP_BUCKET_COUNTER_SIZE * bucket_counter_cnt)
fmt = ofproto.OFP_GROUP_STATS_PACK_STR
buf = pack(fmt, length, group_id, ref_count,
packet_count, byte_count)
bucket_counters = []
for b in range(bucket_counter_cnt):
# OFP_BUCKET_COUNTER_PACK_STR = '!QQ'
buck_packet_count = b
buck_byte_count = b
bucket_counter = OFPBucketCounter(buck_packet_count,
buck_byte_count)
bucket_counters.append(bucket_counter)
buf_bucket_counters = \
pack(ofproto.OFP_BUCKET_COUNTER_PACK_STR,
buck_packet_count, buck_byte_count)
buf += buf_bucket_counters
res = OFPGroupStats.parser(buf, 0)
        # fixed-length part of OFPGroupStats (32 bytes)
eq_(length, res.length)
eq_(group_id, res.group_id)
eq_(ref_count, res.ref_count)
eq_(packet_count, res.packet_count)
eq_(byte_count, res.byte_count)
        # the 16-bit 'length' field limits 32 + 16 * bucket_counter_cnt
        # to 65535 bytes, hence bucket_counter_cnt <= 4093
for b in range(bucket_counter_cnt):
eq_(bucket_counters[b].packet_count,
res.bucket_counters[b].packet_count)
eq_(bucket_counters[b].byte_count,
res.bucket_counters[b].byte_count)
def test_parser_mid(self):
bucket_counter_cnt = 2046
self._test_parser(self.group_id, self.ref_count,
self.packet_count, self.byte_count,
bucket_counter_cnt)
def test_parser_max(self):
group_id = 4294967295
ref_count = 4294967295
packet_count = 18446744073709551615
byte_count = 18446744073709551615
bucket_counter_cnt = 4093
self._test_parser(group_id, ref_count,
packet_count, byte_count,
bucket_counter_cnt)
def test_parser_min(self):
group_id = 0
ref_count = 0
packet_count = 0
byte_count = 0
bucket_counter_cnt = 0
self._test_parser(group_id, ref_count,
packet_count, byte_count,
bucket_counter_cnt)
class TestOFPGroupDescStatsRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPGroupDescStatsRequest
"""
def test_serialize(self):
c = OFPGroupDescStatsRequest(_Datapath)
c.serialize()
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ ofproto.OFP_STATS_REQUEST_PACK_STR.replace('!', '')
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_STATS_REQUEST)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], ofproto.OFPST_GROUP_DESC)
eq_(res[5], 0)
class TestOFPGroupDescStats(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPGroupDescStats
"""
# OFP_GROUP_DESC_STATS_PACK_STR = '!HBxI'
length = ofproto.OFP_GROUP_DESC_STATS_SIZE \
+ ofproto.OFP_BUCKET_SIZE \
+ ofproto.OFP_ACTION_OUTPUT_SIZE
type_ = 128
group_id = 6606
# OFP_ACTION (OFP_ACTION_OUTPUT)
port = 0x00002ae0
max_len = ofproto.OFP_ACTION_OUTPUT_SIZE
actions = [OFPActionOutput(port, max_len)]
buf_actions = bytearray()
actions[0].serialize(buf_actions, 0)
# OFP_BUCKET
weight = 4386
watch_port = 8006
watch_group = 3
buckets = [OFPBucket(weight, watch_port, watch_group, actions)]
bucket_cnt = 1024
def test_init(self):
c = OFPGroupDescStats(self.type_, self.group_id, self.buckets)
eq_(self.type_, c.type)
eq_(self.group_id, c.group_id)
eq_(self.buckets, c.buckets)
def _test_parser(self, type_, group_id, bucket_cnt):
# OFP_GROUP_DESC_STATS_PACK_STR = '!HBxI'
length = ofproto.OFP_GROUP_DESC_STATS_SIZE \
+ (ofproto.OFP_BUCKET_SIZE
+ ofproto.OFP_ACTION_OUTPUT_SIZE) * bucket_cnt
fmt = ofproto.OFP_GROUP_DESC_STATS_PACK_STR
buf = pack(fmt, length, type_, group_id)
buckets = []
for b in range(bucket_cnt):
# OFP_BUCKET
weight = watch_port = watch_group = b
bucket = OFPBucket(weight,
watch_port, watch_group,
self.actions)
buckets.append(bucket)
buf_buckets = bytearray()
buckets[b].serialize(buf_buckets, 0)
buf += six.binary_type(buf_buckets)
res = OFPGroupDescStats.parser(buf, 0)
        # fixed-length part of OFPGroupDescStats (8 bytes)
eq_(type_, res.type)
eq_(group_id, res.group_id)
        # the 16-bit 'length' field limits 8 + (16 + 16) * bucket_cnt
        # to 65535 bytes, hence bucket_cnt <= 2047
for b in range(bucket_cnt):
eq_(buckets[b].weight, res.buckets[b].weight)
eq_(buckets[b].watch_port, res.buckets[b].watch_port)
eq_(buckets[b].watch_group, res.buckets[b].watch_group)
eq_(buckets[b].actions[0].port,
res.buckets[b].actions[0].port)
eq_(buckets[b].actions[0].max_len,
res.buckets[b].actions[0].max_len)
def test_parser_mid(self):
self._test_parser(self.type_, self.group_id, self.bucket_cnt)
def test_parser_max(self):
group_id = 4294967295
type_ = 255
bucket_cnt = 2047
self._test_parser(type_, group_id, bucket_cnt)
def test_parser_min(self):
group_id = 0
type_ = ofproto.OFPGT_ALL
bucket_cnt = 0
self._test_parser(type_, group_id, bucket_cnt)
def test_parser_p1(self):
type_ = ofproto.OFPGT_SELECT
self._test_parser(type_, self.group_id, self.bucket_cnt)
def test_parser_p2(self):
type_ = ofproto.OFPGT_INDIRECT
self._test_parser(type_, self.group_id, self.bucket_cnt)
def test_parser_p3(self):
type_ = ofproto.OFPGT_FF
self._test_parser(type_, self.group_id, self.bucket_cnt)
class TestOFPGroupFeaturesStatsRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPGroupFeaturesStatsRequest
"""
def test_serialize(self):
c = OFPGroupFeaturesStatsRequest(_Datapath)
c.serialize()
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ ofproto.OFP_STATS_REQUEST_PACK_STR.replace('!', '')
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_STATS_REQUEST)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], ofproto.OFPST_GROUP_FEATURES)
eq_(res[5], 0)
class TestOFPGroupFeaturesStats(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPGroupFeaturesStats
"""
# OFP_GROUP_FEATURES_STATS_PACK_STR = '!II4I4I'
types = ofproto.OFPGT_ALL
capabilities = ofproto.OFPGFC_SELECT_WEIGHT
max_groups = [1, 2, 3, 4]
actions = [1 << ofproto.OFPAT_OUTPUT,
1 << ofproto.OFPAT_COPY_TTL_OUT,
1 << ofproto.OFPAT_SET_MPLS_TTL,
1 << ofproto.OFPAT_PUSH_VLAN]
def test_init(self):
c = OFPGroupFeaturesStats(self.types, self.capabilities,
self.max_groups, self.actions)
eq_(self.types, c.types)
eq_(self.capabilities, c.capabilities)
eq_(self.max_groups, c.max_groups)
eq_(self.actions, c.actions)
def _test_parser(self, types, capabilities, max_groups, actions):
buf = pack('!I', types) \
+ pack('!I', capabilities) \
+ pack('!I', max_groups[0]) \
+ pack('!I', max_groups[1]) \
+ pack('!I', max_groups[2]) \
+ pack('!I', max_groups[3]) \
+ pack('!I', actions[0]) \
+ pack('!I', actions[1]) \
+ pack('!I', actions[2]) \
+ pack('!I', actions[3])
res = OFPGroupFeaturesStats.parser(buf, 0)
        # max_groups and actions come back from the parser as sequences
eq_(types, res.types)
eq_(capabilities, res.capabilities)
eq_(max_groups, res.max_groups)
eq_(actions, res.actions)
def test_parser_mid(self):
self._test_parser(self.types, self.capabilities,
self.max_groups, self.actions)
def test_parser_max(self):
types = 0b11111111111111111111111111111111
capabilities = 0b11111111111111111111111111111111
max_groups = [4294967295] * 4
actions = [0b11111111111111111111111111111111] * 4
self._test_parser(types, capabilities,
max_groups, actions)
def test_parser_min(self):
types = 0b00000000000000000000000000000000
capabilities = 0b00000000000000000000000000000000
max_groups = [0] * 4
actions = [0b00000000000000000000000000000000] * 4
self._test_parser(types, capabilities,
max_groups, actions)
def _test_parser_p(self, types, capabilities, actions):
self._test_parser(types, capabilities,
self.max_groups, actions)
def test_parser_p1(self):
actions = [1 << ofproto.OFPAT_COPY_TTL_IN,
1 << ofproto.OFPAT_DEC_MPLS_TTL,
1 << ofproto.OFPAT_POP_VLAN,
1 << ofproto.OFPAT_PUSH_MPLS]
self._test_parser_p(1 << ofproto.OFPGT_ALL,
ofproto.OFPGFC_CHAINING,
actions)
def test_parser_p2(self):
actions = [1 << ofproto.OFPAT_POP_MPLS,
1 << ofproto.OFPAT_SET_QUEUE,
1 << ofproto.OFPAT_GROUP,
1 << ofproto.OFPAT_SET_NW_TTL]
self._test_parser_p(1 << ofproto.OFPGT_SELECT,
ofproto.OFPGFC_SELECT_WEIGHT,
actions)
def test_parser_p3(self):
actions = [1 << ofproto.OFPAT_DEC_NW_TTL,
1 << ofproto.OFPAT_SET_FIELD,
1 << ofproto.OFPAT_GROUP,
1 << ofproto.OFPAT_SET_NW_TTL]
self._test_parser_p(1 << ofproto.OFPGT_SELECT,
ofproto.OFPGFC_SELECT_LIVENESS,
actions)
def test_parser_p4(self):
self._test_parser_p(1 << ofproto.OFPGT_INDIRECT,
ofproto.OFPGFC_CHAINING,
self.actions)
def test_parser_p5(self):
self._test_parser_p(1 << ofproto.OFPGT_FF,
ofproto.OFPGFC_CHAINING_CHECKS,
self.actions)
class TestOFPQueueGetConfigRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPQueueGetConfigRequest
"""
# OFP_QUEUE_GET_CONFIG_REQUEST_PACK_STR v1.2
# '!I4x'...port, pad(4)
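    # (in v1.2 the port field is 32 bits wide, unlike the 16-bit port of
    # the v1.0 queue-get-config request)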
port = 41186
def test_init(self):
c = OFPQueueGetConfigRequest(_Datapath, self.port)
eq_(self.port, c.port)
def _test_serialize(self, port):
c = OFPQueueGetConfigRequest(_Datapath, port)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_QUEUE_GET_CONFIG_REQUEST, c.msg_type)
eq_(0, c.xid)
fmt = ofproto.OFP_HEADER_PACK_STR \
+ ofproto.OFP_QUEUE_GET_CONFIG_REQUEST_PACK_STR[1:]
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(res[0], ofproto.OFP_VERSION)
eq_(res[1], ofproto.OFPT_QUEUE_GET_CONFIG_REQUEST)
eq_(res[2], len(c.buf))
eq_(res[3], 0)
eq_(res[4], port)
def test_serialize_mid(self):
self._test_serialize(self.port)
def test_serialize_max(self):
self._test_serialize(0xffffffff)
def test_serialize_min(self):
self._test_serialize(0)
def test_serialize_p1(self):
self._test_serialize(ofproto.OFPP_MAX)
class TestOFPQueuePropHeader(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPQueuePropHeader
"""
# OFP_QUEUE_PROP_HEADER_PACK_STR = '!HH4x'
property_ = 1
len_ = 10
def test_init(self):
c = OFPQueuePropHeader(self.property_, self.len_)
eq_(self.property_, c.property)
eq_(self.len_, c.len)
def _test_serialize(self, property_, len_):
c = OFPQueuePropHeader(property_, len_)
buf = bytearray()
c.serialize(buf, 0)
fmt = ofproto.OFP_QUEUE_PROP_HEADER_PACK_STR
res = struct.unpack(fmt, six.binary_type(buf))
eq_(res[0], property_)
eq_(res[1], len_)
def test_serialize_mid(self):
self._test_serialize(self.property_, self.len_)
def test_serialize_max(self):
self._test_serialize(0xffff, 0xffff)
def test_serialize_min(self):
self._test_serialize(0, 0)
class TestOFPPacketQueue(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPPacketQueue
"""
def test_init(self):
queue_id = 1
port = 2
len_ = 3
properties = [4, 5, 6]
c = OFPPacketQueue(queue_id, port, properties)
eq_(queue_id, c.queue_id)
eq_(port, c.port)
eq_(properties, c.properties)
def _test_parser(self, queue_id, port, prop_cnt):
# OFP_PACKET_QUEUE_PACK_STR = '!IIH6x'
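        # '!IIH6x' = queue_id(4) + port(4) + len(2) + pad(6) = 16 bytes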
fmt = ofproto.OFP_PACKET_QUEUE_PACK_STR
queue_len = ofproto.OFP_PACKET_QUEUE_SIZE \
+ ofproto.OFP_QUEUE_PROP_MIN_RATE_SIZE * prop_cnt
buf = pack(fmt, queue_id, port, queue_len)
for rate in range(prop_cnt):
# OFP_QUEUE_PROP_HEADER_PACK_STR = '!HH4x'
fmt = ofproto.OFP_QUEUE_PROP_HEADER_PACK_STR
prop_type = ofproto.OFPQT_MIN_RATE
prop_len = ofproto.OFP_QUEUE_PROP_MIN_RATE_SIZE
buf += pack(fmt, prop_type, prop_len)
# OFP_QUEUE_PROP_MIN_RATE_PACK_STR = '!H6x'
fmt = ofproto.OFP_QUEUE_PROP_MIN_RATE_PACK_STR
prop_rate = rate
buf += pack(fmt, prop_rate)
res = OFPPacketQueue.parser(buf, 0)
eq_(queue_id, res.queue_id)
eq_(port, res.port)
eq_(queue_len, res.len)
eq_(prop_cnt, len(res.properties))
for rate, p in enumerate(res.properties):
eq_(prop_type, p.property)
eq_(prop_len, p.len)
eq_(rate, p.rate)
def test_parser_mid(self):
queue_id = 1
port = 2
prop_cnt = 2
self._test_parser(queue_id, port, prop_cnt)
def test_parser_max(self):
        # queue_len is packed as 'H' (16 bits), so it must not exceed 65535
#
# queue_len = OFP_PACKET_QUEUE_SIZE(16)
# + OFP_QUEUE_PROP_MIN_RATE_SIZE(16) * N
# max_prop_cnt = (65535 - 16) / 16 = 4094
queue_id = 0xffffffff
port = 0xffffffff
prop_cnt = 4094
self._test_parser(queue_id, port, prop_cnt)
def test_parser_min(self):
queue_id = 0
port = 0
prop_cnt = 0
self._test_parser(queue_id, port, prop_cnt)
class TestOFPQueuePropMinRate(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPQueuePropMinRate
"""
def _test_parser(self, rate):
# OFP_QUEUE_PROP_MIN_RATE_PACK_STR...H6x
buf = pack(ofproto.OFP_QUEUE_PROP_MIN_RATE_PACK_STR, rate)
res = OFPQueuePropMinRate.parser(buf, 0)
eq_(rate, res.rate)
def test_parser_mid(self):
self._test_parser(32768)
def test_parser_max(self):
self._test_parser(0xffff)
def test_parser_min(self):
self._test_parser(0)
class TestOFPQueuePropMaxRate(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPQueuePropMaxRate
"""
rate = 100
buf = pack(ofproto.OFP_QUEUE_PROP_MAX_RATE_PACK_STR, rate)
c = OFPQueuePropMaxRate(rate)
def _test_parser(self, rate):
# OFP_QUEUE_PROP_MAX_RATE_PACK_STR...H6x
buf = pack(ofproto.OFP_QUEUE_PROP_MAX_RATE_PACK_STR, rate)
res = OFPQueuePropMaxRate.parser(buf, 0)
eq_(rate, res.rate)
def test_parser_mid(self):
self._test_parser(100)
def test_parser_max(self):
self._test_parser(0xffff)
def test_parser_min(self):
self._test_parser(0)
class TestOFPQueueGetConfigReply(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPQueueGetConfigReply
"""
def _test_parser(self, xid, port, queue_cnt):
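        # build a complete OFPT_QUEUE_GET_CONFIG_REPLY by hand: OFP header,
        # then the port field, then queue_cnt packet-queue entries, each
        # carrying a single MIN_RATE property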
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_QUEUE_GET_CONFIG_REPLY
queues_len = 0
for q in range(queue_cnt):
queues_len += ofproto.OFP_PACKET_QUEUE_SIZE
queues_len += ofproto.OFP_QUEUE_PROP_MIN_RATE_SIZE
msg_len = ofproto.OFP_QUEUE_GET_CONFIG_REPLY_SIZE \
+ queues_len
# OFP_HEADER_PACK_STR = '!BBHI'
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
# OFP_QUEUE_GET_CONFIG_REPLY_PACK_STR = '!I4x'
fmt = ofproto.OFP_QUEUE_GET_CONFIG_REPLY_PACK_STR
buf += pack(fmt, port)
queues = []
for q in range(1, queue_cnt + 1):
# OFP_PACKET_QUEUE_PACK_STR = '!IIH6x'
fmt = ofproto.OFP_PACKET_QUEUE_PACK_STR
queue_id = q * 100
queue_port = q
queue_len = ofproto.OFP_PACKET_QUEUE_SIZE \
+ ofproto.OFP_QUEUE_PROP_MIN_RATE_SIZE
buf += pack(fmt, queue_id, queue_port, queue_len)
# OFP_QUEUE_PROP_HEADER_PACK_STR = '!HH4x'
fmt = ofproto.OFP_QUEUE_PROP_HEADER_PACK_STR
prop_type = ofproto.OFPQT_MIN_RATE
prop_len = ofproto.OFP_QUEUE_PROP_MIN_RATE_SIZE
buf += pack(fmt, prop_type, prop_len)
# OFP_QUEUE_PROP_MIN_RATE_PACK_STR = '!H6x'
fmt = ofproto.OFP_QUEUE_PROP_MIN_RATE_PACK_STR
prop_rate = q * 10
buf += pack(fmt, prop_rate)
queue = {'queue_id': queue_id, 'queue_port': queue_port,
'queue_len': queue_len, 'prop_type': prop_type,
'prop_len': prop_len, 'prop_rate': prop_rate}
queues.append(queue)
res = OFPQueueGetConfigReply.parser(object, version, msg_type,
msg_len, xid, buf)
eq_(version, res.version)
eq_(msg_type, res.msg_type)
eq_(msg_len, res.msg_len)
eq_(xid, res.xid)
eq_(port, res.port)
eq_(queue_cnt, len(res.queues))
for i, val in enumerate(res.queues):
c = queues[i]
eq_(c['queue_id'], val.queue_id)
eq_(c['queue_port'], val.port)
eq_(c['queue_len'], val.len)
eq_(1, len(val.properties))
prop = val.properties[0]
eq_(c['prop_type'], prop.property)
eq_(c['prop_len'], prop.len)
eq_(c['prop_rate'], prop.rate)
def test_parser_mid(self):
self._test_parser(2495926989, 65037, 2)
def test_parser_max(self):
# total msg_len = 65520
self._test_parser(0xffffffff, 0xffffffff, 2047)
def test_parser_min(self):
self._test_parser(0, 0, 0)
class TestOFPBarrierRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPBarrierRequest
"""
def test_serialize(self):
c = OFPBarrierRequest(_Datapath)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_BARRIER_REQUEST, c.msg_type)
eq_(ofproto.OFP_HEADER_SIZE, c.msg_len)
eq_(0, c.xid)
fmt = ofproto.OFP_HEADER_PACK_STR
res = unpack(fmt, six.binary_type(c.buf))
eq_(ofproto.OFP_VERSION, res[0])
eq_(ofproto.OFPT_BARRIER_REQUEST, res[1])
eq_(len(c.buf), res[2])
        eq_(0, res[3])
class TestOFPBarrierReply(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPBarrierReply
"""
def _test_parser(self, xid):
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_BARRIER_REPLY
msg_len = ofproto.OFP_HEADER_SIZE
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
res = OFPBarrierReply.parser(object, version, msg_type,
msg_len, xid, buf)
eq_(version, res.version)
eq_(msg_type, res.msg_type)
eq_(msg_len, res.msg_len)
eq_(xid, res.xid)
def test_parser_mid(self):
self._test_parser(2147483648)
def test_parser_max(self):
self._test_parser(0xffffffff)
def test_parser_min(self):
self._test_parser(0)
class TestOFPRoleRequest(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPRoleRequest
"""
# OFP_ROLE_REQUEST_PACK_STR
# '!I4xQ'...role, pad(4), generation_id
role = 2147483648
generation_id = 1270985291017894273
def test_init(self):
c = OFPRoleRequest(_Datapath, self.role, self.generation_id)
eq_(self.role, c.role)
eq_(self.generation_id, c.generation_id)
def _test_serialize(self, role, generation_id):
c = OFPRoleRequest(_Datapath, role, generation_id)
c.serialize()
eq_(ofproto.OFP_VERSION, c.version)
eq_(ofproto.OFPT_ROLE_REQUEST, c.msg_type)
eq_(0, c.xid)
fmt = '!' \
+ ofproto.OFP_HEADER_PACK_STR.replace('!', '') \
+ ofproto.OFP_ROLE_REQUEST_PACK_STR.replace('!', '')
res = struct.unpack(fmt, six.binary_type(c.buf))
eq_(ofproto.OFP_VERSION, res[0])
eq_(ofproto.OFPT_ROLE_REQUEST, res[1])
eq_(len(c.buf), res[2])
eq_(0, res[3])
eq_(role, res[4])
eq_(generation_id, res[5])
def test_serialize_mid(self):
self._test_serialize(self.role, self.generation_id)
def test_serialize_max(self):
role = 0xffffffff
generation_id = 0xffffffffffffffff
self._test_serialize(role, generation_id)
def test_serialize_min(self):
role = 0
generation_id = 0
self._test_serialize(role, generation_id)
def test_serialize_p1(self):
role = ofproto.OFPCR_ROLE_EQUAL
self._test_serialize(role, self.generation_id)
def test_serialize_p2(self):
role = ofproto.OFPCR_ROLE_MASTER
self._test_serialize(role, self.generation_id)
def test_serialize_p3(self):
role = ofproto.OFPCR_ROLE_SLAVE
self._test_serialize(role, self.generation_id)
class TestOFPRoleReply(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPRoleReply
"""
# OFP_ROLE_REQUEST_PACK_STR
# '!I4xQ'...role, pad(4), generation_id
# role = ofproto.OFPCR_ROLE_NOCHANGE
role = 2147483648
generation_id = 1270985291017894273
def _test_parser(self, role, generation_id):
# OFP_HEADER_PACK_STR
version = ofproto.OFP_VERSION
msg_type = ofproto.OFPT_ROLE_REPLY
msg_len = ofproto.OFP_ROLE_REQUEST_SIZE
xid = 2495926989
fmt = ofproto.OFP_HEADER_PACK_STR
buf = pack(fmt, version, msg_type, msg_len, xid)
fmt = ofproto.OFP_ROLE_REQUEST_PACK_STR
buf += pack(fmt, role, generation_id)
res = OFPRoleReply.parser(object, version, msg_type, msg_len, xid, buf)
# OFP_HEADER_PACK_STR
eq_(version, res.version)
eq_(msg_type, res.msg_type)
eq_(msg_len, res.msg_len)
eq_(xid, res.xid)
# OFP_ROLE_REQUEST_PACK_STR
eq_(role, res.role)
eq_(generation_id, res.generation_id)
def test_parser_mid(self):
self._test_parser(self.role, self.generation_id)
def test_parser_max(self):
role = 0xffffffff
generation_id = 0xffffffffffffffff
self._test_parser(role, generation_id)
def test_parser_min(self):
role = ofproto.OFPCR_ROLE_NOCHANGE
generation_id = 0
self._test_parser(role, generation_id)
def test_parser_p1(self):
role = ofproto.OFPCR_ROLE_EQUAL
self._test_parser(role, self.generation_id)
def test_parser_p2(self):
role = ofproto.OFPCR_ROLE_MASTER
self._test_parser(role, self.generation_id)
def test_parser_p3(self):
role = ofproto.OFPCR_ROLE_SLAVE
self._test_parser(role, self.generation_id)
class TestOFPMatch(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPMatch
"""
def test_init(self):
res = OFPMatch()
# wc check
eq_(res._wc.metadata_mask, 0)
eq_(res._wc.dl_dst_mask, 0)
eq_(res._wc.dl_src_mask, 0)
eq_(res._wc.vlan_vid_mask, 0)
eq_(res._wc.ipv4_src_mask, 0)
eq_(res._wc.ipv4_dst_mask, 0)
eq_(res._wc.arp_spa_mask, 0)
eq_(res._wc.arp_tpa_mask, 0)
eq_(res._wc.arp_sha_mask, 0)
eq_(res._wc.arp_tha_mask, 0)
eq_(res._wc.ipv6_src_mask, [])
eq_(res._wc.ipv6_dst_mask, [])
eq_(res._wc.ipv6_flabel_mask, 0)
eq_(res._wc.wildcards, (1 << 64) - 1)
# flow check
eq_(res._flow.in_port, 0)
eq_(res._flow.in_phy_port, 0)
eq_(res._flow.metadata, 0)
eq_(res._flow.dl_dst, mac.DONTCARE)
eq_(res._flow.dl_src, mac.DONTCARE)
eq_(res._flow.dl_type, 0)
eq_(res._flow.vlan_vid, 0)
eq_(res._flow.vlan_pcp, 0)
eq_(res._flow.ip_dscp, 0)
eq_(res._flow.ip_ecn, 0)
eq_(res._flow.ip_proto, 0)
eq_(res._flow.ipv4_src, 0)
eq_(res._flow.ipv4_dst, 0)
eq_(res._flow.tcp_src, 0)
eq_(res._flow.tcp_dst, 0)
eq_(res._flow.udp_src, 0)
eq_(res._flow.udp_dst, 0)
eq_(res._flow.sctp_src, 0)
eq_(res._flow.sctp_dst, 0)
eq_(res._flow.icmpv4_type, 0)
eq_(res._flow.icmpv4_code, 0)
eq_(res._flow.arp_op, 0)
eq_(res._flow.arp_spa, 0)
eq_(res._flow.arp_tpa, 0)
eq_(res._flow.arp_sha, 0)
eq_(res._flow.arp_tha, 0)
eq_(res._flow.ipv6_src, [])
eq_(res._flow.ipv6_dst, [])
eq_(res._flow.ipv6_flabel, 0)
eq_(res._flow.icmpv6_type, 0)
eq_(res._flow.icmpv6_code, 0)
eq_(res._flow.ipv6_nd_target, [])
eq_(res._flow.ipv6_nd_sll, 0)
eq_(res._flow.ipv6_nd_tll, 0)
eq_(res._flow.mpls_label, 0)
eq_(res._flow.mpls_tc, 0)
# flow check
eq_(res.fields, [])
def _test_serialize_and_parser(self, match, header, value, mask=None):
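        # round-trip helper shared by the set_* tests below: serialize the
        # match, unpack the raw OXM payload to verify the value (and mask) on
        # the wire, re-parse the buffer, and finally round-trip via jsondict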
cls_ = OFPMatchField._FIELDS_HEADERS.get(header)
pack_str = cls_.pack_str.replace('!', '')
fmt = '!HHI' + pack_str
# serialize
buf = bytearray()
length = match.serialize(buf, 0)
eq_(length, len(buf))
if mask and len(buf) > calcsize(fmt):
fmt += pack_str
res = list(unpack_from(fmt, six.binary_type(buf), 0)[3:])
if type(value) is list:
res_value = res[:calcsize(pack_str) // 2]
eq_(res_value, value)
if mask:
res_mask = res[calcsize(pack_str) // 2:]
eq_(res_mask, mask)
else:
res_value = res.pop(0)
if cls_.__name__ == 'MTVlanVid':
eq_(res_value, value | ofproto.OFPVID_PRESENT)
else:
eq_(res_value, value)
if mask and res and res[0]:
res_mask = res[0]
eq_(res_mask, mask)
# parser
res = match.parser(six.binary_type(buf), 0)
eq_(res.type, ofproto.OFPMT_OXM)
eq_(res.fields[0].header, header)
eq_(res.fields[0].value, value)
if mask and res.fields[0].mask is not None:
eq_(res.fields[0].mask, mask)
# to_jsondict
jsondict = match.to_jsondict()
# from_jsondict
match2 = match.from_jsondict(jsondict["OFPMatch"])
buf2 = bytearray()
match2.serialize(buf2, 0)
eq_(str(match), str(match2))
eq_(buf, buf2)
def test_parse_unknown_field(self):
buf = bytearray()
pack_utils.msg_pack_into('!HH', buf, 0, ofproto.OFPMT_OXM, 4 + 6)
header = ofproto.oxm_tlv_header(36, 2)
pack_utils.msg_pack_into('!IH', buf, 4, header, 1)
header = ofproto.OXM_OF_ETH_TYPE
pack_utils.msg_pack_into('!IH', buf, 10, header, 1)
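        # OXM field 36 is not defined for v1.2; the parser is expected to
        # tolerate the unknown TLV without raising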
match = OFPMatch()
        match.parser(six.binary_type(buf), 0)
# set_in_port
def _test_set_in_port(self, in_port):
header = ofproto.OXM_OF_IN_PORT
match = OFPMatch()
match.set_in_port(in_port)
self._test_serialize_and_parser(match, header, in_port)
def test_set_in_port_mid(self):
self._test_set_in_port(0xff8)
def test_set_in_port_max(self):
self._test_set_in_port(0xffffffff)
def test_set_in_port_min(self):
self._test_set_in_port(0)
# set_in_phy_port
def _test_set_in_phy_port(self, phy_port):
header = ofproto.OXM_OF_IN_PHY_PORT
match = OFPMatch()
match.set_in_phy_port(phy_port)
self._test_serialize_and_parser(match, header, phy_port)
def test_set_in_phy_port_mid(self):
self._test_set_in_phy_port(1)
def test_set_in_phy_port_max(self):
self._test_set_in_phy_port(0xffffffff)
def test_set_in_phy_port_min(self):
self._test_set_in_phy_port(0)
# set_metadata
def _test_set_metadata(self, metadata, mask=None):
header = ofproto.OXM_OF_METADATA
match = OFPMatch()
if mask is None:
match.set_metadata(metadata)
else:
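            # an all-ones mask is equivalent to an exact match; only switch
            # to the masked (_W) OXM header when some bits are wildcarded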
if (mask + 1) >> 64 != 1:
header = ofproto.OXM_OF_METADATA_W
match.set_metadata_masked(metadata, mask)
metadata &= mask
self._test_serialize_and_parser(match, header, metadata, mask)
def test_set_metadata_mid(self):
self._test_set_metadata(0x1212121212121212)
def test_set_metadata_max(self):
self._test_set_metadata(0xffffffffffffffff)
def test_set_metadata_min(self):
self._test_set_metadata(0)
def test_set_metadata_masked_mid(self):
self._test_set_metadata(0x1212121212121212, 0xff00ff00ff00ff00)
def test_set_metadata_masked_max(self):
self._test_set_metadata(0x1212121212121212, 0xffffffffffffffff)
def test_set_metadata_masked_min(self):
self._test_set_metadata(0x1212121212121212, 0)
# set_dl_dst
def _test_set_dl_dst(self, dl_dst, mask=None):
header = ofproto.OXM_OF_ETH_DST
match = OFPMatch()
dl_dst = mac.haddr_to_bin(dl_dst)
if mask is None:
match.set_dl_dst(dl_dst)
else:
header = ofproto.OXM_OF_ETH_DST_W
mask = mac.haddr_to_bin(mask)
match.set_dl_dst_masked(dl_dst, mask)
dl_dst = mac.haddr_bitand(dl_dst, mask)
self._test_serialize_and_parser(match, header, dl_dst, mask)
def test_set_dl_dst_mid(self):
self._test_set_dl_dst('e2:7a:09:79:0b:0f')
def test_set_dl_dst_max(self):
self._test_set_dl_dst('ff:ff:ff:ff:ff:ff')
def test_set_dl_dst_min(self):
self._test_set_dl_dst('00:00:00:00:00:00')
def test_set_dl_dst_masked_mid(self):
self._test_set_dl_dst('e2:7a:09:79:0b:0f', 'ff:00:ff:00:ff:00')
def test_set_dl_dst_masked_max(self):
self._test_set_dl_dst('e2:7a:09:79:0b:0f', 'ff:ff:ff:ff:ff:ff')
def test_set_dl_dst_masked_min(self):
self._test_set_dl_dst('e2:7a:09:79:0b:0f', '00:00:00:00:00:00')
# set_dl_src
def _test_set_dl_src(self, dl_src, mask=None):
header = ofproto.OXM_OF_ETH_SRC
match = OFPMatch()
dl_src = mac.haddr_to_bin(dl_src)
if mask is None:
match.set_dl_src(dl_src)
else:
header = ofproto.OXM_OF_ETH_SRC_W
mask = mac.haddr_to_bin(mask)
match.set_dl_src_masked(dl_src, mask)
dl_src = mac.haddr_bitand(dl_src, mask)
self._test_serialize_and_parser(match, header, dl_src, mask)
def test_set_dl_src_mid(self):
self._test_set_dl_src('d0:98:79:b4:75:b5')
def test_set_dl_src_max(self):
self._test_set_dl_src('ff:ff:ff:ff:ff:ff')
def test_set_dl_src_min(self):
self._test_set_dl_src('00:00:00:00:00:00')
def test_set_dl_src_masked_mid(self):
self._test_set_dl_src('d0:98:79:b4:75:b5', 'f0:f0:f0:f0:f0:f0')
def test_set_dl_src_masked_max(self):
self._test_set_dl_src('d0:98:79:b4:75:b5', 'ff:ff:ff:ff:ff:ff')
def test_set_dl_src_masked_min(self):
self._test_set_dl_src('d0:98:79:b4:75:b5', '00:00:00:00:00:00')
# set_dl_type
def _test_set_dl_type(self, value):
header = ofproto.OXM_OF_ETH_TYPE
match = OFPMatch()
match.set_dl_type(value)
self._test_serialize_and_parser(match, header, value)
def test_set_dl_type_mid(self):
self._test_set_dl_type(0x7fb6)
def test_set_dl_type_max(self):
self._test_set_dl_type(0xffff)
def test_set_dl_type_min(self):
self._test_set_dl_type(0)
def test_set_dl_type_ip(self):
value = ether.ETH_TYPE_IP
self._test_set_dl_type(value)
def test_set_dl_type_arp(self):
value = ether.ETH_TYPE_ARP
self._test_set_dl_type(value)
def test_set_dl_type_ipv6(self):
value = ether.ETH_TYPE_IPV6
self._test_set_dl_type(value)
def test_set_dl_type_slow(self):
value = ether.ETH_TYPE_SLOW
self._test_set_dl_type(value)
# set_vlan_vid
def _test_set_vlan_vid(self, vid, mask=None):
header = ofproto.OXM_OF_VLAN_VID
match = OFPMatch()
if mask is None:
match.set_vlan_vid(vid)
else:
header = ofproto.OXM_OF_VLAN_VID_W
match.set_vlan_vid_masked(vid, mask)
self._test_serialize_and_parser(match, header, vid, mask)
def _test_set_vlan_vid_none(self):
header = ofproto.OXM_OF_VLAN_VID
match = OFPMatch()
match.set_vlan_vid_none()
value = ofproto.OFPVID_NONE
cls_ = OFPMatchField._FIELDS_HEADERS.get(header)
pack_str = cls_.pack_str.replace('!', '')
fmt = '!HHI' + pack_str
# serialize
buf = bytearray()
length = match.serialize(buf, 0)
eq_(length, len(buf))
res = list(unpack_from(fmt, six.binary_type(buf), 0)[3:])
res_value = res.pop(0)
eq_(res_value, value)
# parser
res = match.parser(six.binary_type(buf), 0)
eq_(res.type, ofproto.OFPMT_OXM)
eq_(res.fields[0].header, header)
eq_(res.fields[0].value, value)
# to_jsondict
jsondict = match.to_jsondict()
# from_jsondict
match2 = match.from_jsondict(jsondict["OFPMatch"])
buf2 = bytearray()
match2.serialize(buf2, 0)
eq_(str(match), str(match2))
eq_(buf, buf2)
def test_set_vlan_vid_mid(self):
self._test_set_vlan_vid(2047)
def test_set_vlan_vid_max(self):
self._test_set_vlan_vid(0xfff)
def test_set_vlan_vid_min(self):
self._test_set_vlan_vid(0)
def test_set_vlan_vid_masked_mid(self):
self._test_set_vlan_vid(2047, 0xf0f)
def test_set_vlan_vid_masked_max(self):
self._test_set_vlan_vid(2047, 0xfff)
def test_set_vlan_vid_masked_min(self):
self._test_set_vlan_vid(2047, 0)
def test_set_vlan_vid_none(self):
self._test_set_vlan_vid_none()
# set_vlan_pcp
def _test_set_vlan_pcp(self, pcp):
header = ofproto.OXM_OF_VLAN_PCP
match = OFPMatch()
match.set_vlan_pcp(pcp)
self._test_serialize_and_parser(match, header, pcp)
def test_set_vlan_pcp_mid(self):
self._test_set_vlan_pcp(5)
def test_set_vlan_pcp_max(self):
self._test_set_vlan_pcp(7)
def test_set_vlan_pcp_min(self):
self._test_set_vlan_pcp(0)
# set_ip_dscp
def _test_set_ip_dscp(self, ip_dscp):
header = ofproto.OXM_OF_IP_DSCP
match = OFPMatch()
match.set_ip_dscp(ip_dscp)
self._test_serialize_and_parser(match, header, ip_dscp)
def test_set_ip_dscp_mid(self):
self._test_set_ip_dscp(36)
def test_set_ip_dscp_max(self):
self._test_set_ip_dscp(63)
def test_set_ip_dscp_min(self):
self._test_set_ip_dscp(0)
# set_ip_ecn
def _test_set_ip_ecn(self, ip_ecn):
header = ofproto.OXM_OF_IP_ECN
match = OFPMatch()
match.set_ip_ecn(ip_ecn)
self._test_serialize_and_parser(match, header, ip_ecn)
def test_set_ip_ecn_mid(self):
self._test_set_ip_ecn(1)
def test_set_ip_ecn_max(self):
self._test_set_ip_ecn(3)
def test_set_ip_ecn_min(self):
self._test_set_ip_ecn(0)
# set_ip_proto
def _test_set_ip_proto(self, ip_proto):
header = ofproto.OXM_OF_IP_PROTO
match = OFPMatch()
match.set_ip_proto(ip_proto)
self._test_serialize_and_parser(match, header, ip_proto)
def test_set_ip_proto_mid(self):
self._test_set_ip_proto(6)
def test_set_ip_proto_max(self):
self._test_set_ip_proto(0xff)
def test_set_ip_proto_min(self):
self._test_set_ip_proto(0)
# set_ipv4_src
def _test_set_ipv4_src(self, ip, mask=None):
header = ofproto.OXM_OF_IPV4_SRC
match = OFPMatch()
ip = unpack('!I', socket.inet_aton(ip))[0]
if mask is None:
match.set_ipv4_src(ip)
else:
mask = unpack('!I', socket.inet_aton(mask))[0]
if (mask + 1) >> 32 != 1:
header = ofproto.OXM_OF_IPV4_SRC_W
match.set_ipv4_src_masked(ip, mask)
self._test_serialize_and_parser(match, header, ip, mask)
def test_set_ipv4_src_mid(self):
self._test_set_ipv4_src('192.168.196.250')
def test_set_ipv4_src_max(self):
self._test_set_ipv4_src('255.255.255.255')
def test_set_ipv4_src_min(self):
self._test_set_ipv4_src('0.0.0.0')
def test_set_ipv4_src_masked_mid(self):
self._test_set_ipv4_src('192.168.196.250', '255.255.0.0')
def test_set_ipv4_src_masked_max(self):
self._test_set_ipv4_src('192.168.196.250', '255.255.255.255')
def test_set_ipv4_src_masked_min(self):
self._test_set_ipv4_src('192.168.196.250', '0.0.0.0')
# set_ipv4_dst
def _test_set_ipv4_dst(self, ip, mask=None):
header = ofproto.OXM_OF_IPV4_DST
match = OFPMatch()
ip = unpack('!I', socket.inet_aton(ip))[0]
if mask is None:
match.set_ipv4_dst(ip)
else:
mask = unpack('!I', socket.inet_aton(mask))[0]
if (mask + 1) >> 32 != 1:
header = ofproto.OXM_OF_IPV4_DST_W
match.set_ipv4_dst_masked(ip, mask)
self._test_serialize_and_parser(match, header, ip, mask)
def test_set_ipv4_dst_mid(self):
self._test_set_ipv4_dst('192.168.196.250')
def test_set_ipv4_dst_max(self):
self._test_set_ipv4_dst('255.255.255.255')
def test_set_ipv4_dst_min(self):
self._test_set_ipv4_dst('0.0.0.0')
def test_set_ipv4_dst_masked_mid(self):
self._test_set_ipv4_dst('192.168.196.250', '255.255.0.0')
def test_set_ipv4_dst_masked_max(self):
self._test_set_ipv4_dst('192.168.196.250', '255.255.255.255')
def test_set_ipv4_dst_masked_min(self):
self._test_set_ipv4_dst('192.168.196.250', '0.0.0.0')
# set_tcp_src
def _test_set_tcp_src(self, tcp_src):
header = ofproto.OXM_OF_TCP_SRC
match = OFPMatch()
match.set_tcp_src(tcp_src)
self._test_serialize_and_parser(match, header, tcp_src)
def test_set_tcp_src_mid(self):
self._test_set_tcp_src(1103)
def test_set_tcp_src_max(self):
self._test_set_tcp_src(0xffff)
def test_set_tcp_src_min(self):
self._test_set_tcp_src(0)
# set_tcp_dst
def _test_set_tcp_dst(self, tcp_dst):
header = ofproto.OXM_OF_TCP_DST
match = OFPMatch()
match.set_tcp_dst(tcp_dst)
self._test_serialize_and_parser(match, header, tcp_dst)
def test_set_tcp_dst_mid(self):
self._test_set_tcp_dst(236)
def test_set_tcp_dst_max(self):
self._test_set_tcp_dst(0xffff)
def test_set_tcp_dst_min(self):
self._test_set_tcp_dst(0)
# set_udp_src
def _test_set_udp_src(self, udp_src):
header = ofproto.OXM_OF_UDP_SRC
match = OFPMatch()
match.set_udp_src(udp_src)
self._test_serialize_and_parser(match, header, udp_src)
def test_set_udp_src_mid(self):
self._test_set_udp_src(56617)
def test_set_udp_src_max(self):
self._test_set_udp_src(0xffff)
def test_set_udp_src_min(self):
self._test_set_udp_src(0)
# set_udp_dst
def _test_set_udp_dst(self, udp_dst):
header = ofproto.OXM_OF_UDP_DST
match = OFPMatch()
match.set_udp_dst(udp_dst)
self._test_serialize_and_parser(match, header, udp_dst)
def test_set_udp_dst_mid(self):
self._test_set_udp_dst(61278)
def test_set_udp_dst_max(self):
self._test_set_udp_dst(0xffff)
def test_set_udp_dst_min(self):
self._test_set_udp_dst(0)
# set_sctp_src
def _test_set_sctp_src(self, sctp_src):
header = ofproto.OXM_OF_SCTP_SRC
match = OFPMatch()
match.set_sctp_src(sctp_src)
self._test_serialize_and_parser(match, header, sctp_src)
def test_set_sctp_src_mid(self):
self._test_set_sctp_src(9999)
def test_set_sctp_src_max(self):
self._test_set_sctp_src(0xffff)
def test_set_sctp_src_min(self):
self._test_set_sctp_src(0)
# set_sctp_dst
def _test_set_sctp_dst(self, sctp_dst):
header = ofproto.OXM_OF_SCTP_DST
match = OFPMatch()
match.set_sctp_dst(sctp_dst)
self._test_serialize_and_parser(match, header, sctp_dst)
def test_set_sctp_dst_mid(self):
self._test_set_sctp_dst(1234)
def test_set_sctp_dst_max(self):
self._test_set_sctp_dst(0xffff)
def test_set_sctp_dst_min(self):
self._test_set_sctp_dst(0)
# set_icmpv4_type
def _test_set_icmpv4_type(self, icmpv4_type):
header = ofproto.OXM_OF_ICMPV4_TYPE
match = OFPMatch()
match.set_icmpv4_type(icmpv4_type)
self._test_serialize_and_parser(match, header, icmpv4_type)
def test_set_icmpv4_type_mid(self):
self._test_set_icmpv4_type(8)
def test_set_icmpv4_type_max(self):
self._test_set_icmpv4_type(0xff)
def test_set_icmpv4_type_min(self):
self._test_set_icmpv4_type(0)
# set_icmpv4_code
def _test_set_icmpv4_code(self, icmpv4_code):
header = ofproto.OXM_OF_ICMPV4_CODE
match = OFPMatch()
match.set_icmpv4_code(icmpv4_code)
self._test_serialize_and_parser(match, header, icmpv4_code)
def test_set_icmpv4_code_mid(self):
self._test_set_icmpv4_code(1)
def test_set_icmpv4_code_max(self):
self._test_set_icmpv4_code(0xff)
def test_set_icmpv4_code_min(self):
self._test_set_icmpv4_code(0)
# set_arp_opcode
def _test_set_arp_opcode(self, arp_op):
header = ofproto.OXM_OF_ARP_OP
match = OFPMatch()
match.set_arp_opcode(arp_op)
self._test_serialize_and_parser(match, header, arp_op)
def test_set_arp_opcode_mid(self):
self._test_set_arp_opcode(1)
def test_set_arp_opcode_max(self):
self._test_set_arp_opcode(0xffff)
def test_set_arp_opcode_min(self):
self._test_set_arp_opcode(0)
# set_arp_spa
def _test_set_arp_spa(self, ip, mask=None):
header = ofproto.OXM_OF_ARP_SPA
match = OFPMatch()
ip = unpack('!I', socket.inet_aton(ip))[0]
if mask is None:
match.set_arp_spa(ip)
else:
mask = unpack('!I', socket.inet_aton(mask))[0]
if (mask + 1) >> 32 != 1:
header = ofproto.OXM_OF_ARP_SPA_W
match.set_arp_spa_masked(ip, mask)
self._test_serialize_and_parser(match, header, ip, mask)
def test_set_arp_spa_mid(self):
self._test_set_arp_spa('192.168.227.57')
def test_set_arp_spa_max(self):
self._test_set_arp_spa('255.255.255.255')
def test_set_arp_spa_min(self):
self._test_set_arp_spa('0.0.0.0')
def test_set_arp_spa_masked_mid(self):
self._test_set_arp_spa('192.168.227.57', '255.255.0.0')
def test_set_arp_spa_masked_max(self):
self._test_set_arp_spa('192.168.227.57', '255.255.255.255')
def test_set_arp_spa_masked_min(self):
self._test_set_arp_spa('192.168.227.57', '0.0.0.0')
# set_arp_tpa
def _test_set_arp_tpa(self, ip, mask=None):
header = ofproto.OXM_OF_ARP_TPA
match = OFPMatch()
ip = unpack('!I', socket.inet_aton(ip))[0]
if mask is None:
match.set_arp_tpa(ip)
else:
mask = unpack('!I', socket.inet_aton(mask))[0]
if (mask + 1) >> 32 != 1:
header = ofproto.OXM_OF_ARP_TPA_W
match.set_arp_tpa_masked(ip, mask)
self._test_serialize_and_parser(match, header, ip, mask)
def test_set_arp_tpa_mid(self):
self._test_set_arp_tpa('192.168.227.57')
def test_set_arp_tpa_max(self):
self._test_set_arp_tpa('255.255.255.255')
def test_set_arp_tpa_min(self):
self._test_set_arp_tpa('0.0.0.0')
def test_set_arp_tpa_masked_mid(self):
self._test_set_arp_tpa('192.168.227.57', '255.255.0.0')
def test_set_arp_tpa_masked_max(self):
self._test_set_arp_tpa('192.168.227.57', '255.255.255.255')
def test_set_arp_tpa_masked_min(self):
self._test_set_arp_tpa('192.168.227.57', '0.0.0.0')
# set_arp_sha
def _test_set_arp_sha(self, arp_sha, mask=None):
header = ofproto.OXM_OF_ARP_SHA
match = OFPMatch()
arp_sha = mac.haddr_to_bin(arp_sha)
if mask is None:
match.set_arp_sha(arp_sha)
else:
header = ofproto.OXM_OF_ARP_SHA_W
mask = mac.haddr_to_bin(mask)
match.set_arp_sha_masked(arp_sha, mask)
arp_sha = mac.haddr_bitand(arp_sha, mask)
self._test_serialize_and_parser(match, header, arp_sha, mask)
def test_set_arp_sha_mid(self):
self._test_set_arp_sha('3e:ec:13:9b:f3:0b')
def test_set_arp_sha_max(self):
self._test_set_arp_sha('ff:ff:ff:ff:ff:ff')
def test_set_arp_sha_min(self):
self._test_set_arp_sha('00:00:00:00:00:00')
def test_set_arp_sha_masked_mid(self):
self._test_set_arp_sha('3e:ec:13:9b:f3:0b', 'ff:ff:ff:00:00:00')
def test_set_arp_sha_masked_max(self):
self._test_set_arp_sha('3e:ec:13:9b:f3:0b', 'ff:ff:ff:ff:ff:ff')
def test_set_arp_sha_masked_min(self):
self._test_set_arp_sha('3e:ec:13:9b:f3:0b', '00:00:00:00:00:00')
# set_arp_tha
def _test_set_arp_tha(self, arp_tha, mask=None):
header = ofproto.OXM_OF_ARP_THA
match = OFPMatch()
arp_tha = mac.haddr_to_bin(arp_tha)
if mask is None:
match.set_arp_tha(arp_tha)
else:
header = ofproto.OXM_OF_ARP_THA_W
mask = mac.haddr_to_bin(mask)
match.set_arp_tha_masked(arp_tha, mask)
arp_tha = mac.haddr_bitand(arp_tha, mask)
self._test_serialize_and_parser(match, header, arp_tha, mask)
def test_set_arp_tha_mid(self):
self._test_set_arp_tha('83:6c:21:52:49:68')
def test_set_arp_tha_max(self):
self._test_set_arp_tha('ff:ff:ff:ff:ff:ff')
def test_set_arp_tha_min(self):
self._test_set_arp_tha('00:00:00:00:00:00')
def test_set_arp_tha_masked_mid(self):
self._test_set_arp_tha('83:6c:21:52:49:68', 'ff:ff:ff:00:00:00')
def test_set_arp_tha_masked_max(self):
self._test_set_arp_tha('83:6c:21:52:49:68', 'ff:ff:ff:ff:ff:ff')
def test_set_arp_tha_masked_min(self):
self._test_set_arp_tha('83:6c:21:52:49:68', '00:00:00:00:00:00')
# set_ipv6_src
def _test_set_ipv6_src(self, ipv6, mask=None):
header = ofproto.OXM_OF_IPV6_SRC
match = OFPMatch()
ipv6 = [int(x, 16) for x in ipv6.split(":")]
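        # note: this simple split assumes fully expanded addresses (eight
        # hextets, no '::' shorthand); the ipv6 helpers below do the same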
if mask is None:
match.set_ipv6_src(ipv6)
else:
header = ofproto.OXM_OF_IPV6_SRC_W
mask = [int(x, 16) for x in mask.split(":")]
match.set_ipv6_src_masked(ipv6, mask)
ipv6 = [x & y for (x, y) in zip(ipv6, mask)]
self._test_serialize_and_parser(match, header, ipv6, mask)
def test_set_ipv6_src_mid(self):
ipv6 = '2001:db8:bd05:1d2:288a:1fc0:1:10ee'
self._test_set_ipv6_src(ipv6)
def test_set_ipv6_src_max(self):
ipv6 = 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'
self._test_set_ipv6_src(ipv6)
def test_set_ipv6_src_min(self):
ipv6 = '0:0:0:0:0:0:0:0'
self._test_set_ipv6_src(ipv6)
def test_set_ipv6_src_masked_mid(self):
ipv6 = '2001:db8:bd05:1d2:288a:1fc0:1:10ee'
mask = 'ffff:ffff:ffff:ffff:0:0:0:0'
self._test_set_ipv6_src(ipv6, mask)
def test_set_ipv6_src_masked_max(self):
ipv6 = '2001:db8:bd05:1d2:288a:1fc0:1:10ee'
mask = 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'
self._test_set_ipv6_src(ipv6, mask)
def test_set_ipv6_src_masked_min(self):
ipv6 = '2001:db8:bd05:1d2:288a:1fc0:1:10ee'
mask = '0:0:0:0:0:0:0:0'
self._test_set_ipv6_src(ipv6, mask)
# set_ipv6_dst
def _test_set_ipv6_dst(self, ipv6, mask=None):
header = ofproto.OXM_OF_IPV6_DST
match = OFPMatch()
ipv6 = [int(x, 16) for x in ipv6.split(":")]
if mask is None:
match.set_ipv6_dst(ipv6)
else:
header = ofproto.OXM_OF_IPV6_DST_W
mask = [int(x, 16) for x in mask.split(":")]
match.set_ipv6_dst_masked(ipv6, mask)
ipv6 = [x & y for (x, y) in zip(ipv6, mask)]
self._test_serialize_and_parser(match, header, ipv6, mask)
def test_set_ipv6_dst_mid(self):
ipv6 = 'e9e8:9ea5:7d67:82cc:ca54:1fc0:2d24:f038'
self._test_set_ipv6_dst(ipv6)
def test_set_ipv6_dst_max(self):
ipv6 = ':'.join(['ffff'] * 8)
self._test_set_ipv6_dst(ipv6)
def test_set_ipv6_dst_min(self):
ipv6 = ':'.join(['0'] * 8)
self._test_set_ipv6_dst(ipv6)
def test_set_ipv6_dst_mask_mid(self):
ipv6 = 'e9e8:9ea5:7d67:82cc:ca54:1fc0:2d24:f038'
mask = ':'.join(['ffff'] * 4 + ['0'] * 4)
self._test_set_ipv6_dst(ipv6, mask)
def test_set_ipv6_dst_mask_max(self):
ipv6 = 'e9e8:9ea5:7d67:82cc:ca54:1fc0:2d24:f038'
mask = ':'.join(['ffff'] * 8)
self._test_set_ipv6_dst(ipv6, mask)
def test_set_ipv6_dst_mask_min(self):
ipv6 = 'e9e8:9ea5:7d67:82cc:ca54:1fc0:2d24:f038'
mask = ':'.join(['0'] * 8)
self._test_set_ipv6_dst(ipv6, mask)
# set_ipv6_flabel
def _test_set_ipv6_flabel(self, flabel, mask=None):
header = ofproto.OXM_OF_IPV6_FLABEL
match = OFPMatch()
if mask is None:
match.set_ipv6_flabel(flabel)
else:
header = ofproto.OXM_OF_IPV6_FLABEL_W
match.set_ipv6_flabel_masked(flabel, mask)
self._test_serialize_and_parser(match, header, flabel, mask)
def test_set_ipv6_flabel_mid(self):
self._test_set_ipv6_flabel(0xc5384)
def test_set_ipv6_flabel_max(self):
self._test_set_ipv6_flabel(0xfffff)
def test_set_ipv6_flabel_min(self):
self._test_set_ipv6_flabel(0)
def test_set_ipv6_flabel_masked_mid(self):
self._test_set_ipv6_flabel(0xc5384, 0xfff00)
def test_set_ipv6_flabel_masked_max(self):
self._test_set_ipv6_flabel(0xc5384, 0xfffff)
def test_set_ipv6_flabel_masked_min(self):
self._test_set_ipv6_flabel(0xc5384, 0)
# set_icmpv6_type
def _test_set_icmpv6_type(self, icmpv6_type):
header = ofproto.OXM_OF_ICMPV6_TYPE
match = OFPMatch()
match.set_icmpv6_type(icmpv6_type)
self._test_serialize_and_parser(match, header, icmpv6_type)
def test_set_icmpv6_type_mid(self):
self._test_set_icmpv6_type(129)
def test_set_icmpv6_type_max(self):
self._test_set_icmpv6_type(0xff)
def test_set_icmpv6_type_min(self):
self._test_set_icmpv6_type(0)
# set_icmpv6_code
def _test_set_icmpv6_code(self, icmpv6_code):
header = ofproto.OXM_OF_ICMPV6_CODE
match = OFPMatch()
match.set_icmpv6_code(icmpv6_code)
self._test_serialize_and_parser(match, header, icmpv6_code)
def test_set_icmpv6_code_mid(self):
self._test_set_icmpv6_code(1)
def test_set_icmpv6_code_max(self):
self._test_set_icmpv6_code(0xff)
def test_set_icmpv6_code_min(self):
self._test_set_icmpv6_code(0)
# set_ipv6_nd_target
def _test_set_ipv6_nd_target(self, ipv6):
header = ofproto.OXM_OF_IPV6_ND_TARGET
match = OFPMatch()
ipv6 = [int(x, 16) for x in ipv6.split(":")]
match.set_ipv6_nd_target(ipv6)
self._test_serialize_and_parser(match, header, ipv6)
def test_set_ipv6_nd_target_mid(self):
ip = '5420:db3f:921b:3e33:2791:98f:dd7f:2e19'
self._test_set_ipv6_nd_target(ip)
def test_set_ipv6_nd_target_max(self):
ip = ':'.join(['ffff'] * 8)
self._test_set_ipv6_nd_target(ip)
def test_set_ipv6_nd_target_min(self):
ip = ':'.join(['0'] * 8)
self._test_set_ipv6_nd_target(ip)
# set_ipv6_nd_sll
def _test_set_ipv6_nd_sll(self, nd_sll):
header = ofproto.OXM_OF_IPV6_ND_SLL
match = OFPMatch()
nd_sll = mac.haddr_to_bin(nd_sll)
match.set_ipv6_nd_sll(nd_sll)
self._test_serialize_and_parser(match, header, nd_sll)
def test_set_ipv6_nd_sll_mid(self):
self._test_set_ipv6_nd_sll('93:6d:d0:d4:e8:36')
def test_set_ipv6_nd_sll_max(self):
self._test_set_ipv6_nd_sll('ff:ff:ff:ff:ff:ff')
def test_set_ipv6_nd_sll_min(self):
self._test_set_ipv6_nd_sll('00:00:00:00:00:00')
# set_ipv6_nd_tll
def _test_set_ipv6_nd_tll(self, nd_tll):
header = ofproto.OXM_OF_IPV6_ND_TLL
match = OFPMatch()
nd_tll = mac.haddr_to_bin(nd_tll)
match.set_ipv6_nd_tll(nd_tll)
self._test_serialize_and_parser(match, header, nd_tll)
def test_set_ipv6_nd_tll_mid(self):
self._test_set_ipv6_nd_tll('18:f6:66:b6:f1:b3')
def test_set_ipv6_nd_tll_max(self):
self._test_set_ipv6_nd_tll('ff:ff:ff:ff:ff:ff')
def test_set_ipv6_nd_tll_min(self):
self._test_set_ipv6_nd_tll('00:00:00:00:00:00')
# set_mpls_label
def _test_set_mpls_label(self, mpls_label):
header = ofproto.OXM_OF_MPLS_LABEL
match = OFPMatch()
match.set_mpls_label(mpls_label)
self._test_serialize_and_parser(match, header, mpls_label)
def test_set_mpls_label_mid(self):
self._test_set_mpls_label(2144)
def test_set_mpls_label_max(self):
self._test_set_mpls_label(0xfffff)
def test_set_mpls_label_min(self):
self._test_set_mpls_label(0)
# set_mpls_tc
def _test_set_mpls_tc(self, mpls_tc):
header = ofproto.OXM_OF_MPLS_TC
match = OFPMatch()
match.set_mpls_tc(mpls_tc)
self._test_serialize_and_parser(match, header, mpls_tc)
def test_set_mpls_tc_mid(self):
self._test_set_mpls_tc(3)
def test_set_mpls_tc_max(self):
self._test_set_mpls_tc(7)
def test_set_mpls_tc_min(self):
self._test_set_mpls_tc(0)
class TestOFPMatchField(unittest.TestCase):
""" Test case for ofproto_v1_2_parser.OFPMatchField
"""
def test_init_hasmask_true(self):
header = 0x0100
res = OFPMatchField(header)
eq_(res.header, header)
eq_(res.n_bytes, (header & 0xff) // 2)
eq_(res.length, 0)
def test_init_hasmask_false(self):
header = 0x0000
res = OFPMatchField(header)
eq_(res.header, header)
eq_(res.n_bytes, header & 0xff)
eq_(res.length, 0)
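# Editor's hedged note (not in the original tests): in the OpenFlow 1.2 OXM
# TLV header the hasmask flag is bit 8 (0x100) and the low byte is oxm_length,
# which counts value plus mask bytes; that is why n_bytes is the length halved
# when the mask bit is set, as the assertions above exercise.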
|
signed/intellij-community
|
refs/heads/master
|
python/testData/intentions/PyConvertToFStringIntentionTest/formatMethodWrapExpressionsInParentheses.py
|
31
|
'{0.real}, {1.imag}, {2[0]}, {3}, {4.attr}'.format(-42, 42, [1] + [2], lambda: None, ~MyClass())
|
baconbum/SportsTicker
|
refs/heads/master
|
NHLScraper/NHLScoringPlayPlayer.py
|
1
|
#!/usr/bin/env python3
class NHLScoringPlayPlayer:
'Details of a player who took part in a scoring play'
def __init__(self, scoringPlayPlayerData):
self.fullName = scoringPlayPlayerData["player"]["fullName"]
self.playerType = scoringPlayPlayerData["playerType"]
if (self.playerType != "Goalie"):
self.seasonTotal = scoringPlayPlayerData["seasonTotal"]
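# Editor's hedged usage sketch (not part of the original file); the dict
# shape below mirrors the NHL stats API fields referenced above.
if __name__ == "__main__":
    sample = {"player": {"fullName": "Wayne Gretzky"},
              "playerType": "Scorer",
              "seasonTotal": 50}
    scorer = NHLScoringPlayPlayer(sample)
    print(scorer.fullName, scorer.playerType, scorer.seasonTotal)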
|
ThiagoGarciaAlves/intellij-community
|
refs/heads/master
|
python/testData/inspections/AugmentAssignmentWithContext.py
|
83
|
class A:
x = 3
a = A()
<weak_warning descr="Assignment can be replaced with augmented assignment">a.x = a.x +<caret> 1</weak_warning>
|
ella/ella-newman
|
refs/heads/master
|
test_newman/test_statics.py
|
1
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from django.test.client import Client
from django.conf import settings
def test_static():
c = Client()
r = c.get('%s/img/nav-bg.gif' % settings.NEWMAN_MEDIA_PREFIX)
    assert r.status_code == 200
|
jdahlin/stoq-wubi
|
refs/heads/master
|
src/pypack/altgraph/ObjectGraph.py
|
9
|
from itertools import imap
from altgraph.compat import *
from altgraph.Graph import Graph
from altgraph.GraphUtil import filter_stack
class ObjectGraph(object):
"""
A graph of objects that have a "graphident" attribute.
graphident is the key for the object in the graph
"""
def __init__(self, graph=None, debug=0):
if graph is None:
graph = Graph()
self.graphident = self
self.graph = graph
self.debug = debug
self.indent = 0
graph.add_node(self, None)
def __repr__(self):
return '<%s>' % (type(self).__name__,)
def flatten(self, condition=None, start=None):
"""
Iterate over the subgraph that is entirely reachable by condition
starting from the given start node or the ObjectGraph root
"""
if start is None:
start = self
start = self.getRawIdent(start)
return self.graph.iterdata(start=start, condition=condition)
def get_edges(self, node):
start = self.getRawIdent(node)
_, _, outraw, incraw = self.graph.describe_node(start)
def iter_edges(lst, n):
seen = set()
for tpl in imap(self.graph.describe_edge, lst):
ident = tpl[n]
if ident not in seen:
yield self.findNode(ident)
seen.add(ident)
return iter_edges(outraw, 3), iter_edges(incraw, 2)
def filterStack(self, filters):
"""
Filter the ObjectGraph in-place by removing all edges to nodes that
do not match every filter in the given filter list
Returns a tuple containing the number of:
(nodes_visited, nodes_removed, nodes_orphaned)
"""
visited, removes, orphans = filter_stack(self.graph, self, filters)
for last_good, tail in orphans:
self.graph.add_edge(last_good, tail, edge_data='orphan')
for node in removes:
self.graph.hide_node(node)
return len(visited)-1, len(removes), len(orphans)
def removeNode(self, node):
"""
Remove the given node from the graph if it exists
"""
ident = self.getIdent(node)
if ident is not None:
self.graph.hide_node(ident)
def removeReference(self, fromnode, tonode):
"""
Remove all edges from fromnode to tonode
"""
if fromnode is None:
fromnode = self
fromident = self.getIdent(fromnode)
toident = self.getIdent(tonode)
if fromident is not None and toident is not None:
while True:
edge = self.graph.edge_by_node(fromident, toident)
if edge is None:
break
self.graph.hide_edge(edge)
def getIdent(self, node):
"""
Get the graph identifier for a node
"""
ident = self.getRawIdent(node)
if ident is not None:
return ident
node = self.findNode(node)
if node is None:
return None
return node.graphident
def getRawIdent(self, node):
"""
Get the identifier for a node object
"""
if node is self:
return node
        return getattr(node, 'graphident', None)
def findNode(self, node):
"""
Find the node on the graph
"""
ident = self.getRawIdent(node)
if ident is None:
ident = node
try:
return self.graph.node_data(ident)
except KeyError:
return None
def addNode(self, node):
"""
Add a node to the graph referenced by the root
"""
self.msg(4, "addNode", node)
self.graph.add_node(node.graphident, node)
def createReference(self, fromnode, tonode, edge_data=None):
"""
Create a reference from fromnode to tonode
"""
if fromnode is None:
fromnode = self
fromident, toident = self.getIdent(fromnode), self.getIdent(tonode)
if fromident is None or toident is None:
return
self.msg(4, "createReference", fromnode, tonode, edge_data)
self.graph.add_edge(fromident, toident, edge_data=edge_data)
def createNode(self, cls, name, *args, **kw):
"""
Add a node of type cls to the graph if it does not already exist
by the given name
"""
m = self.findNode(name)
if m is None:
m = cls(name, *args, **kw)
self.addNode(m)
return m
def msg(self, level, s, *args):
"""
Print a debug message with the given level
"""
if s and level <= self.debug:
print "%s%s %s" % (" " * self.indent, s, ' '.join(map(repr, args)))
def msgin(self, level, s, *args):
"""
Print a debug message and indent
"""
if level <= self.debug:
self.msg(level, s, *args)
self.indent = self.indent + 1
def msgout(self, level, s, *args):
"""
Dedent and print a debug message
"""
if level <= self.debug:
self.indent = self.indent - 1
self.msg(level, s, *args)
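# Editor's hedged usage sketch (not in the original module); assumes the
# altgraph package is importable, as the imports above already require.
if __name__ == '__main__':
    class _Node(object):
        def __init__(self, name):
            self.graphident = name
    og = ObjectGraph()
    a = og.createNode(_Node, 'a')
    b = og.createNode(_Node, 'b')
    og.createReference(a, b)
    print og.findNode('a').graphident  # -> 'a'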
|
yush1ga/pulsar
|
refs/heads/master
|
dashboard/django/stats/templatetags/stats_extras.py
|
11
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from django import template
from django.utils import formats
from django.contrib.humanize.templatetags.humanize import intcomma
register = template.Library()
KB = 1 << 10
MB = 1 << 20
GB = 1 << 30
TB = 1 << 40
PB = 1 << 50
def fmt(x):
return str(formats.number_format(round(x, 1), 1))
@register.filter(name='file_size_value')
def file_size_value(bytes_):
bytes_ = float(bytes_)
if bytes_ < KB: return str(bytes_)
elif bytes_ < MB: return fmt(bytes_ / KB)
elif bytes_ < GB: return fmt(bytes_ / MB)
elif bytes_ < TB: return fmt(bytes_ / GB)
elif bytes_ < PB: return fmt(bytes_ / TB)
else: return fmt(bytes_ / PB)
@register.filter(name='file_size_unit')
def file_size_unit(bytes_):
if bytes_ < KB: return 'bytes'
elif bytes_ < MB: return 'KB'
elif bytes_ < GB: return 'MB'
elif bytes_ < TB: return 'GB'
elif bytes_ < PB: return 'TB'
else: return 'PB'
@register.filter(name='mbps')
def mbps(bytes_per_seconds):
if not bytes_per_seconds: return 0.0
else: return float(bytes_per_seconds) * 8 / 1024 / 1024
@register.filter(name='safe_intcomma')
def safe_intcomma(n):
if not n: return 0
else: return intcomma(n)
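# Editor's hedged usage sketch: in a template these filters pair up as
# {{ nbytes|file_size_value }} {{ nbytes|file_size_unit }}; for nbytes=1536
# that renders roughly "1.5 KB" (exact decimal formatting follows the active
# Django locale settings).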
|
sorenk/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/junos/junos_logging.py
|
3
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: junos_logging
version_added: "2.4"
author: "Ganesh Nalawade (@ganeshrn)"
short_description: Manage logging on network devices
description:
- This module provides declarative management of logging
on Juniper JUNOS devices.
options:
dest:
description:
- Destination of the logs.
choices: ['console', 'host', 'file', 'user']
name:
description:
      - If the value of C(dest) is I(file) it indicates the file name,
        for I(user) it indicates the username, and for I(host) it
        indicates the host name to be notified.
facility:
description:
- Set logging facility.
level:
description:
- Set logging severity levels.
aggregate:
description: List of logging definitions.
state:
description:
- State of the logging configuration.
default: present
choices: ['present', 'absent']
active:
description:
- Specifies whether or not the configuration is active or deactivated
default: True
choices: [True, False]
rotate_frequency:
description:
      - Log rotation frequency in minutes; applicable only if the value
        of I(dest) is C(file). The acceptable value is in the range 1 to 59.
        This controls how often the log file is rotated.
required: false
size:
description:
      - Size of the file in the archive; applicable only if the value
        of I(dest) is C(file). The acceptable value is in the range 65536 to
        1073741824 bytes.
required: false
files:
description:
      - Number of files to be archived; applicable only if the value
        of I(dest) is C(file). The acceptable value is in the range 1 to 1000.
required: false
requirements:
- ncclient (>=v0.5.2)
notes:
- This module requires the netconf system service be enabled on
the remote device being managed.
- Tested against vSRX JUNOS version 15.1X49-D15.4, vqfx-10000 JUNOS Version 15.1X53-D60.4.
- Recommended connection is C(netconf). See L(the Junos OS Platform Options,../network/user_guide/platform_junos.html).
- This module also works with C(local) connections for legacy playbooks.
extends_documentation_fragment: junos
"""
EXAMPLES = """
- name: configure console logging
junos_logging:
dest: console
facility: any
level: critical
- name: remove console logging configuration
junos_logging:
dest: console
state: absent
- name: configure file logging
junos_logging:
dest: file
name: test
facility: pfe
level: error
- name: configure logging parameter
junos_logging:
files: 30
size: 65536
rotate_frequency: 10
- name: Configure file logging using aggregate
junos_logging:
dest: file
aggregate:
- name: test-1
facility: pfe
level: critical
- name: test-2
facility: kernel
level: emergency
active: True
- name: Delete file logging using aggregate
junos_logging:
aggregate:
- { dest: file, name: test-1, facility: pfe, level: critical }
- { dest: file, name: test-2, facility: kernel, level: emergency }
state: absent
"""
RETURN = """
diff.prepared:
description: Configuration difference before and after applying change.
returned: when configuration is changed and diff option is enabled.
type: string
sample: >
[edit system syslog]
+ [edit system syslog]
file interactive-commands { ... }
+ file test {
+ pfe critical;
+ }
"""
import collections
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import remove_default_spec
from ansible.module_utils.network.junos.junos import junos_argument_spec
from ansible.module_utils.network.junos.junos import load_config, map_params_to_obj, map_obj_to_ele, to_param_list
from ansible.module_utils.network.junos.junos import commit_configuration, discard_changes, locked_config
try:
from lxml.etree import tostring
except ImportError:
from xml.etree.ElementTree import tostring
USE_PERSISTENT_CONNECTION = True
def validate_files(value, module):
if value and not 1 <= value <= 1000:
module.fail_json(msg='files must be between 1 and 1000')
def validate_size(value, module):
if value and not 65536 <= value <= 1073741824:
module.fail_json(msg='size must be between 65536 and 1073741824')
def validate_rotate_frequency(value, module):
if value and not 1 <= value <= 59:
module.fail_json(msg='rotate_frequency must be between 1 and 59')
def validate_param_values(module, obj, param=None):
if not param:
param = module.params
for key in obj:
# validate the param value (if validator func exists)
validator = globals().get('validate_%s' % key)
if callable(validator):
validator(param.get(key), module)
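# Editor's hedged illustration: validate_param_values dispatches by name, so
# for an obj containing the key 'files' it resolves validate_files via
# globals() and a value such as files=2000 fails the module with
# "files must be between 1 and 1000".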
def main():
""" main entry point for module execution
"""
element_spec = dict(
dest=dict(choices=['console', 'host', 'file', 'user']),
name=dict(),
facility=dict(),
level=dict(),
rotate_frequency=dict(type='int'),
size=dict(type='int'),
files=dict(type='int'),
src_addr=dict(),
state=dict(default='present', choices=['present', 'absent']),
active=dict(default=True, type='bool')
)
aggregate_spec = deepcopy(element_spec)
# remove default in aggregate spec, to handle common arguments
remove_default_spec(aggregate_spec)
argument_spec = dict(
aggregate=dict(type='list', elements='dict', options=aggregate_spec),
)
argument_spec.update(element_spec)
argument_spec.update(junos_argument_spec)
required_if = [('dest', 'host', ['name', 'facility', 'level']),
('dest', 'file', ['name', 'facility', 'level']),
('dest', 'user', ['name', 'facility', 'level']),
('dest', 'console', ['facility', 'level'])]
module = AnsibleModule(argument_spec=argument_spec,
required_if=required_if,
supports_check_mode=True)
warnings = list()
result = {'changed': False}
if warnings:
result['warnings'] = warnings
params = to_param_list(module)
requests = list()
for param in params:
# if key doesn't exist in the item, get it from module.params
for key in param:
if param.get(key) is None:
param[key] = module.params[key]
module._check_required_if(required_if, param)
item = param.copy()
dest = item.get('dest')
if dest == 'console' and item.get('name'):
module.fail_json(msg="%s and %s are mutually exclusive" % ('console', 'name'))
top = 'system/syslog'
is_facility_key = False
field_top = None
if dest:
if dest == 'console':
field_top = dest
is_facility_key = True
else:
field_top = dest + '/contents'
is_facility_key = False
param_to_xpath_map = collections.OrderedDict()
param_to_xpath_map.update([
('name', {'xpath': 'name', 'is_key': True, 'top': dest}),
('facility', {'xpath': 'name', 'is_key': is_facility_key, 'top': field_top}),
('size', {'xpath': 'size', 'leaf_only': True, 'is_key': True, 'top': 'archive'}),
('files', {'xpath': 'files', 'leaf_only': True, 'is_key': True, 'top': 'archive'}),
('rotate_frequency', {'xpath': 'log-rotate-frequency', 'leaf_only': True}),
])
if item.get('level'):
param_to_xpath_map['level'] = {'xpath': item.get('level'), 'tag_only': True, 'top': field_top}
validate_param_values(module, param_to_xpath_map, param=item)
want = map_params_to_obj(module, param_to_xpath_map, param=item)
requests.append(map_obj_to_ele(module, want, top, param=item))
diff = None
with locked_config(module):
for req in requests:
diff = load_config(module, tostring(req), warnings, action='merge')
commit = not module.check_mode
if diff:
if commit:
commit_configuration(module)
else:
discard_changes(module)
result['changed'] = True
if module._diff:
result['diff'] = {'prepared': diff}
module.exit_json(**result)
if __name__ == "__main__":
main()
|
yeephycho/codePieces
|
refs/heads/master
|
OPENCV_IMAGE_MANIPULATION/image-manipulation.py
|
1
|
#! /usr/bin/env python
import numpy as np
import cv2
image = cv2.imread("./cat.jpg")
def get_image_info(image):
print(image.shape)
print(image.dtype)
def image_reshape(image, shape):
image_reshape = image.reshape(shape)
return image_reshape
def WHC2CWH(image):
b = image[:, :, 0]
g = image[:, :, 1]
r = image[:, :, 2]
image_cwh = np.array([r, g, b])
return image_cwh
def bgr2rgb(image):
image_rgb = image[:, :, ::-1]
return image_rgb
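# Editor's hedged usage sketch (not in the original file); assumes ./cat.jpg
# exists so the cv2.imread above returned a valid HxWxC BGR array.
if __name__ == "__main__":
    get_image_info(image)
    print(bgr2rgb(image).shape)   # same HxWxC shape, channels reversed
    print(WHC2CWH(image).shape)   # channel-first CxHxW layout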
|
samthor/intellij-community
|
refs/heads/master
|
plugins/hg4idea/testData/bin/mercurial/diffhelpers.py
|
96
|
# diffhelpers.py - pure Python implementation of diffhelpers.c
#
# Copyright 2009 Matt Mackall <mpm@selenic.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
def addlines(fp, hunk, lena, lenb, a, b):
while True:
todoa = lena - len(a)
todob = lenb - len(b)
num = max(todoa, todob)
if num == 0:
break
for i in xrange(num):
s = fp.readline()
c = s[0]
if s == "\\ No newline at end of file\n":
fix_newline(hunk, a, b)
continue
if c == "\n":
# Some patches may be missing the control char
# on empty lines. Supply a leading space.
s = " \n"
hunk.append(s)
if c == "+":
b.append(s[1:])
elif c == "-":
a.append(s)
else:
b.append(s[1:])
a.append(s)
return 0
def fix_newline(hunk, a, b):
l = hunk[-1]
# tolerate CRLF in last line
if l.endswith('\r\n'):
hline = l[:-2]
else:
hline = l[:-1]
c = hline[0]
if c in " +":
b[-1] = hline[1:]
if c in " -":
a[-1] = hline
hunk[-1] = hline
return 0
def testhunk(a, b, bstart):
alen = len(a)
blen = len(b)
if alen > blen - bstart:
return -1
for i in xrange(alen):
if a[i][1:] != b[i + bstart]:
return -1
return 0
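# Editor's hedged self-check (not part of the original module): exercises
# fix_newline and testhunk on a tiny synthetic hunk.
if __name__ == '__main__':
    hunk, a, b = [' foo\n'], [' foo\n'], ['foo\n']
    fix_newline(hunk, a, b)
    assert hunk == [' foo'] and a == [' foo'] and b == ['foo']
    assert testhunk([' x\n'], ['x\n'], 0) == 0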
|
moijes12/oh-mainline
|
refs/heads/master
|
vendor/packages/gdata/src/gdata/marketplace/data.py
|
51
|
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data model for parsing and generating XML for the Google Apps Marketplace Licensing API."""
__author__ = 'Alexandre Vivien <alex@simplecode.fr>'
import atom.core
import gdata
import gdata.data
LICENSES_NAMESPACE = 'http://www.w3.org/2005/Atom'
LICENSES_TEMPLATE = '{%s}%%s' % LICENSES_NAMESPACE
class Enabled(atom.core.XmlElement):
""" """
_qname = LICENSES_TEMPLATE % 'enabled'
class Id(atom.core.XmlElement):
""" """
_qname = LICENSES_TEMPLATE % 'id'
class CustomerId(atom.core.XmlElement):
""" """
_qname = LICENSES_TEMPLATE % 'customerid'
class DomainName(atom.core.XmlElement):
""" """
_qname = LICENSES_TEMPLATE % 'domainname'
class InstallerEmail(atom.core.XmlElement):
""" """
_qname = LICENSES_TEMPLATE % 'installeremail'
class TosAcceptanceTime(atom.core.XmlElement):
""" """
_qname = LICENSES_TEMPLATE % 'tosacceptancetime'
class LastChangeTime(atom.core.XmlElement):
""" """
_qname = LICENSES_TEMPLATE % 'lastchangetime'
class ProductConfigId(atom.core.XmlElement):
""" """
_qname = LICENSES_TEMPLATE % 'productconfigid'
class State(atom.core.XmlElement):
""" """
_qname = LICENSES_TEMPLATE % 'state'
class Entity(atom.core.XmlElement):
""" The entity representing the License. """
_qname = LICENSES_TEMPLATE % 'entity'
enabled = Enabled
id = Id
customer_id = CustomerId
domain_name = DomainName
installer_email = InstallerEmail
tos_acceptance_time = TosAcceptanceTime
last_change_time = LastChangeTime
product_config_id = ProductConfigId
state = State
class Content(atom.data.Content):
entity = Entity
class LicenseEntry(gdata.data.GDEntry):
""" Represents a LicenseEntry object. """
content = Content
class LicenseFeed(gdata.data.GDFeed):
""" Represents a feed of LicenseEntry objects. """
# Override entry so that this feed knows how to type its list of entries.
entry = [LicenseEntry]
|
open-synergy/purchase-workflow
|
refs/heads/8.0
|
purchase_group_hooks/purchase_group_hooks.py
|
21
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Leonardo Pistone
# Copyright 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv.orm import Model
from openerp import netsvc
from openerp.osv.orm import browse_record, browse_null
class PurchaseOrder(Model):
_inherit = 'purchase.order'
def _key_fields_for_grouping(self):
"""Return a list of fields used to identify orders that can be merged.
        Orders that have these fields equal can be merged.
This function can be extended by other modules to modify the list.
"""
return ('partner_id', 'location_id', 'pricelist_id')
def _key_fields_for_grouping_lines(self):
"""Return a list of fields used to identify order lines that can be
merged.
        Lines that have these fields equal can be merged.
This function can be extended by other modules to modify the list.
"""
return ('name', 'date_planned', 'taxes_id', 'price_unit', 'product_id',
'move_dest_id', 'account_analytic_id')
def _make_key_for_grouping(self, order, fields):
"""From an order, return a tuple to be used as a key.
If two orders have the same key, they can be merged.
"""
key_list = []
for field in fields:
field_value = getattr(order, field)
if isinstance(field_value, browse_record):
field_value = field_value.id
elif isinstance(field_value, browse_null):
field_value = False
elif isinstance(field_value, list):
field_value = ((6, 0, tuple([v.id for v in field_value])),)
key_list.append((field, field_value))
key_list.sort()
return tuple(key_list)
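    # Editor's hedged illustration: for an order with partner_id=7,
    # location_id=12 and pricelist_id=3, the key_list above sorts by field
    # name, yielding
    # (('location_id', 12), ('partner_id', 7), ('pricelist_id', 3)).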
def _can_merge(self, order):
"""Can the order be considered for merging with others?
This method can be surcharged in other modules.
"""
return order.state == 'draft'
def _initial_merged_order_data(self, order):
"""Build the initial values of a merged order."""
return {
'origin': order.origin,
'date_order': order.date_order,
'partner_id': order.partner_id.id,
'dest_address_id': order.dest_address_id.id,
'warehouse_id': order.warehouse_id.id,
'location_id': order.location_id.id,
'pricelist_id': order.pricelist_id.id,
'state': 'draft',
'order_line': {},
'notes': '%s' % (order.notes or '',),
'fiscal_position': (
order.fiscal_position and order.fiscal_position.id or False
),
}
def _update_merged_order_data(self, merged_data, order):
if order.date_order < merged_data['date_order']:
merged_data['date_order'] = order.date_order
if order.notes:
merged_data['notes'] = (
(merged_data['notes'] or '') + ('\n%s' % (order.notes,))
)
if order.origin:
if (
order.origin not in merged_data['origin'] and
merged_data['origin'] not in order.origin
):
merged_data['origin'] = (
(merged_data['origin'] or '') + ' ' + order.origin
)
return merged_data
def _group_orders(self, input_orders):
"""Return a dictionary where each element is in the form:
tuple_key: (dict_of_new_order_data, list_of_old_order_ids)
"""
key_fields = self._key_fields_for_grouping()
grouped_orders = {}
if len(input_orders) < 2:
return {}
for input_order in input_orders:
key = self._make_key_for_grouping(input_order, key_fields)
if key in grouped_orders:
grouped_orders[key] = (
self._update_merged_order_data(
grouped_orders[key][0],
input_order
),
grouped_orders[key][1] + [input_order.id]
)
else:
grouped_orders[key] = (
self._initial_merged_order_data(input_order),
[input_order.id]
)
grouped_order_data = grouped_orders[key][0]
for input_line in input_order.order_line:
line_key = self._make_key_for_grouping(
input_line,
self._key_fields_for_grouping_lines()
)
o_line = grouped_order_data['order_line'].setdefault(
line_key, {}
)
if o_line:
# merge the line with an existing line
o_line['product_qty'] += (
input_line.product_qty *
input_line.product_uom.factor /
o_line['uom_factor']
)
else:
# append a new "standalone" line
for field in ('product_qty', 'product_uom'):
field_val = getattr(input_line, field)
if isinstance(field_val, browse_record):
field_val = field_val.id
o_line[field] = field_val
o_line['uom_factor'] = (
input_line.product_uom.factor
if input_line.product_uom
else 1.0)
return self._cleanup_merged_line_data(grouped_orders)
def _cleanup_merged_line_data(self, grouped_orders):
"""Remove keys from merged lines, and merges of 1 order."""
result = {}
for order_key, (order_data, old_ids) in grouped_orders.iteritems():
if len(old_ids) > 1:
for key, value in order_data['order_line'].iteritems():
del value['uom_factor']
value.update(dict(key))
order_data['order_line'] = [
(0, 0, value)
for value in order_data['order_line'].itervalues()
]
result[order_key] = (order_data, old_ids)
return result
def _create_new_orders(self, cr, uid, grouped_orders, context=None):
"""Create the new merged orders in the database.
Return a dictionary that puts the created order ids in relation to the
original ones, in the form
new_order_id: [old_order_1_id, old_order_2_id]
"""
new_old_rel = {}
for key in grouped_orders:
new_order_data, old_order_ids = grouped_orders[key]
new_id = self.create(cr, uid, new_order_data, context=context)
new_old_rel[new_id] = old_order_ids
return new_old_rel
def _fix_workflow(self, cr, uid, new_old_rel):
"""Fix the workflow of the old and new orders.
Specifically, cancel the old ones and assign workflows to the new ones.
"""
wf_service = netsvc.LocalService("workflow")
for new_order_id in new_old_rel:
old_order_ids = new_old_rel[new_order_id]
for old_id in old_order_ids:
wf_service.trg_redirect(uid, 'purchase.order', old_id,
new_order_id, cr)
wf_service.trg_validate(uid, 'purchase.order', old_id,
'purchase_cancel', cr)
def do_merge(self, cr, uid, input_order_ids, context=None):
"""Merge Purchase Orders.
This method replaces the original one in the purchase module because
it did not provide any hooks for customization.
Receive a list of order ids, and return a dictionary where each
element is in the form:
new_order_id: [old_order_1_id, old_order_2_id]
New orders are created, and old orders are deleted.
"""
input_orders = self.browse(cr, uid, input_order_ids, context=context)
mergeable_orders = filter(self._can_merge, input_orders)
grouped_orders = self._group_orders(mergeable_orders)
new_old_rel = self._create_new_orders(cr, uid, grouped_orders,
context=context)
self._fix_workflow(cr, uid, new_old_rel)
return new_old_rel
|
djkonro/client-python
|
refs/heads/master
|
kubernetes/test/test_v1_config_map.py
|
2
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_config_map import V1ConfigMap
class TestV1ConfigMap(unittest.TestCase):
""" V1ConfigMap unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1ConfigMap(self):
"""
Test V1ConfigMap
"""
model = kubernetes.client.models.v1_config_map.V1ConfigMap()
if __name__ == '__main__':
unittest.main()
|
ojengwa/odoo
|
refs/heads/8.0
|
addons/hr_timesheet/wizard/__init__.py
|
381
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_timesheet_sign_in_out
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
DanForever/TimeSync
|
refs/heads/master
|
GAE/lib/requests/packages/chardet/big5prober.py
|
2930
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
from .mbcssm import Big5SMModel
class Big5Prober(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(Big5SMModel)
self._mDistributionAnalyzer = Big5DistributionAnalysis()
self.reset()
def get_charset_name(self):
return "Big5"
|
mdavid/cherokee-webserver-svnclone
|
refs/heads/master
|
admin/PageRule.py
|
1
|
# -*- coding: utf-8 -*-
#
# Cherokee-admin
#
# Authors:
# Alvaro Lopez Ortega <alvaro@alobbs.com>
#
# Copyright (C) 2001-2010 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import CTK
import Page
import Cherokee
import SelectionPanel
import validations
import Wizard
from Rule import Rule
from CTK.Tab import HEADER as Tab_HEADER
from CTK.Submitter import HEADER as Submit_HEADER
from CTK.TextField import HEADER as TextField_HEADER
from CTK.SortableList import HEADER as SortableList_HEADER
from util import *
from consts import *
from CTK.util import *
from CTK.consts import *
from configured import *
URL_BASE = '/vserver/%s/rule'
URL_APPLY = '/vserver/%s/rule/apply'
URL_NEW_MANUAL = '/vserver/%s/rule/new/manual'
URL_NEW_MANUAL_R = r'/vserver/(\d+)/rule/new/manual'
URL_BASE_R = r'^/vserver/(\d+)/rule$'
URL_APPLY_R = r'^/vserver/(\d+)/rule/apply$'
URL_PARTICULAR_R = r'^/vserver/(\d+)/rule/\d+$'
NOTE_DELETE_DIALOG = N_('<p>You are about to delete the <b>%s</b> behavior rule.</p><p>Are you sure you want to proceed?</p>')
NOTE_CLONE_DIALOG = N_('You are about to clone a Behavior Rule. Would you like to proceed?')
HELPS = []
VALIDATIONS = []
JS_ACTIVATE_LAST = """
$('.selection-panel:first').data('selectionpanel').select_last();
"""
JS_CLONE = """
var panel = $('.selection-panel:first').data('selectionpanel').get_selected();
var url = panel.find('.row_content').attr('url');
$.ajax ({type: 'GET', async: false, url: url+'/clone', success: function(data) {
$('.panel-buttons').trigger ('submit_success');
}});
"""
JS_PARTICULAR = """
var vserver = window.location.pathname.match (/^\/vserver\/(\d+)/)[1];
var rule = window.location.pathname.match (/^\/vserver\/\d+\/rule\/(\d+)/)[1];
$.cookie ('%(cookie_name)s', rule+'_'+vserver, { path: '/vserver/'+ vserver + '/rule'});
window.location.replace ('/vserver/'+ vserver + '/rule');
"""
def Commit():
# Modifications
return CTK.cfg_apply_post()
def reorder (arg):
# Process new list
order = CTK.post.pop(arg)
tmp = order.split(',')
vsrv = tmp[0].split('_')[1]
tmp = [x.split('_')[0] for x in tmp]
tmp.reverse()
# Build and alternative tree
num = 100
for r in tmp:
CTK.cfg.clone ('vserver!%s!rule!%s'%(vsrv, r), 'tmp!vserver!%s!rule!%d'%(vsrv, num))
num += 100
# Set the new list in place
del (CTK.cfg['vserver!%s!rule'%(vsrv)])
CTK.cfg.rename ('tmp!vserver!%s!rule'%(vsrv), 'vserver!%s!rule'%(vsrv))
return CTK.cfg_reply_ajax_ok()
def NewManual():
# Figure Virtual Server number
vsrv_num = re.findall (URL_NEW_MANUAL_R, CTK.request.url)[0]
# Add New Rule: Content
rules = [('',_('Choose'))] + trans (RULES)
table = CTK.PropsTable()
modul = CTK.PluginSelector ('tmp', rules, vsrv_num=vsrv_num)
table.Add (_('Rule Type'), modul.selector_widget, '')
box = CTK.Box()
box += table
box += modul
return box.Render().toJSON()
class RuleNew (CTK.Container):
def __init__ (self, vsrv_num):
CTK.Container.__init__ (self)
# Build the panel list
right_box = CTK.Box({'class': 'rule_new_content'})
panel = SelectionPanel.SelectionPanel (None, right_box.id, URL_BASE%(vsrv_num), '', cookie_name='new_rule_selected')
self += panel
self += right_box
# Special 1st: Manual
content = [CTK.Box({'class': 'title'}, CTK.RawHTML(_('Manual'))),
CTK.Box({'class': 'description'}, CTK.RawHTML(_('Manual configuration')))]
panel.Add ('manual', URL_NEW_MANUAL%(vsrv_num), content, draggable=False)
# Wizard Categories
for cat in Wizard.Categories (Wizard.TYPE_RULE):
url_pre = '%s/%s' %(Wizard.URL_CAT_LIST_RULE, cat['name'])
title, descr = cat['title'], cat['descr']
content = [CTK.Box({'class': 'title'}, CTK.RawHTML(_(title))),
CTK.Box({'class': 'description'}, CTK.RawHTML(_(descr)))]
panel.Add (cat['name'], url_pre, content, draggable=False)
class Render:
class PanelList (CTK.Container):
def __init__ (self, refresh, right_box, vsrv_num):
CTK.Container.__init__ (self)
url_base = '/vserver/%s/rule' %(vsrv_num)
url_apply = URL_APPLY %(vsrv_num)
# Build the panel list
panel = SelectionPanel.SelectionPanel (reorder, right_box.id, url_base, '', container='rules_panel')
self += panel
# Build the Rule list
rules = CTK.cfg.keys('vserver!%s!rule'%(vsrv_num))
rules.sort (lambda x,y: cmp(int(x), int(y)))
rules.reverse()
for r in rules:
rule = Rule ('vserver!%s!rule!%s!match'%(vsrv_num, r))
rule_name = rule.GetName()
rule_name_esc = CTK.escape_html (rule_name)
# Comment
comment = []
handler = CTK.cfg.get_val ('vserver!%s!rule!%s!handler' %(vsrv_num, r))
if handler:
desc = filter (lambda x: x[0] == handler, HANDLERS)[0][1]
comment.append (_(desc))
auth = CTK.cfg.get_val ('vserver!%s!rule!%s!auth' %(vsrv_num, r))
if auth:
desc = filter (lambda x: x[0] == auth, VALIDATORS)[0][1]
comment.append (_(desc))
for e in CTK.cfg.keys ('vserver!%s!rule!%s!encoder'%(vsrv_num, r)):
val = CTK.cfg.get_val ('vserver!%s!rule!%s!encoder!%s'%(vsrv_num, r, e))
if val == 'allow':
comment.append (e)
elif val == 'forbid':
comment.append ("no %s"%(e))
if CTK.cfg.get_val ('vserver!%s!rule!%s!timeout' %(vsrv_num, r)):
comment.append ('timeout')
if CTK.cfg.get_val ('vserver!%s!rule!%s!rate' %(vsrv_num, r)):
comment.append ('traffic')
if int (CTK.cfg.get_val ('vserver!%s!rule!%s!no_log'%(vsrv_num, r), "0")) > 0:
comment.append ('no log')
# List entry
row_id = '%s_%s' %(r, vsrv_num)
if r == rules[-1]:
content = [CTK.Box ({'class': 'name'}, CTK.RawHTML (rule_name_esc)),
CTK.Box ({'class': 'comment'}, CTK.RawHTML (', '.join(comment)))]
panel.Add (row_id, '/vserver/%s/rule/content/%s'%(vsrv_num, r), content, draggable=False)
else:
# Remove
dialog = CTK.Dialog ({'title': _('Do you really want to remove it?'), 'width': 480})
dialog.AddButton (_('Remove'), CTK.JS.Ajax (url_apply, async=False,
data = {'vserver!%s!rule!%s'%(vsrv_num, r):''},
success = dialog.JS_to_close() + \
refresh.JS_to_refresh()))
dialog.AddButton (_('Cancel'), "close")
dialog += CTK.RawHTML (_(NOTE_DELETE_DIALOG) %(rule_name_esc))
self += dialog
remove = CTK.ImageStock('del')
remove.bind ('click', dialog.JS_to_show() + "return false;")
# Disable
is_disabled = bool (int (CTK.cfg.get_val('vserver!%s!rule!%s!disabled'%(vsrv_num,r), "0")))
disclass = ('','rule-inactive')[is_disabled][:]
disabled = CTK.ToggleButtonOnOff (not is_disabled)
disabled.bind ('changed',
CTK.JS.Ajax (url_apply, async=True,
data = '{"vserver!%s!rule!%s!disabled": event.value}'%(vsrv_num,r)))
disabled.bind ('changed',
"$(this).parents('.row_content').toggleClass('rule-inactive');")
# Final
is_final = bool (int (CTK.cfg.get_val('vserver!%s!rule!%s!match!final'%(vsrv_num,r), "1")))
final = CTK.ToggleButtonImages (CTK.Box({'class': 'final-on'}, CTK.RawHTML(_('Final'))),
CTK.Box({'class': 'final-off'}, CTK.RawHTML(_('Non Final'))),
is_final)
final.bind ('changed',
CTK.JS.Ajax (url_apply, async=True,
data = '{"vserver!%s!rule!%s!match!final": parseInt(event.value)?"0":"1"}'%(vsrv_num,r)))
# Actions
group = CTK.Box ({'class': 'sel-actions'}, [disabled, remove])
content = [group]
content += [CTK.Box ({'class': 'name'}, CTK.RawHTML (rule_name_esc)),
CTK.Box ({'class': 'final'}, final),
CTK.Box ({'class': 'comment'}, CTK.RawHTML (', '.join(comment)))]
# Add the list entry
panel.Add (row_id, '/vserver/%s/rule/content/%s'%(vsrv_num, r), content, True, disclass)
class PanelButtons (CTK.Box):
def __init__ (self, vsrv_num):
CTK.Box.__init__ (self, {'class': 'panel-buttons'})
# Add New
dialog = CTK.Dialog ({'title': _('Add Behavior Rule'), 'width': 720})
dialog.id = 'dialog-new-rule'
dialog.AddButton (_('Add'), dialog.JS_to_trigger('submit'))
dialog.AddButton (_('Cancel'), "close")
dialog += RuleNew (vsrv_num)
druid = CTK.Druid (CTK.RefreshableURL())
wizard = CTK.Dialog ({'title': _('Configuration Assistant'), 'width': 550})
wizard += druid
druid.bind ('druid_exiting',
wizard.JS_to_close() +
self.JS_to_trigger('submit_success'))
button = CTK.Button(_('New'), {'id': 'rule-new-button', 'class': 'panel-button', 'title': _('Add Behavior Rule')})
button.bind ('click', dialog.JS_to_show())
dialog.bind ('submit_success', dialog.JS_to_close())
dialog.bind ('submit_success', self.JS_to_trigger('submit_success'));
dialog.bind ('open_wizard',
dialog.JS_to_close() +
druid.JS_to_goto("'/wizard/vserver/%s/' + event.wizard" %(vsrv_num)) +
wizard.JS_to_show())
self += button
self += dialog
self += wizard
# Clone
dialog = CTK.Dialog ({'title': _('Clone Behavior Rule'), 'width': 480})
dialog.AddButton (_('Clone'), JS_CLONE + dialog.JS_to_close())
dialog.AddButton (_('Cancel'), "close")
dialog += CTK.RawHTML ('<p>%s</p>' %(_(NOTE_CLONE_DIALOG)))
button = CTK.Button(_('Clone'), {'id': 'rule-clone-button', 'class': 'panel-button', 'title': _('Clone Selected Behavior Rule')})
button.bind ('click', dialog.JS_to_show())
self += dialog
self += button
def __call__ (self):
title = _('Behavior')
vsrv_num = re.findall (URL_BASE_R, CTK.request.url)[0]
# Ensure the VServer exists
if not CTK.cfg.keys('vserver!%s'%(vsrv_num)):
return CTK.HTTP_Redir ('/vserver')
# Content
left = CTK.Box({'class': 'panel'})
left += CTK.RawHTML('<h2>%s</h2>'%(title))
# Virtual Server List
refresh = CTK.Refreshable ({'id': 'rules_panel'})
refresh.register (lambda: self.PanelList(refresh, right, vsrv_num).Render())
# Refresh on 'New' or 'Clone'
buttons = self.PanelButtons (vsrv_num)
buttons.bind ('submit_success', refresh.JS_to_refresh (on_success=JS_ACTIVATE_LAST))
left += buttons
left += CTK.Box({'class': 'filterbox'}, CTK.TextField({'class':'filter', 'optional_string': _('Rule Filtering'), 'optional': True}))
right = CTK.Box({'class': 'rules_content'})
left += refresh
# Refresh the list whenever the content change
right.bind ('changed', refresh.JS_to_refresh());
right.bind ('submit_success', refresh.JS_to_refresh());
# Refresh the list when it's been reordered
left.bind ('reordered', refresh.JS_to_refresh())
# Build the page
headers = Tab_HEADER + Submit_HEADER + TextField_HEADER + SortableList_HEADER
page = Page.Base(title, body_id='rules', helps=HELPS, headers=headers)
page += left
page += right
return page.Render()
class RenderParticular:
def __call__ (self):
headers = ['<script type="text/javascript" src="/CTK/js/jquery-1.3.2.min.js"></script>',
'<script type="text/javascript" src="/CTK/js/jquery.cookie.js"></script>']
page = CTK.PageEmpty (headers=headers)
props = {'cookie_name': SelectionPanel.COOKIE_NAME_DEFAULT}
page += CTK.RawHTML (HTML_JS_BLOCK %(JS_PARTICULAR %(props)))
return page.Render()
CTK.publish (URL_BASE_R, Render)
CTK.publish (URL_PARTICULAR_R, RenderParticular)
CTK.publish (URL_APPLY_R, Commit, method="POST", validation=VALIDATIONS)
CTK.publish (URL_NEW_MANUAL_R, NewManual)
|
Just-D/chromium-1
|
refs/heads/master
|
tools/cygprofile/symbol_extractor_unittest.py
|
23
|
#!/usr/bin/python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import symbol_extractor
import unittest
class TestSymbolInfo(unittest.TestCase):
def testIgnoresBlankLine(self):
symbol_info = symbol_extractor._FromObjdumpLine('')
self.assertIsNone(symbol_info)
def testIgnoresMalformedLine(self):
# This line is too short.
line = ('00c1b228 F .text 00000060 _ZN20trace_event')
symbol_info = symbol_extractor._FromObjdumpLine(line)
self.assertIsNone(symbol_info)
# This line has the wrong marker.
line = '00c1b228 l f .text 00000060 _ZN20trace_event'
symbol_info = symbol_extractor._FromObjdumpLine(line)
self.assertIsNone(symbol_info)
def testAssertionErrorOnInvalidLines(self):
# This line has an invalid scope.
line = ('00c1b228 z F .text 00000060 _ZN20trace_event')
self.assertRaises(AssertionError, symbol_extractor._FromObjdumpLine, line)
# This line has too many fields.
line = ('00c1b228 l F .text 00000060 _ZN20trace_event too many')
self.assertRaises(AssertionError, symbol_extractor._FromObjdumpLine, line)
# This line has invalid characters in the symbol.
line = ('00c1b228 l F .text 00000060 _ZN20trace_$bad')
self.assertRaises(AssertionError, symbol_extractor._FromObjdumpLine, line)
def testSymbolInfo(self):
line = ('00c1c05c l F .text 0000002c '
'_GLOBAL__sub_I_chrome_main_delegate.cc')
test_name = '_GLOBAL__sub_I_chrome_main_delegate.cc'
test_offset = 0x00c1c05c
test_size = 0x2c
test_section = '.text'
symbol_info = symbol_extractor._FromObjdumpLine(line)
self.assertIsNotNone(symbol_info)
self.assertEquals(test_offset, symbol_info.offset)
self.assertEquals(test_size, symbol_info.size)
self.assertEquals(test_name, symbol_info.name)
self.assertEquals(test_section, symbol_info.section)
def testHiddenSymbol(self):
line = ('00c1c05c l F .text 0000002c '
'.hidden _GLOBAL__sub_I_chrome_main_delegate.cc')
test_name = '_GLOBAL__sub_I_chrome_main_delegate.cc'
test_offset = 0x00c1c05c
test_size = 0x2c
test_section = '.text'
symbol_info = symbol_extractor._FromObjdumpLine(line)
self.assertIsNotNone(symbol_info)
self.assertEquals(test_offset, symbol_info.offset)
self.assertEquals(test_size, symbol_info.size)
self.assertEquals(test_name, symbol_info.name)
self.assertEquals(test_section, symbol_info.section)
class TestSymbolInfosFromStream(unittest.TestCase):
def testSymbolInfosFromStream(self):
lines = ['Garbage',
'',
'00c1c05c l F .text 0000002c first',
             '',
             'more garbage',
'00155 g F .text 00000012 second']
symbol_infos = symbol_extractor._SymbolInfosFromStream(lines)
self.assertEquals(len(symbol_infos), 2)
first = symbol_extractor.SymbolInfo('first', 0x00c1c05c, 0x2c, '.text')
self.assertEquals(first, symbol_infos[0])
second = symbol_extractor.SymbolInfo('second', 0x00155, 0x12, '.text')
self.assertEquals(second, symbol_infos[1])
class TestSymbolInfoMappings(unittest.TestCase):
def setUp(self):
self.symbol_infos = [
symbol_extractor.SymbolInfo('firstNameAtOffset', 0x42, 42, '.text'),
symbol_extractor.SymbolInfo('secondNameAtOffset', 0x42, 42, '.text'),
symbol_extractor.SymbolInfo('thirdSymbol', 0x64, 20, '.text')]
def testGroupSymbolInfosByOffset(self):
offset_to_symbol_info = symbol_extractor.GroupSymbolInfosByOffset(
self.symbol_infos)
self.assertEquals(len(offset_to_symbol_info), 2)
self.assertIn(0x42, offset_to_symbol_info)
self.assertEquals(offset_to_symbol_info[0x42][0], self.symbol_infos[0])
self.assertEquals(offset_to_symbol_info[0x42][1], self.symbol_infos[1])
self.assertIn(0x64, offset_to_symbol_info)
self.assertEquals(offset_to_symbol_info[0x64][0], self.symbol_infos[2])
def testCreateNameToSymbolInfo(self):
name_to_symbol_info = symbol_extractor.CreateNameToSymbolInfo(
self.symbol_infos)
self.assertEquals(len(name_to_symbol_info), 3)
for i in range(3):
name = self.symbol_infos[i].name
self.assertIn(name, name_to_symbol_info)
self.assertEquals(self.symbol_infos[i], name_to_symbol_info[name])
def testSymbolCollisions(self):
symbol_infos_with_collision = list(self.symbol_infos)
symbol_infos_with_collision.append(symbol_extractor.SymbolInfo(
'secondNameAtOffset', 0x84, 42, '.text'))
# The symbol added above should not affect the output.
name_to_symbol_info = symbol_extractor.CreateNameToSymbolInfo(
self.symbol_infos)
self.assertEquals(len(name_to_symbol_info), 3)
for i in range(3):
name = self.symbol_infos[i].name
self.assertIn(name, name_to_symbol_info)
self.assertEquals(self.symbol_infos[i], name_to_symbol_info[name])
if __name__ == '__main__':
unittest.main()
|
faizankshaikh/Project
|
refs/heads/master
|
trials/icdar_trial4.py
|
1
|
import os
import random
import pylab
import time
import csv
import enchant
import pandas as pd
import numpy as np
import cPickle as pkl
from lasagne import layers, updates
from scipy.misc import imread, imresize
from lasagne.nonlinearities import softmax
from nolearn.lasagne import NeuralNet, BatchIterator
from sklearn.feature_extraction.image import extract_patches
data_root = '/home/faizy/workspace/.project/project/datasets/'
model_root = '/home/faizy/workspace/.project/project/models/'
icdar_root = 'icdar15/'
test_root = 'Challenge2_Test_Task3_Images'
test_size = 1095
alphabet = 'abcdefghijklmnopqrstuvwxyz'
filename = 'sub01.txt'
# load models
f = open(model_root + 'detector_2.pkl', 'rb')
detector = pkl.load(f)
f.close()
f = open(model_root + 'recognizer.pkl', 'rb')
recognizer = pkl.load(f)
f.close()
d = enchant.Dict()
'''
# visualize dataset
i = random.randrange(1, test_size)
img = imread(os.path.join(data_root, icdar_root, test_root, ('word_' + str(i) + '.png')), flatten = True)
pylab.imshow(img)
pylab.gray()
pylab.show()
'''
def main():
pred = []
id_arr = []
start_time = time.time()
for i in range(1, test_size + 1):
img = imread(os.path.join(data_root, icdar_root, test_root, ('word_' + str(i) + '.png')), flatten = True)
image_height = img.shape[0]
image_width = img.shape[1]
id_arr.append(str(i) + '.png')
# check for smaller width image
if image_width > image_height:
            patches = extract_patches(img, (image_height, int(image_height * 0.60)))
else:
patches = extract_patches(img, (image_height, image_width))
new_lst = []
        # fresh loop names so the outer image index i is not shadowed
        for p in range(patches.shape[0]):
            for q in range(patches.shape[1]):
                new_lst.append(imresize(patches[p, q, :, :], (32, 32)))
new_list = np.stack(new_lst)
tester = new_list.reshape(patches.shape[0]*patches.shape[1], 1, 32, 32).astype('float32')
tester /= tester.std(axis = None)
tester -= tester.mean()
tester = tester.astype('float32')
preder = detector.predict_proba(tester)
heatmap = preder[:, 1].reshape((patches.shape[0], patches.shape[1]))
predict_signal = np.reshape(heatmap, patches.shape[1]*patches.shape[0])
x_1 = np.arange(patches.shape[1])
y_1 = np.zeros(patches.shape[1])
x_2 = np.arange(32, patches.shape[1] + 32)
y_2 = np.ones(patches.shape[1])
scores_ = predict_signal
boxes = np.stack((x_1, y_1, x_2, y_2, scores_)).T
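        # Editor's note: standard greedy non-maximum suppression over the
        # sliding-window boxes built above; with thresh=0 only windows that
        # do not overlap the best-scoring ones survive.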
def nms(dets, thresh):
x1 = dets[:, 0]
y1 = dets[:, 1]
x2 = dets[:, 2]
y2 = dets[:, 3]
scores = dets[:, 4]
areas = (x2 - x1 + 1) * (y2 - y1 + 1)
order = scores.argsort()[::-1]
keep = []
while order.size > 0:
i = order[0]
keep.append(i)
xx1 = np.maximum(x1[i], x1[order[1:]])
yy1 = np.maximum(y1[i], y1[order[1:]])
xx2 = np.minimum(x2[i], x2[order[1:]])
yy2 = np.minimum(y2[i], y2[order[1:]])
w = np.maximum(0.0, xx2 - xx1 + 1)
h = np.maximum(0.0, yy2 - yy1 + 1)
inter = w * h
ovr = inter / (areas[i] + areas[order[1:]] - inter)
inds = np.where(ovr <= thresh)[0]
order = order[inds + 1]
return np.sort(np.array(keep, dtype = int))
peakind = nms(boxes,thresh=0)
word = np.zeros((len(peakind), 1, 32, 32))
for idx, item in enumerate(peakind):
word[idx, ...] = tester[item, 0, :, :]
word = word.astype('float32')
predict = recognizer.predict(word)
def classer(arrayer):
classer_array = []
for i in range(len(arrayer)):
if (0 <= arrayer[i] < 10):
classer_array.append(arrayer[i])
elif (10 <= arrayer[i] < 36) :
classer_array.append(alphabet[arrayer[i] - 10].upper())
elif (36 <= arrayer[i] < 62):
classer_array.append(alphabet[arrayer[i] - 36])
else :
print 'Is the array correct!?'
return classer_array
real_pred = classer(predict)
real_pred = map(str, real_pred)
letter_stream = ''.join(real_pred)
print letter_stream
if image_width > image_height:
            suggestions = d.suggest(letter_stream)
            if suggestions:
                pred.append(suggestions[0])
            else:
                pred.append(letter_stream)
else:
pred.append(letter_stream)
    with open(filename, 'w') as f:
for l1, l2 in zip(id_arr, pred):
f.write(l1 + ', ' + '"' + l2 + '"' + '\n')
print "time taken: ", time.time() - start_time
if __name__ == '__main__':
main()
|
hwjworld/xiaodun-platform
|
refs/heads/master
|
lms/envs/cms/__init__.py
|
12133432
| |
aperezalbela/scrapy_bvl
|
refs/heads/master
|
bvl/__init__.py
|
12133432
| |
nicholac/universe_abm
|
refs/heads/master
|
celestials/__init__.py
|
12133432
| |
meineerde/vncauthproxy
|
refs/heads/master
|
vncauthproxy/__init__.py
|
12133432
| |
FireWRT/OpenWrt-Firefly-Libraries
|
refs/heads/master
|
staging_dir/host/lib/python3.4/test/test_threadedtempfile.py
|
171
|
"""
Create and delete FILES_PER_THREAD temp files (via tempfile.TemporaryFile)
in each of NUM_THREADS threads, recording the number of successes and
failures. A failure is a bug in tempfile, and may be due to:
+ Trying to create more than one tempfile with the same name.
+ Trying to delete a tempfile that doesn't still exist.
+ Something we've never seen before.
By default, NUM_THREADS == 20 and FILES_PER_THREAD == 50. This is enough to
create about 150 failures per run under Win98SE in 2.0, and runs pretty
quickly. Guido reports needing to boost FILES_PER_THREAD to 500 before
provoking a 2.0 failure under Linux.
"""
NUM_THREADS = 20
FILES_PER_THREAD = 50
import tempfile
from test.support import threading_setup, threading_cleanup, run_unittest, import_module
threading = import_module('threading')
import unittest
import io
from traceback import print_exc
startEvent = threading.Event()
class TempFileGreedy(threading.Thread):
error_count = 0
ok_count = 0
def run(self):
self.errors = io.StringIO()
startEvent.wait()
for i in range(FILES_PER_THREAD):
try:
f = tempfile.TemporaryFile("w+b")
f.close()
except:
self.error_count += 1
print_exc(file=self.errors)
else:
self.ok_count += 1
class ThreadedTempFileTest(unittest.TestCase):
def test_main(self):
threads = []
thread_info = threading_setup()
for i in range(NUM_THREADS):
t = TempFileGreedy()
threads.append(t)
t.start()
startEvent.set()
ok = 0
errors = []
for t in threads:
t.join()
ok += t.ok_count
if t.error_count:
errors.append(str(t.name) + str(t.errors.getvalue()))
threading_cleanup(*thread_info)
msg = "Errors: errors %d ok %d\n%s" % (len(errors), ok,
'\n'.join(errors))
self.assertEqual(errors, [], msg)
self.assertEqual(ok, NUM_THREADS * FILES_PER_THREAD)
def test_main():
run_unittest(ThreadedTempFileTest)
if __name__ == "__main__":
test_main()
|
Lightmatter/django-inlineformfield
|
refs/heads/master
|
.tox/py27/lib/python2.7/site-packages/pyflakes/scripts/pyflakes.py
|
16
|
"""
Implementation of the command-line I{pyflakes} tool.
"""
from __future__ import absolute_import
# For backward compatibility
from pyflakes.api import check, checkPath, checkRecursive, iterSourceCode, main
|
ShownX/incubator-mxnet
|
refs/heads/master
|
example/mxnet_adversarial_vae/vaegan_mxnet.py
|
18
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
'''
Created on Jun 15, 2017
@author: shujon
'''
from __future__ import print_function
import mxnet as mx
import numpy as np
from sklearn.datasets import fetch_mldata
from matplotlib import pyplot as plt
import logging
import cv2
from datetime import datetime
from PIL import Image
import os
import argparse
from scipy.io import savemat
#from layer import GaussianSampleLayer
######################################################################
# An adversarial variational autoencoder implementation in mxnet,
# following the implementation at https://github.com/JeremyCCHsu/tf-vaegan
# of the paper `Larsen, Anders Boesen Lindbo, et al. "Autoencoding beyond
# pixels using a learned similarity metric." arXiv preprint arXiv:1512.09300 (2015).`
######################################################################
#constant operator in mxnet, not used in this code
@mx.init.register
class MyConstant(mx.init.Initializer):
def __init__(self, value):
super(MyConstant, self).__init__(value=value)
self.value = value
def _init_weight(self, _, arr):
arr[:] = mx.nd.array(self.value)
#######################################################################
# The encoder is a CNN which takes a 32x32 image as input and generates the
# 100-dimensional shape embedding as a sample from a normal distribution
# using the predicted mean and variance.
#######################################################################
def encoder(nef, z_dim, batch_size, no_bias=True, fix_gamma=True, eps=1e-5 + 1e-12):
BatchNorm = mx.sym.BatchNorm
data = mx.sym.Variable('data')
#label = mx.sym.Variable('label')
e1 = mx.sym.Convolution(data, name='enc1', kernel=(5,5), stride=(2,2), pad=(2,2), num_filter=nef, no_bias=no_bias)
ebn1 = BatchNorm(e1, name='encbn1', fix_gamma=fix_gamma, eps=eps)
eact1 = mx.sym.LeakyReLU(ebn1, name='encact1', act_type='leaky', slope=0.2)
e2 = mx.sym.Convolution(eact1, name='enc2', kernel=(5,5), stride=(2,2), pad=(2,2), num_filter=nef*2, no_bias=no_bias)
ebn2 = BatchNorm(e2, name='encbn2', fix_gamma=fix_gamma, eps=eps)
eact2 = mx.sym.LeakyReLU(ebn2, name='encact2', act_type='leaky', slope=0.2)
e3 = mx.sym.Convolution(eact2, name='enc3', kernel=(5,5), stride=(2,2), pad=(2,2), num_filter=nef*4, no_bias=no_bias)
ebn3 = BatchNorm(e3, name='encbn3', fix_gamma=fix_gamma, eps=eps)
eact3 = mx.sym.LeakyReLU(ebn3, name='encact3', act_type='leaky', slope=0.2)
e4 = mx.sym.Convolution(eact3, name='enc4', kernel=(5,5), stride=(2,2), pad=(2,2), num_filter=nef*8, no_bias=no_bias)
ebn4 = BatchNorm(e4, name='encbn4', fix_gamma=fix_gamma, eps=eps)
eact4 = mx.sym.LeakyReLU(ebn4, name='encact4', act_type='leaky', slope=0.2)
eact4 = mx.sym.Flatten(eact4)
z_mu = mx.sym.FullyConnected(eact4, num_hidden=z_dim, name="enc_mu")
z_lv = mx.sym.FullyConnected(eact4, num_hidden=z_dim, name="enc_lv")
#eps = mx.symbol.random_normal(loc=0, scale=1, shape=(batch_size,z_dim) )
#std = mx.symbol.sqrt(mx.symbol.exp(z_lv))
#z = mx.symbol.elemwise_add(z_mu, mx.symbol.broadcast_mul(eps, std))
z = z_mu + mx.symbol.broadcast_mul(mx.symbol.exp(0.5*z_lv),mx.symbol.random_normal(loc=0, scale=1,shape=(batch_size,z_dim)))
return z_mu, z_lv, z
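# --- Illustrative sketch (never called): the reparameterization trick from
# the symbol above, computed with numpy so it can be checked by hand. The
# _sketch_ name is hypothetical and exists only for illustration.
def _sketch_reparameterization(batch_size=4, z_dim=100):
# with mu = 0 and log-variance = 0 the sample is standard normal
mu = np.zeros((batch_size, z_dim))
lv = np.zeros((batch_size, z_dim))
eps = np.random.normal(0.0, 1.0, size=(batch_size, z_dim))
return mu + np.exp(0.5 * lv) * eps # z ~ N(mu, exp(lv))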
#######################################################################
# The generator is a CNN which takes the 100-dimensional embedding as input
# and reconstructs the input image given to the encoder
#######################################################################
def generator(ngf, nc, no_bias=True, fix_gamma=True, eps=1e-5 + 1e-12, z_dim=100, activation='sigmoid'):
BatchNorm = mx.sym.BatchNorm
rand = mx.sym.Variable('rand')
rand = mx.sym.Reshape(rand, shape=(-1, z_dim, 1, 1))
#g1 = mx.sym.FullyConnected(rand, name="g1", num_hidden=2*2*ngf*8, no_bias=True)
g1 = mx.sym.Deconvolution(rand, name='gen1', kernel=(5,5), stride=(2,2),target_shape=(2,2), num_filter=ngf*8, no_bias=no_bias)
gbn1 = BatchNorm(g1, name='genbn1', fix_gamma=fix_gamma, eps=eps)
gact1 = mx.sym.Activation(gbn1, name="genact1", act_type="relu")
# 4 x 4
#gact1 = mx.sym.Reshape(gact1, shape=(-1, ngf * 8, 2, 2))
#g1 = mx.sym.Deconvolution(g0, name='g1', kernel=(4,4), num_filter=ngf*8, no_bias=no_bias)
#gbn1 = BatchNorm(g1, name='gbn1', fix_gamma=fix_gamma, eps=eps)
#gact1 = mx.sym.Activation(gbn1, name='gact1', act_type='relu')
g2 = mx.sym.Deconvolution(gact1, name='gen2', kernel=(5,5), stride=(2,2),target_shape=(4,4), num_filter=ngf*4, no_bias=no_bias)
gbn2 = BatchNorm(g2, name='genbn2', fix_gamma=fix_gamma, eps=eps)
gact2 = mx.sym.Activation(gbn2, name='genact2', act_type='relu')
g3 = mx.sym.Deconvolution(gact2, name='gen3', kernel=(5,5), stride=(2,2), target_shape=(8,8), num_filter=ngf*2, no_bias=no_bias)
gbn3 = BatchNorm(g3, name='genbn3', fix_gamma=fix_gamma, eps=eps)
gact3 = mx.sym.Activation(gbn3, name='genact3', act_type='relu')
g4 = mx.sym.Deconvolution(gact3, name='gen4', kernel=(5,5), stride=(2,2), target_shape=(16,16), num_filter=ngf, no_bias=no_bias)
gbn4 = BatchNorm(g4, name='genbn4', fix_gamma=fix_gamma, eps=eps)
gact4 = mx.sym.Activation(gbn4, name='genact4', act_type='relu')
g5 = mx.sym.Deconvolution(gact4, name='gen5', kernel=(5,5), stride=(2,2), target_shape=(32,32), num_filter=nc, no_bias=no_bias)
gout = mx.sym.Activation(g5, name='genact5', act_type=activation)
return gout
#######################################################################
# First part of the discriminator which takes a 32x32 image as input
# and output a convolutional feature map, this is required to calculate
# the layer loss
#######################################################################
def discriminator1(ndf, no_bias=True, fix_gamma=True, eps=1e-5 + 1e-12):
BatchNorm = mx.sym.BatchNorm
data = mx.sym.Variable('data')
#label = mx.sym.Variable('label')
d1 = mx.sym.Convolution(data, name='d1', kernel=(5,5), stride=(2,2), pad=(2,2), num_filter=ndf, no_bias=no_bias)
dact1 = mx.sym.LeakyReLU(d1, name='dact1', act_type='leaky', slope=0.2)
d2 = mx.sym.Convolution(dact1, name='d2', kernel=(5,5), stride=(2,2), pad=(2,2), num_filter=ndf*2, no_bias=no_bias)
dbn2 = BatchNorm(d2, name='dbn2', fix_gamma=fix_gamma, eps=eps)
dact2 = mx.sym.LeakyReLU(dbn2, name='dact2', act_type='leaky', slope=0.2)
d3 = mx.sym.Convolution(dact2, name='d3', kernel=(5,5), stride=(2,2), pad=(2,2), num_filter=ndf*4, no_bias=no_bias)
dbn3 = BatchNorm(d3, name='dbn3', fix_gamma=fix_gamma, eps=eps)
dact3 = mx.sym.LeakyReLU(dbn3, name='dact3', act_type='leaky', slope=0.2)
return dact3
#######################################################################
# Second part of the discriminator which takes a 256x8x8 feature map as input
# and generates the loss based on whether the input image was a real one or fake one
#######################################################################
def discriminator2(ndf, no_bias=True, fix_gamma=True, eps=1e-5 + 1e-12):
BatchNorm = mx.sym.BatchNorm
data = mx.sym.Variable('data')
label = mx.sym.Variable('label')
d4 = mx.sym.Convolution(data, name='d4', kernel=(5,5), stride=(2,2), pad=(2,2), num_filter=ndf*8, no_bias=no_bias)
dbn4 = BatchNorm(d4, name='dbn4', fix_gamma=fix_gamma, eps=eps)
dact4 = mx.sym.LeakyReLU(dbn4, name='dact4', act_type='leaky', slope=0.2)
#d5 = mx.sym.Convolution(dact4, name='d5', kernel=(4,4), num_filter=1, no_bias=no_bias)
#d5 = mx.sym.Flatten(d5)
h = mx.sym.Flatten(dact4)
d5 = mx.sym.FullyConnected(h, num_hidden=1, name="d5")
#dloss = (0.5 * (label == 0) + (label != 0) ) * mx.sym.LogisticRegressionOutput(data=d5, label=label, name='dloss')
dloss = mx.sym.LogisticRegressionOutput(data=d5, label=label, name='dloss')
return dloss
#######################################################################
# GaussianLogDensity loss calculation for layer wise loss
#######################################################################
def GaussianLogDensity(x, mu, log_var, name='GaussianLogDensity', EPSILON = 1e-6):
c = mx.sym.ones_like(log_var) * 2.0 * np.pi
c = mx.symbol.log(c)  # the log(2*pi) term of the Gaussian log-density
var = mx.sym.exp(log_var)
x_mu2 = mx.symbol.square(x - mu)  # elementwise squared error; x and mu must share a shape
x_mu2_over_var = mx.symbol.broadcast_div(x_mu2, var + EPSILON)
log_prob = -0.5 * (c + log_var + x_mu2_over_var)
#log_prob = (x_mu2)
log_prob = mx.symbol.sum(log_prob, axis=1, name=name)  # sum over the feature axis
return log_prob
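# --- Illustrative sketch (never called): the same log-density for a single
# scalar, evaluated with numpy; for x = mu = 0 and log_var = 0 this yields
# -0.5 * log(2*pi) ~= -0.9189. The _sketch_ name is hypothetical.
def _sketch_gaussian_log_density(x=0.0, mu=0.0, log_var=0.0):
var = np.exp(log_var)
return -0.5 * (np.log(2.0 * np.pi) + log_var + (x - mu) ** 2 / var)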
#######################################################################
# Calculate the discriminator layer loss
#######################################################################
def DiscriminatorLayerLoss():
data = mx.sym.Variable('data')
label = mx.sym.Variable('label')
data = mx.sym.Flatten(data)
label = mx.sym.Flatten(label)
label = mx.sym.BlockGrad(label)
zeros = mx.sym.zeros_like(data)
output = -GaussianLogDensity(label, data, zeros)
dloss = mx.symbol.MakeLoss(mx.symbol.mean(output),name='lloss')
#dloss = mx.sym.MAERegressionOutput(data=data, label=label, name='lloss')
return dloss
#######################################################################
# KLDivergence loss
#######################################################################
def KLDivergenceLoss():
data = mx.sym.Variable('data')
mu1, lv1 = mx.sym.split(data, num_outputs=2, axis=0)
mu2 = mx.sym.zeros_like(mu1)
lv2 = mx.sym.zeros_like(lv1)
v1 = mx.sym.exp(lv1)
v2 = mx.sym.exp(lv2)
mu_diff_sq = mx.sym.square(mu1 - mu2)
dimwise_kld = .5 * (
(lv2 - lv1) + mx.symbol.broadcast_div(v1, v2) + mx.symbol.broadcast_div(mu_diff_sq, v2) - 1.)
KL = mx.symbol.sum(dimwise_kld, axis=1)
KLloss = mx.symbol.MakeLoss(mx.symbol.mean(KL),name='KLloss')
return KLloss
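# --- Illustrative sketch (never called): the same dimension-wise KL against
# the standard normal prior (mu2 = 0, lv2 = 0), reduced to numpy. With
# mu = 0 and log_var = 0 the divergence is exactly zero. Hypothetical name.
def _sketch_kl_to_standard_normal(mu, log_var):
var = np.exp(log_var)
return 0.5 * np.sum(-log_var + var + mu ** 2 - 1.0, axis=1)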
#######################################################################
# Get the dataset
#######################################################################
def get_data(path, activation):
#mnist = fetch_mldata('MNIST original')
data = []
image_names = []
#set the path to the 32x32 images of caltech101 dataset created using the convert_data_inverted.py script
#path = '/home/ubuntu/datasets/caltech101/data/images32x32/'
#path_wo_ext = '/home/ubuntu/datasets/caltech101/data/images/'
for filename in os.listdir(path):
img = cv2.imread(os.path.join(path,filename), cv2.IMREAD_GRAYSCALE)
image_names.append(filename)
if img is not None:
data.append(img)
data = np.asarray(data)
if activation == 'sigmoid':
#scaling image values to the range [0, 1], matching the sigmoid generator activation
data = data.astype(np.float32)/(255.0)
elif activation == 'tanh':
#scaling image values to the range [-1, 1], matching the tanh generator activation
data = data.astype(np.float32)/(255.0/2) - 1.0
data = data.reshape((data.shape[0], 1, data.shape[1], data.shape[2]))
np.random.seed(1234) # set seed for deterministic ordering
p = np.random.permutation(data.shape[0])
X = data[p]
return X, image_names
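# Example of the scaling above: pixel value 255 maps to 1.0 under 'sigmoid'
# and to 1.0 under 'tanh', while 0 maps to 0.0 and -1.0 respectively,
# matching the output range of each generator activation.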
#######################################################################
# Create a random iterator for generator
#######################################################################
class RandIter(mx.io.DataIter):
def __init__(self, batch_size, ndim):
self.batch_size = batch_size
self.ndim = ndim
self.provide_data = [('rand', (batch_size, ndim, 1, 1))]
self.provide_label = []
def iter_next(self):
return True
def getdata(self):
return [mx.random.normal(0, 1.0, shape=(self.batch_size, self.ndim, 1, 1))]
#######################################################################
# fill the ith grid of the buffer matrix with the values from the img
# buf : buffer matrix
# i : serial of the image in the 2D grid
# img : image data
# shape : ( height width depth ) of image
#######################################################################
def fill_buf(buf, i, img, shape):
# the grid is m cells wide; integer division keeps the indices below
# integral under Python 3 as well
m = buf.shape[0]//shape[0]
sx = (i%m)*shape[1]
sy = (i//m)*shape[0]
buf[sy:sy+shape[0], sx:sx+shape[1], :] = img
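# Worked example: with a 64x64 buffer and 32x32 images, m = 2, so image
# i = 3 lands at sx = (3 % 2) * 32 = 32 and sy = (3 // 2) * 32 = 32,
# i.e. the bottom-right cell of a 2x2 grid.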
#######################################################################
# create a grid of images and save it as a final image
# title : grid image name
# X : array of images
#######################################################################
def visual(title, X, activation):
assert len(X.shape) == 4
X = X.transpose((0, 2, 3, 1))
if activation == 'sigmoid':
X = np.clip((X)*(255.0), 0, 255).astype(np.uint8)
elif activation == 'tanh':
X = np.clip((X+1.0)*(255.0/2.0), 0, 255).astype(np.uint8)
n = np.ceil(np.sqrt(X.shape[0]))
buff = np.zeros((int(n*X.shape[1]), int(n*X.shape[2]), int(X.shape[3])), dtype=np.uint8)
for i, img in enumerate(X):
fill_buf(buff, i, img, X.shape[1:3])
#buff = cv2.cvtColor(buff, cv2.COLOR_BGR2RGB)
#local_out = 1
#num = 1
cv2.imwrite('%s.jpg' % (title), buff)
#######################################################################
# adversarial training of the VAE
#######################################################################
def train(dataset, nef, ndf, ngf, nc, batch_size, Z, lr, beta1, epsilon, ctx, check_point, g_dl_weight, output_path, checkpoint_path, data_path, activation, num_epoch, save_after_every, visualize_after_every, show_after_every):
#encoder
z_mu, z_lv, z = encoder(nef, Z, batch_size)
symE = mx.sym.Group([z_mu, z_lv, z])
#generator
symG = generator(ngf, nc, no_bias=True, fix_gamma=True, eps=1e-5 + 1e-12, z_dim = Z, activation=activation )
#discriminator
h = discriminator1(ndf)
dloss = discriminator2(ndf)
#symD = mx.sym.Group([dloss, h])
symD1 = h
symD2 = dloss
#symG, symD = make_dcgan_sym(nef, ngf, ndf, nc)
#mx.viz.plot_network(symG, shape={'rand': (batch_size, 100, 1, 1)}).view()
#mx.viz.plot_network(symD, shape={'data': (batch_size, nc, 64, 64)}).view()
# ==============data==============
#if dataset == 'caltech':
X_train, _ = get_data(data_path, activation)
train_iter = mx.io.NDArrayIter(X_train, batch_size=batch_size, shuffle=True)
#elif dataset == 'imagenet':
# train_iter = ImagenetIter(imgnet_path, batch_size, (3, 32, 32))
#print('=============================================', str(batch_size), str(Z))
rand_iter = RandIter(batch_size, Z)
label = mx.nd.zeros((batch_size,), ctx=ctx)
# =============module E=============
modE = mx.mod.Module(symbol=symE, data_names=('data',), label_names=None, context=ctx)
modE.bind(data_shapes=train_iter.provide_data)
modE.init_params(initializer=mx.init.Normal(0.02))
modE.init_optimizer(
optimizer='adam',
optimizer_params={
'learning_rate': lr,
'wd': 1e-6,
'beta1': beta1,
'epsilon': epsilon,
'rescale_grad': (1.0/batch_size)
})
mods = [modE]
# =============module G=============
modG = mx.mod.Module(symbol=symG, data_names=('rand',), label_names=None, context=ctx)
modG.bind(data_shapes=rand_iter.provide_data, inputs_need_grad=True)
modG.init_params(initializer=mx.init.Normal(0.02))
modG.init_optimizer(
optimizer='adam',
optimizer_params={
'learning_rate': lr,
'wd': 1e-6,
'beta1': beta1,
'epsilon': epsilon,
#'rescale_grad': (1.0/batch_size)
})
mods.append(modG)
# =============module D=============
modD1 = mx.mod.Module(symD1, label_names=[], context=ctx)
modD2 = mx.mod.Module(symD2, label_names=('label',), context=ctx)
modD = mx.mod.SequentialModule()
modD.add(modD1).add(modD2, take_labels=True, auto_wiring=True)
#modD = mx.mod.Module(symbol=symD, data_names=('data',), label_names=('label',), context=ctx)
modD.bind(data_shapes=train_iter.provide_data,
label_shapes=[('label', (batch_size,))],
inputs_need_grad=True)
modD.init_params(initializer=mx.init.Normal(0.02))
modD.init_optimizer(
optimizer='adam',
optimizer_params={
'learning_rate': lr,
'wd': 1e-3,
'beta1': beta1,
'epsilon': epsilon,
'rescale_grad': (1.0/batch_size)
})
mods.append(modD)
# =============module DL=============
symDL = DiscriminatorLayerLoss()
modDL = mx.mod.Module(symbol=symDL, data_names=('data',), label_names=('label',), context=ctx)
modDL.bind(data_shapes=[('data', (batch_size, nef * 4, 4, 4))],  # shape of the discriminator1 feature map
label_shapes=[('label', (batch_size, nef * 4, 4, 4))],
inputs_need_grad=True)
modDL.init_params(initializer=mx.init.Normal(0.02))
modDL.init_optimizer(
optimizer='adam',
optimizer_params={
'learning_rate': lr,
'wd': 0.,
'beta1': beta1,
'epsilon': epsilon,
'rescale_grad': (1.0/batch_size)
})
# =============module KL=============
symKL = KLDivergenceLoss()
modKL = mx.mod.Module(symbol=symKL, data_names=('data',), label_names=None, context=ctx)
modKL.bind(data_shapes=[('data', (batch_size*2,Z))],
inputs_need_grad=True)
modKL.init_params(initializer=mx.init.Normal(0.02))
modKL.init_optimizer(
optimizer='adam',
optimizer_params={
'learning_rate': lr,
'wd': 0.,
'beta1': beta1,
'epsilon': epsilon,
'rescale_grad': (1.0/batch_size)
})
mods.append(modKL)
def norm_stat(d):
return mx.nd.norm(d)/np.sqrt(d.size)
# monitoring is disabled by the `mon = None` line; remove it to re-enable
mon = mx.mon.Monitor(10, norm_stat, pattern=".*output|d1_backward_data", sort=True)
mon = None
if mon is not None:
for mod in mods:
mod.install_monitor(mon)
# ============calculating prediction accuracy==============
def facc(label, pred):
pred = pred.ravel()
label = label.ravel()
return ((pred > 0.5) == label).mean()
# ============calculating binary cross-entropy loss==============
def fentropy(label, pred):
pred = pred.ravel()
label = label.ravel()
return -(label*np.log(pred+1e-12) + (1.-label)*np.log(1.-pred+1e-12)).mean()
# ============calculating KL divergence loss==============
def kldivergence(label, pred):
# pred carries mu and log-variance concatenated along axis 0;
# nElements is the batch size, bound later inside the training loop
mean, log_var = np.split(pred, 2, axis=0)
var = np.exp(log_var)
KLLoss = -0.5 * np.sum(1 + log_var - np.power(mean, 2) - var)
KLLoss = KLLoss / nElements
return KLLoss
mG = mx.metric.CustomMetric(fentropy)
mD = mx.metric.CustomMetric(fentropy)
mE = mx.metric.CustomMetric(kldivergence)
mACC = mx.metric.CustomMetric(facc)
print('Training...')
stamp = datetime.now().strftime('%Y_%m_%d-%H_%M')
# =============train===============
for epoch in range(num_epoch):
train_iter.reset()
for t, batch in enumerate(train_iter):
rbatch = rand_iter.next()
if mon is not None:
mon.tic()
modG.forward(rbatch, is_train=True)
outG = modG.get_outputs()
#print('======================================================================')
#print(outG)
# update discriminator on fake
label[:] = 0
modD.forward(mx.io.DataBatch(outG, [label]), is_train=True)
modD.backward()
#modD.update()
gradD11 = [[grad.copyto(grad.context) for grad in grads] for grads in modD1._exec_group.grad_arrays]
gradD12 = [[grad.copyto(grad.context) for grad in grads] for grads in modD2._exec_group.grad_arrays]
modD.update_metric(mD, [label])
modD.update_metric(mACC, [label])
#update discriminator on decoded
modE.forward(batch, is_train=True)
mu, lv, z = modE.get_outputs()
#z = GaussianSampleLayer(mu, lv)
z = z.reshape((batch_size, Z, 1, 1))
sample = mx.io.DataBatch([z], label=None, provide_data = [('rand', (batch_size, Z, 1, 1))])
modG.forward(sample, is_train=True)
xz = modG.get_outputs()
label[:] = 0
modD.forward(mx.io.DataBatch(xz, [label]), is_train=True)
modD.backward()
#modD.update()
gradD21 = [[grad.copyto(grad.context) for grad in grads] for grads in modD1._exec_group.grad_arrays]
gradD22 = [[grad.copyto(grad.context) for grad in grads] for grads in modD2._exec_group.grad_arrays]
modD.update_metric(mD, [label])
modD.update_metric(mACC, [label])
# update discriminator on real
label[:] = 1
batch.label = [label]
modD.forward(batch, is_train=True)
lx = [out.copyto(out.context) for out in modD1.get_outputs()]
modD.backward()
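# The fake and decoded passes above stored their gradients (gradD11/12,
# gradD21/22); fold them into the real-pass gradients with weight 0.5
# each, so the single modD.update() below applies all three signals.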
for gradsr, gradsf, gradsd in zip(modD1._exec_group.grad_arrays, gradD11, gradD21):
for gradr, gradf, gradd in zip(gradsr, gradsf, gradsd):
gradr += 0.5 * (gradf + gradd)
for gradsr, gradsf, gradsd in zip(modD2._exec_group.grad_arrays, gradD12, gradD22):
for gradr, gradf, gradd in zip(gradsr, gradsf, gradsd):
gradr += 0.5 * (gradf + gradd)
modD.update()
modD.update_metric(mD, [label])
modD.update_metric(mACC, [label])
# update the generator twice per iteration, since the discriminator would otherwise be too strong
#>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 1 <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
modG.forward(rbatch, is_train=True)
outG = modG.get_outputs()
label[:] = 1
modD.forward(mx.io.DataBatch(outG, [label]), is_train=True)
modD.backward()
diffD = modD1.get_input_grads()
modG.backward(diffD)
#modG.update()
gradG1 = [[grad.copyto(grad.context) for grad in grads] for grads in modG._exec_group.grad_arrays]
mG.update([label], modD.get_outputs())
modG.forward(sample, is_train=True)
xz = modG.get_outputs()
label[:] = 1
modD.forward(mx.io.DataBatch(xz, [label]), is_train=True)
modD.backward()
diffD = modD1.get_input_grads()
modG.backward(diffD)
gradG2 = [[grad.copyto(grad.context) for grad in grads] for grads in modG._exec_group.grad_arrays]
#modG.update()
mG.update([label], modD.get_outputs())
modG.forward(sample, is_train=True)
xz = modG.get_outputs()
modD1.forward(mx.io.DataBatch(xz, []), is_train=True)
outD1 = modD1.get_outputs()
modDL.forward(mx.io.DataBatch(outD1, lx), is_train=True)
modDL.backward()
dlGrad = modDL.get_input_grads()
modD1.backward(dlGrad)
diffD = modD1.get_input_grads()
modG.backward(diffD)
for grads, gradsG1, gradsG2 in zip(modG._exec_group.grad_arrays, gradG1, gradG2):
for grad, gradg1, gradg2 in zip(grads, gradsG1, gradsG2):
grad[:] = g_dl_weight * grad + 0.5 * (gradg1 + gradg2)  # in-place, so the stored gradient array is actually updated
modG.update()
mG.update([label], modD.get_outputs())
#>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 2 <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
modG.forward(rbatch, is_train=True)
outG = modG.get_outputs()
label[:] = 1
modD.forward(mx.io.DataBatch(outG, [label]), is_train=True)
modD.backward()
diffD = modD1.get_input_grads()
modG.backward(diffD)
#modG.update()
gradG1 = [[grad.copyto(grad.context) for grad in grads] for grads in modG._exec_group.grad_arrays]
mG.update([label], modD.get_outputs())
modG.forward(sample, is_train=True)
xz = modG.get_outputs()
label[:] = 1
modD.forward(mx.io.DataBatch(xz, [label]), is_train=True)
modD.backward()
diffD = modD1.get_input_grads()
modG.backward(diffD)
gradG2 = [[grad.copyto(grad.context) for grad in grads] for grads in modG._exec_group.grad_arrays]
#modG.update()
mG.update([label], modD.get_outputs())
modG.forward(sample, is_train=True)
xz = modG.get_outputs()
modD1.forward(mx.io.DataBatch(xz, []), is_train=True)
outD1 = modD1.get_outputs()
modDL.forward(mx.io.DataBatch(outD1, lx), is_train=True)
modDL.backward()
dlGrad = modDL.get_input_grads()
modD1.backward(dlGrad)
diffD = modD1.get_input_grads()
modG.backward(diffD)
for grads, gradsG1, gradsG2 in zip(modG._exec_group.grad_arrays, gradG1, gradG2):
for grad, gradg1, gradg2 in zip(grads, gradsG1, gradsG2):
grad[:] = g_dl_weight * grad + 0.5 * (gradg1 + gradg2)  # in-place, so the stored gradient array is actually updated
modG.update()
mG.update([label], modD.get_outputs())
##update encoder--------------------------------------------------
#modE.forward(batch, is_train=True)
#mu, lv, z = modE.get_outputs()
#z = z.reshape((batch_size, Z, 1, 1))
#sample = mx.io.DataBatch([z], label=None, provide_data = [('rand', (batch_size, Z, 1, 1))])
modG.forward(sample, is_train=True)
xz = modG.get_outputs()
#update generator
modD1.forward(mx.io.DataBatch(xz, []), is_train=True)
outD1 = modD1.get_outputs()
modDL.forward(mx.io.DataBatch(outD1, lx), is_train=True)
DLloss = modDL.get_outputs()
modDL.backward()
dlGrad = modDL.get_input_grads()
modD1.backward(dlGrad)
diffD = modD1.get_input_grads()
modG.backward(diffD)
#modG.update()
#print('updating encoder=====================================')
#update encoder
nElements = batch_size
#var = mx.ndarray.exp(lv)
modKL.forward(mx.io.DataBatch([mx.ndarray.concat(mu,lv, dim=0)]), is_train=True)
KLloss = modKL.get_outputs()
modKL.backward()
gradKLLoss = modKL.get_input_grads()
diffG = modG.get_input_grads()
#print('======================================================================')
#print(np.sum(diffG[0].asnumpy()))
diffG = diffG[0].reshape((batch_size, Z))
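# Backpropagate through the encoder: the KL gradient is split back into
# its mu and log-variance halves (concatenated along axis 0 above) and
# the generator's input gradient is passed as the gradient w.r.t. z.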
modE.backward(mx.ndarray.split(gradKLLoss[0], num_outputs=2, axis=0) + [diffG])
modE.update()
#print('mu type : ')
#print(type(mu))
pred = mx.ndarray.concat(mu,lv, dim=0)
#print(pred)
mE.update([pred], [pred])
if mon is not None:
mon.toc_print()
t += 1
if t % show_after_every == 0:
print('epoch:', epoch, 'iter:', t, 'metric:', mACC.get(), mG.get(), mD.get(), mE.get(), KLloss[0].asnumpy(), DLloss[0].asnumpy())
mACC.reset()
mG.reset()
mD.reset()
mE.reset()
if epoch % visualize_after_every == 0:
visual(output_path +'gout'+str(epoch), outG[0].asnumpy(), activation)
#diff = diffD[0].asnumpy()
#diff = (diff - diff.mean())/diff.std()
#visual('diff', diff)
visual(output_path + 'data'+str(epoch), batch.data[0].asnumpy(), activation)
if check_point and epoch % save_after_every == 0:
print('Saving...')
modG.save_params(checkpoint_path + '/%s_G-%04d.params'%(dataset, epoch))
modD.save_params(checkpoint_path + '/%s_D-%04d.params'%(dataset, epoch))
modE.save_params(checkpoint_path + '/%s_E-%04d.params'%(dataset, epoch))
#######################################################################
# Test the VAE with a pretrained encoder and generator.
# Keep the batch size at 1
#######################################################################
def test(nef, ngf, nc, batch_size, Z, ctx, pretrained_encoder_path, pretrained_generator_path, output_path, data_path, activation, save_embedding, embedding_path = ''):
#encoder
z_mu, z_lv, z = encoder(nef, Z, batch_size)
symE = mx.sym.Group([z_mu, z_lv, z])
#generator
symG = generator(ngf, nc, no_bias=True, fix_gamma=True, eps=1e-5 + 1e-12, z_dim = Z, activation=activation )
#symG, symD = make_dcgan_sym(nef, ngf, ndf, nc)
#mx.viz.plot_network(symG, shape={'rand': (batch_size, 100, 1, 1)}).view()
#mx.viz.plot_network(symD, shape={'data': (batch_size, nc, 64, 64)}).view()
# ==============data==============
X_test, image_names = get_data(data_path, activation)
test_iter = mx.io.NDArrayIter(X_test, batch_size=batch_size, shuffle=False)
# =============module E=============
modE = mx.mod.Module(symbol=symE, data_names=('data',), label_names=None, context=ctx)
modE.bind(data_shapes=test_iter.provide_data)
#modE.init_params(initializer=mx.init.Normal(0.02))
modE.load_params(pretrained_encoder_path)
# =============module G=============
modG = mx.mod.Module(symbol=symG, data_names=('rand',), label_names=None, context=ctx)
modG.bind(data_shapes=[('rand', (1, Z, 1, 1))])
#modG.init_params(initializer=mx.init.Normal(0.02))
modG.load_params(pretrained_generator_path)
print('Testing...')
# =============test===============
test_iter.reset()
for t, batch in enumerate(test_iter):
#update discriminator on decoded
modE.forward(batch, is_train=False)
mu, lv, z = modE.get_outputs()
#z = GaussianSampleLayer(mu, lv)
mu = mu.reshape((batch_size, Z, 1, 1))
sample = mx.io.DataBatch([mu], label=None, provide_data = [('rand', (batch_size, Z, 1, 1))])
modG.forward(sample, is_train=False)
outG = modG.get_outputs()
visual(output_path + '/' + 'gout'+str(t), outG[0].asnumpy(), activation)
visual(output_path + '/' + 'data'+str(t), batch.data[0].asnumpy(), activation)
image_name = image_names[t].split('.')[0]
if save_embedding:
savemat(embedding_path+'/'+image_name+'.mat', {'embedding':mu.asnumpy()})
def parse_args():
parser = argparse.ArgumentParser(description='Train and test an adversarial variational autoencoder')
parser.add_argument('--train', help='train the network', action='store_true')
parser.add_argument('--test', help='test the network', action='store_true')
parser.add_argument('--save_embedding', help='saves the shape embedding of each input image', action='store_true')
parser.add_argument('--dataset', help='dataset name', default='caltech', type=str)
parser.add_argument('--activation', help='activation i.e. sigmoid or tanh', default='sigmoid', type=str)
parser.add_argument('--training_data_path', help='training data path', default='/home/ubuntu/datasets/caltech101/data/images32x32/', type=str)
parser.add_argument('--testing_data_path', help='testing data path', default='/home/ubuntu/datasets/MPEG7dataset/images/', type=str)
parser.add_argument('--pretrained_encoder_path', help='pretrained encoder model path', default='checkpoints32x32_sigmoid/caltech_E-0045.params', type=str)
parser.add_argument('--pretrained_generator_path', help='pretrained generator model path', default='checkpoints32x32_sigmoid/caltech_G-0045.params', type=str)
parser.add_argument('--output_path', help='output path for the generated images', default='outputs32x32_sigmoid/', type=str)
parser.add_argument('--embedding_path', help='output path for the generated embeddings', default='outputs32x32_sigmoid/', type=str)
parser.add_argument('--checkpoint_path', help='checkpoint saving path ', default='checkpoints32x32_sigmoid/', type=str)
parser.add_argument('--nef', help='encoder filter count in the first layer', default=64, type=int)
parser.add_argument('--ndf', help='discriminator filter count in the first layer', default=64, type=int)
parser.add_argument('--ngf', help='generator filter count in the second last layer', default=64, type=int)
parser.add_argument('--nc', help='generator filter count in the last layer i.e. 1 for grayscale image, 3 for RGB image', default=1, type=int)
parser.add_argument('--batch_size', help='batch size, keep it 1 during testing', default=64, type=int)
parser.add_argument('--Z', help='embedding size', default=100, type=int)
parser.add_argument('--lr', help='learning rate', default=0.0002, type=float)
parser.add_argument('--beta1', help='beta1 for adam optimizer', default=0.5, type=float)
parser.add_argument('--epsilon', help='epsilon for adam optimizer', default=1e-5, type=float)
parser.add_argument('--g_dl_weight', help='discriminator layer loss weight', default=1e-1, type=float)
parser.add_argument('--gpu', help='gpu index', default=0, type=int)
parser.add_argument('--num_epoch', help='number of maximum epochs ', default=45, type=int)
parser.add_argument('--save_after_every', help='save checkpoint after every this number of epochs ', default=5, type=int)
parser.add_argument('--visualize_after_every', help='save output images after every this number of epochs', default=5, type=int)
parser.add_argument('--show_after_every', help='show metrics after this number of iterations', default=10, type=int)
args = parser.parse_args()
return args
def main():
args = parse_args()
# gpu context
ctx = mx.gpu(args.gpu)
# checkpoint saving flags
check_point = True
if args.train:
train(args.dataset, args.nef, args.ndf, args.ngf, args.nc, args.batch_size, args.Z, args.lr, args.beta1, args.epsilon, ctx, check_point, args.g_dl_weight, args.output_path, args.checkpoint_path, args.training_data_path, args.activation, args.num_epoch, args.save_after_every, args.visualize_after_every, args.show_after_every)
if args.test:
test(args.nef, args.ngf, args.nc, 1, args.Z, ctx, args.pretrained_encoder_path, args.pretrained_generator_path, args.output_path, args.testing_data_path, args.activation, args.save_embedding, args.embedding_path)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
main()
|
boyuegame/kbengine
|
refs/heads/master
|
kbe/src/lib/python/Lib/distutils/command/bdist_wininst.py
|
86
|
"""distutils.command.bdist_wininst
Implements the Distutils 'bdist_wininst' command: create a windows installer
exe-program."""
import sys, os
from distutils.core import Command
from distutils.util import get_platform
from distutils.dir_util import create_tree, remove_tree
from distutils.errors import *
from distutils.sysconfig import get_python_version
from distutils import log
class bdist_wininst(Command):
description = "create an executable installer for MS Windows"
user_options = [('bdist-dir=', None,
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('target-version=', None,
"require a specific python version" +
" on the target system"),
('no-target-compile', 'c',
"do not compile .py to .pyc on the target system"),
('no-target-optimize', 'o',
"do not compile .py to .pyo (optimized)"
"on the target system"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('bitmap=', 'b',
"bitmap to use for the installer instead of python-powered logo"),
('title=', 't',
"title to display on the installer background instead of default"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('install-script=', None,
"basename of installation script to be run after"
"installation or before deinstallation"),
('pre-install-script=', None,
"Fully qualified filename of a script to be run before "
"any files are installed. This script need not be in the "
"distribution"),
('user-access-control=', None,
"specify Vista's UAC handling - 'none'/default=no "
"handling, 'auto'=use UAC if target Python installed for "
"all users, 'force'=always use UAC"),
]
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
'skip-build']
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = 0
self.no_target_compile = 0
self.no_target_optimize = 0
self.target_version = None
self.dist_dir = None
self.bitmap = None
self.title = None
self.skip_build = None
self.install_script = None
self.pre_install_script = None
self.user_access_control = None
def finalize_options(self):
self.set_undefined_options('bdist', ('skip_build', 'skip_build'))
if self.bdist_dir is None:
if self.skip_build and self.plat_name:
# If build is skipped and plat_name is overridden, bdist will
# not see the correct 'plat_name' - so set that up manually.
bdist = self.distribution.get_command_obj('bdist')
bdist.plat_name = self.plat_name
# next the command will be initialized using that name
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'wininst')
if not self.target_version:
self.target_version = ""
if not self.skip_build and self.distribution.has_ext_modules():
short_version = get_python_version()
if self.target_version and self.target_version != short_version:
raise DistutilsOptionError(
"target version can only be %s, or the '--skip-build'" \
" option must be specified" % (short_version,))
self.target_version = short_version
self.set_undefined_options('bdist',
('dist_dir', 'dist_dir'),
('plat_name', 'plat_name'),
)
if self.install_script:
for script in self.distribution.scripts:
if self.install_script == os.path.basename(script):
break
else:
raise DistutilsOptionError(
"install_script '%s' not found in scripts"
% self.install_script)
def run(self):
if (sys.platform != "win32" and
(self.distribution.has_ext_modules() or
self.distribution.has_c_libraries())):
raise DistutilsPlatformError \
("distribution contains extensions and/or C libraries; "
"must be compiled on a Windows 32 platform")
if not self.skip_build:
self.run_command('build')
install = self.reinitialize_command('install', reinit_subcommands=1)
install.root = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = 0
install.plat_name = self.plat_name
install_lib = self.reinitialize_command('install_lib')
# we do not want to include pyc or pyo files
install_lib.compile = 0
install_lib.optimize = 0
if self.distribution.has_ext_modules():
# If we are building an installer for a Python version other
# than the one we are currently running, then we need to ensure
# our build_lib reflects the other Python version rather than ours.
# Note that for target_version!=sys.version, we must have skipped the
# build step, so there is no issue with enforcing the build of this
# version.
target_version = self.target_version
if not target_version:
assert self.skip_build, "Should have already checked this"
target_version = sys.version[0:3]
plat_specifier = ".%s-%s" % (self.plat_name, target_version)
build = self.get_finalized_command('build')
build.build_lib = os.path.join(build.build_base,
'lib' + plat_specifier)
# Use a custom scheme for the zip-file, because we have to decide
# at installation time which scheme to use.
for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
value = key.upper()
if key == 'headers':
value = value + '/Include/$dist_name'
setattr(install,
'install_' + key,
value)
log.info("installing to %s", self.bdist_dir)
install.ensure_finalized()
# avoid warning of 'install_lib' about installing
# into a directory not in sys.path
sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
install.run()
del sys.path[0]
# And make an archive relative to the root of the
# pseudo-installation tree.
from tempfile import mktemp
archive_basename = mktemp()
fullname = self.distribution.get_fullname()
arcname = self.make_archive(archive_basename, "zip",
root_dir=self.bdist_dir)
# create an exe containing the zip-file
self.create_exe(arcname, fullname, self.bitmap)
if self.distribution.has_ext_modules():
pyversion = get_python_version()
else:
pyversion = 'any'
self.distribution.dist_files.append(('bdist_wininst', pyversion,
self.get_installer_filename(fullname)))
# remove the zip-file again
log.debug("removing temporary file '%s'", arcname)
os.remove(arcname)
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)
def get_inidata(self):
# Return data describing the installation.
lines = []
metadata = self.distribution.metadata
# Write the [metadata] section.
lines.append("[metadata]")
# 'info' will be displayed in the installer's dialog box,
# describing the items to be installed.
info = (metadata.long_description or '') + '\n'
# Escape newline characters
def escape(s):
return s.replace("\n", "\\n")
for name in ["author", "author_email", "description", "maintainer",
"maintainer_email", "name", "url", "version"]:
data = getattr(metadata, name, "")
if data:
info = info + ("\n %s: %s" % \
(name.capitalize(), escape(data)))
lines.append("%s=%s" % (name, escape(data)))
# The [setup] section contains entries controlling
# the installer runtime.
lines.append("\n[Setup]")
if self.install_script:
lines.append("install_script=%s" % self.install_script)
lines.append("info=%s" % escape(info))
lines.append("target_compile=%d" % (not self.no_target_compile))
lines.append("target_optimize=%d" % (not self.no_target_optimize))
if self.target_version:
lines.append("target_version=%s" % self.target_version)
if self.user_access_control:
lines.append("user_access_control=%s" % self.user_access_control)
title = self.title or self.distribution.get_fullname()
lines.append("title=%s" % escape(title))
import time
import distutils
build_info = "Built %s with distutils-%s" % \
(time.ctime(time.time()), distutils.__version__)
lines.append("build_info=%s" % build_info)
return "\n".join(lines)
def create_exe(self, arcname, fullname, bitmap=None):
import struct
self.mkpath(self.dist_dir)
cfgdata = self.get_inidata()
installer_name = self.get_installer_filename(fullname)
self.announce("creating %s" % installer_name)
if bitmap:
bitmapdata = open(bitmap, "rb").read()
bitmaplen = len(bitmapdata)
else:
bitmaplen = 0
file = open(installer_name, "wb")
file.write(self.get_exe_bytes())
if bitmap:
file.write(bitmapdata)
# Convert cfgdata from unicode to ascii, mbcs encoded
if isinstance(cfgdata, str):
cfgdata = cfgdata.encode("mbcs")
# Append the pre-install script
cfgdata = cfgdata + b"\0"
if self.pre_install_script:
# We need to normalize newlines, so we open in text mode and
# convert back to bytes. "latin-1" simply avoids any possible
# failures.
with open(self.pre_install_script, "r",
encoding="latin-1") as script:
script_data = script.read().encode("latin-1")
cfgdata = cfgdata + script_data + b"\n\0"
else:
# empty pre-install script
cfgdata = cfgdata + b"\0"
file.write(cfgdata)
# The 'magic number' 0x1234567B is used to make sure that the
# binary layout of 'cfgdata' is what the wininst.exe binary
# expects. If the layout changes, increment that number, make
# the corresponding changes to the wininst.exe sources, and
# recompile them.
header = struct.pack("<iii",
0x1234567B, # tag
len(cfgdata), # length
bitmaplen, # number of bytes in bitmap
)
file.write(header)
file.write(open(arcname, "rb").read())
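# Resulting installer layout, in write order: the wininst stub executable,
# the optional bitmap, the NUL-terminated config data (plus any pre-install
# script), the 12-byte struct header with the magic tag and lengths, and
# finally the zip archive of the pseudo-installation tree.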
def get_installer_filename(self, fullname):
# Factored out to allow overriding in subclasses
if self.target_version:
# if we create an installer for a specific python version,
# it's better to include this in the name
installer_name = os.path.join(self.dist_dir,
"%s.%s-py%s.exe" %
(fullname, self.plat_name, self.target_version))
else:
installer_name = os.path.join(self.dist_dir,
"%s.%s.exe" % (fullname, self.plat_name))
return installer_name
def get_exe_bytes(self):
from distutils.msvccompiler import get_build_version
# If a target-version other than the current version has been
# specified, then using the MSVC version from *this* build is no good.
# Without actually finding and executing the target version and parsing
# its sys.version, we just hard-code our knowledge of old versions.
# NOTE: Possible alternative is to allow "--target-version" to
# specify a Python executable rather than a simple version string.
# We can then execute this program to obtain any info we need, such
# as the real sys.version string for the build.
cur_version = get_python_version()
if self.target_version and self.target_version != cur_version:
# If the target version is *later* than us, then we assume they
# use what we use
# string compares seem wrong, but are what sysconfig.py itself uses
if self.target_version > cur_version:
bv = get_build_version()
else:
if self.target_version < "2.4":
bv = 6.0
else:
bv = 7.1
else:
# for current version - use authoritative check.
bv = get_build_version()
# wininst-x.y.exe is in the same directory as this file
directory = os.path.dirname(__file__)
# we must use a wininst-x.y.exe built with the same C compiler
# used for python. XXX What about mingw, borland, and so on?
# if plat_name starts with "win" but is not "win32"
# we want to strip "win" and leave the rest (e.g. -amd64)
# for all other cases, we don't want any suffix
if self.plat_name != 'win32' and self.plat_name[:3] == 'win':
sfix = self.plat_name[3:]
else:
sfix = ''
filename = os.path.join(directory, "wininst-%.1f%s.exe" % (bv, sfix))
f = open(filename, "rb")
try:
return f.read()
finally:
f.close()
|
espressopp/espressopp
|
refs/heads/master
|
src/interaction/AngularCosineSquared.py
|
1
|
# Copyright (C) 2012,2013
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Calculates the Angular Cosine Squared potential as:
.. math::
U = K [\\cos(\\theta) - \\cos(\\theta_{0})]^2,
where angle :math:`\\theta` is the planar angle formed by three bonded particles
(triplet or triple).
This potential is employed by:
.. py:class:: espressopp.interaction.AngularCosineSquared (K = 1.0, theta0 = 0.0)
:param real K: energy amplitude
:param real theta0: angle in radians
:rtype: triple potential
A triple potential applied to every triple in the system creates an *interaction*.
This is done via:
.. py:class:: espressopp.interaction.FixedTripleListAngularCosineSquared (system, fixed_triple_list, potential)
:param std::shared_ptr system: system object
:param list fixed_triple_list: a fixed list of all triples in the system
:param potential: triple potential (in this case, :py:class:`espressopp.interaction.AngularCosineSquared`).
:rtype: interaction
**Methods**
.. py:method:: getFixedTripleList()
:rtype: A Python list of fixed triples (e.g., in the chains).
.. py:method:: setPotential(type1, type2, potential)
:param type1:
:param type2:
:param potential:
:type type1:
:type type2:
:type potential:
**Example 1.** Creating a fixed triple list by :py:class:`espressopp.FixedTripleList`.
>>> # we assume a polymer solution of n_chains chains, each of length chain_len.
>>> # At first, create a list_of_triples for the system:
>>> N = n_chains * chain_len # number of particles in the system
>>> list_of_triples = [] # empty list of triples
>>> for n in range (n_chains): # loop over chains
>>> for m in range (chain_len): # loop over chain beads
>>> pid = n * chain_len + m
>>> if (pid > 1) and (pid < N - 1):
>>> list_of_triples.append( (pid-1, pid, pid+1) )
>>>
>>> # create fixed triple list
>>> fixed_triple_list = espressopp.FixedTripleList(system.storage)
>>> fixed_triple_list.addTriples(list_of_triples)
**Example 2.** Employing an Angular Cosine Squared potential.
>>> # Note, the fixed_triple_list has to be generated in advance! (see Example 1)
>>>
>>> # set up the potential
>>> potAngCosSq = espressopp.interaction.AngularCosineSquared(K=0.5, theta0=0.0)
>>>
>>> # set up the interaction
>>> interAngCosSq = espressopp.interaction.FixedTripleListAngularCosineSquared(system, fixed_triple_list, potAngCosSq)
>>>
>>> # finally, add the interaction to the system
>>> system.addInteraction(interAngCosSq)
"""
from espressopp import pmi
from espressopp.esutil import *
from espressopp.interaction.AngularPotential import *
from espressopp.interaction.Interaction import *
from _espressopp import interaction_AngularCosineSquared, \
interaction_FixedTripleListAngularCosineSquared
class AngularCosineSquaredLocal(AngularPotentialLocal, interaction_AngularCosineSquared):
def __init__(self, K=1.0, theta0=0.0):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup():
cxxinit(self, interaction_AngularCosineSquared, K, theta0)
class FixedTripleListAngularCosineSquaredLocal(InteractionLocal, interaction_FixedTripleListAngularCosineSquared):
def __init__(self, system, vl, potential):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup():
cxxinit(self, interaction_FixedTripleListAngularCosineSquared, system, vl, potential)
def setPotential(self, type1, type2, potential):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup():
self.cxxclass.setPotential(self, type1, type2, potential)
def getFixedTripleList(self):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup():
return self.cxxclass.getFixedTripleList(self)
if pmi.isController:
class AngularCosineSquared(AngularPotential):
pmiproxydefs = dict(
cls = 'espressopp.interaction.AngularCosineSquaredLocal',
pmiproperty = ['K', 'theta0']
)
class FixedTripleListAngularCosineSquared(Interaction, metaclass=pmi.Proxy):
pmiproxydefs = dict(
cls = 'espressopp.interaction.FixedTripleListAngularCosineSquaredLocal',
pmicall = ['setPotential','getFixedTripleList']
)
|
tmxdyf/pjsip-jni
|
refs/heads/master
|
tests/pjsua/scripts-call/305_ice_comp_2_1.py
|
22
|
# $Id$
#
from inc_cfg import *
# Different number of ICE components
test_param = TestParam(
"Callee=use ICE, caller=use ICE",
[
InstanceParam("callee", "--null-audio --use-ice --max-calls=1", enable_buffer=True),
InstanceParam("caller", "--null-audio --use-ice --max-calls=1 --ice-no-rtcp", enable_buffer=True)
]
)
|
ojengwa/oh-mainline
|
refs/heads/master
|
vendor/packages/requests/requests/packages/chardet/chardistribution.py
|
2754
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE,
EUCTW_TYPICAL_DISTRIBUTION_RATIO)
from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE,
EUCKR_TYPICAL_DISTRIBUTION_RATIO)
from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE,
GB2312_TYPICAL_DISTRIBUTION_RATIO)
from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE,
BIG5_TYPICAL_DISTRIBUTION_RATIO)
from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE,
JIS_TYPICAL_DISTRIBUTION_RATIO)
from .compat import wrap_ord
ENOUGH_DATA_THRESHOLD = 1024
SURE_YES = 0.99
SURE_NO = 0.01
MINIMUM_DATA_THRESHOLD = 3
class CharDistributionAnalysis:
def __init__(self):
# Mapping table to get frequency order from char order (get from
# GetOrder())
self._mCharToFreqOrder = None
self._mTableSize = None # Size of above table
# This is a constant value which varies from language to language,
# used in calculating confidence. See
# http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
# for further detail.
self._mTypicalDistributionRatio = None
self.reset()
def reset(self):
"""reset analyser, clear any state"""
# If this flag is set to True, detection is done and conclusion has
# been made
self._mDone = False
self._mTotalChars = 0 # Total characters encountered
# The number of characters whose frequency order is less than 512
self._mFreqChars = 0
def feed(self, aBuf, aCharLen):
"""feed a character with known length"""
if aCharLen == 2:
# we only care about 2-byte characters in our distribution analysis
order = self.get_order(aBuf)
else:
order = -1
if order >= 0:
self._mTotalChars += 1
# order is valid
if order < self._mTableSize:
if 512 > self._mCharToFreqOrder[order]:
self._mFreqChars += 1
def get_confidence(self):
"""return confidence based on existing data"""
# if we didn't receive any character in our consideration range,
# return negative answer
if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
return SURE_NO
if self._mTotalChars != self._mFreqChars:
r = (self._mFreqChars / ((self._mTotalChars - self._mFreqChars)
* self._mTypicalDistributionRatio))
if r < SURE_YES:
return r
# normalize confidence (we don't want to be 100% sure)
return SURE_YES
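# Worked example: with 1000 total characters, 900 of them frequent, and a
# typical distribution ratio of 2.0, r = 900 / ((1000 - 900) * 2.0) = 4.5,
# which is not below SURE_YES, so the method returns the 0.99 cap.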
def got_enough_data(self):
# It is not necessary to receive all data to draw conclusion.
# For charset detection, certain amount of data is enough
return self._mTotalChars > ENOUGH_DATA_THRESHOLD
def get_order(self, aBuf):
# We do not handle characters based on the original encoding string,
# but convert this encoding string to a number, here called order.
# This allows multiple encodings of a language to share one frequency
# table.
return -1
class EUCTWDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = EUCTWCharToFreqOrder
self._mTableSize = EUCTW_TABLE_SIZE
self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for euc-TW encoding, we are interested
# first byte range: 0xc4 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char = wrap_ord(aBuf[0])
if first_char >= 0xC4:
return 94 * (first_char - 0xC4) + wrap_ord(aBuf[1]) - 0xA1
else:
return -1
class EUCKRDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = EUCKRCharToFreqOrder
self._mTableSize = EUCKR_TABLE_SIZE
self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for euc-KR encoding, we are interested
# first byte range: 0xb0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char = wrap_ord(aBuf[0])
if first_char >= 0xB0:
return 94 * (first_char - 0xB0) + wrap_ord(aBuf[1]) - 0xA1
else:
return -1
class GB2312DistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = GB2312CharToFreqOrder
self._mTableSize = GB2312_TABLE_SIZE
self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for GB2312 encoding, we are interested
# first byte range: 0xb0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
if (first_char >= 0xB0) and (second_char >= 0xA1):
return 94 * (first_char - 0xB0) + second_char - 0xA1
else:
return -1
class Big5DistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = Big5CharToFreqOrder
self._mTableSize = BIG5_TABLE_SIZE
self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for big5 encoding, we are interested
# first byte range: 0xa4 -- 0xfe
# second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
if first_char >= 0xA4:
if second_char >= 0xA1:
return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
else:
return 157 * (first_char - 0xA4) + second_char - 0x40
else:
return -1
class SJISDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = JISCharToFreqOrder
self._mTableSize = JIS_TABLE_SIZE
self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for sjis encoding, we are interested
# first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
# second byte range: 0x40 -- 0x7e , 0x81 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
if (first_char >= 0x81) and (first_char <= 0x9F):
order = 188 * (first_char - 0x81)
elif (first_char >= 0xE0) and (first_char <= 0xEF):
order = 188 * (first_char - 0xE0 + 31)
else:
return -1
order = order + second_char - 0x40
if second_char > 0x7F:
order = -1
return order
class EUCJPDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = JISCharToFreqOrder
self._mTableSize = JIS_TABLE_SIZE
self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for euc-JP encoding, we are interested
# first byte range: 0xa0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
char = wrap_ord(aBuf[0])
if char >= 0xA0:
return 94 * (char - 0xA1) + wrap_ord(aBuf[1]) - 0xa1
else:
return -1
|
alu042/edx-platform
|
refs/heads/master
|
lms/djangoapps/mobile_api/social_facebook/groups/serializers.py
|
128
|
"""
Serializer for user API
"""
from rest_framework import serializers
from django.core.validators import RegexValidator
class GroupSerializer(serializers.Serializer):
"""
Serializes facebook groups request
"""
name = serializers.CharField(max_length=150)
description = serializers.CharField(max_length=200, required=False)
privacy = serializers.ChoiceField(choices=[("open", "open"), ("closed", "closed")], required=False)
class GroupsMembersSerializer(serializers.Serializer):
"""
Serializes facebook invitations request
"""
member_ids = serializers.CharField(
required=True,
validators=[
RegexValidator(
regex=r'^([\d]+,?)*$',
message='A comma-separated list of member ids must be provided',
code='member_ids error'
),
]
)
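# Hedged usage sketch (hypothetical payload): validating an invitation
# request; is_valid() and errors are standard rest_framework serializer API.
# serializer = GroupsMembersSerializer(data={'member_ids': '123,456'})
# serializer.is_valid() # -> True; a trailing comma would also pass the regex
# serializer.errors # -> {} on success, per-field messages otherwise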
|
alexandrucoman/vbox-neutron-agent
|
refs/heads/master
|
neutron/agent/l3/router_processing_queue.py
|
11
|
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import datetime
import Queue
from oslo_utils import timeutils
# Lower value is higher priority
PRIORITY_RPC = 0
PRIORITY_SYNC_ROUTERS_TASK = 1
DELETE_ROUTER = 1
class RouterUpdate(object):
"""Encapsulates a router update
An instance of this object carries the information necessary to prioritize
and process a request to update a router.
"""
def __init__(self, router_id, priority,
action=None, router=None, timestamp=None):
self.priority = priority
self.timestamp = timestamp
if not timestamp:
self.timestamp = timeutils.utcnow()
self.id = router_id
self.action = action
self.router = router
def __lt__(self, other):
"""Implements priority among updates
Lower numerical priority always gets precedence. When comparing two
updates of the same priority, the one with the earlier timestamp
gets precedence. In the unlikely event that the timestamps are also
equal it falls back to a simple comparison of ids meaning the
precedence is essentially random.
"""
if self.priority != other.priority:
return self.priority < other.priority
if self.timestamp != other.timestamp:
return self.timestamp < other.timestamp
return self.id < other.id
class ExclusiveRouterProcessor(object):
"""Manager for access to a router for processing
This class controls access to a router in a non-blocking way. The first
instance to be created for a given router_id is granted exclusive access to
the router.
Other instances may be created for the same router_id while the first
instance has exclusive access. If that happens then it doesn't block and
wait for access. Instead, it signals to the master instance that an update
came in with the timestamp.
This way, a thread will not block to wait for access to a router. Instead
it effectively signals to the thread that is working on the router that
something has changed since it started working on it. That thread will
simply finish its current iteration and then repeat.
    This class keeps track of the last time that router data was fetched and
    processed.  The timestamp that it keeps must be no later than the time
    when the data used to process the router was last fetched from the
    database, but as close to it as possible.  The timestamp should not be
    recorded, however, until the router has been processed using the fetched
    data.
"""
_masters = {}
_router_timestamps = {}
def __init__(self, router_id):
self._router_id = router_id
if router_id not in self._masters:
self._masters[router_id] = self
self._queue = []
self._master = self._masters[router_id]
def _i_am_master(self):
return self == self._master
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
if self._i_am_master():
del self._masters[self._router_id]
def _get_router_data_timestamp(self):
return self._router_timestamps.get(self._router_id,
datetime.datetime.min)
def fetched_and_processed(self, timestamp):
"""Records the data timestamp after it is used to update the router"""
new_timestamp = max(timestamp, self._get_router_data_timestamp())
self._router_timestamps[self._router_id] = new_timestamp
def queue_update(self, update):
"""Queues an update from a worker
This is the queue used to keep new updates that come in while a router
is being processed. These updates have already bubbled to the front of
the RouterProcessingQueue.
"""
self._master._queue.append(update)
def updates(self):
"""Processes the router until updates stop coming
Only the master instance will process the router. However, updates may
come in from other workers while it is in progress. This method loops
until they stop coming.
"""
if self._i_am_master():
while self._queue:
# Remove the update from the queue even if it is old.
update = self._queue.pop(0)
# Process the update only if it is fresh.
if self._get_router_data_timestamp() < update.timestamp:
yield update
class RouterProcessingQueue(object):
"""Manager of the queue of routers to process."""
def __init__(self):
self._queue = Queue.PriorityQueue()
def add(self, update):
self._queue.put(update)
def each_update_to_next_router(self):
"""Grabs the next router from the queue and processes
This method uses a for loop to process the router repeatedly until
updates stop bubbling to the front of the queue.
"""
next_update = self._queue.get()
with ExclusiveRouterProcessor(next_update.id) as rp:
# Queue the update whether this worker is the master or not.
rp.queue_update(next_update)
# Here, if the current worker is not the master, the call to
# rp.updates() will not yield and so this will essentially be a
# noop.
for update in rp.updates():
yield (rp, update)
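# Hypothetical end-to-end sketch (added; process() is assumed, not part of the
# module): a worker drains the queue, and each_update_to_next_router() keeps
# yielding while fresh updates for the same router arrive.
#
#   queue = RouterProcessingQueue()
#   queue.add(RouterUpdate('router-a', PRIORITY_RPC))
#   for rp, update in queue.each_update_to_next_router():
#       process(update)                             # assumed worker callable
#       rp.fetched_and_processed(update.timestamp)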
|
40323210/bg6_cdw11
|
refs/heads/master
|
users/b/g101/b40123200.py
|
7
|
from flask import Blueprint, request
bg101 = Blueprint('bg101', __name__, url_prefix='/bg101', template_folder='templates')
head_str = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Web 2D drawing</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango-8v03.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango2D-6v13.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/CangoAxes-1v33.js"></script>
</head>
<body>
<script>
window.onload=function(){
brython(1);
}
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
'''
tail_str = '''
</script>
</body>
</html>
'''
chain_str = '''
<script type="text/python">
from javascript import JSConstructor
from browser import alert
from browser import window
import math
cango = JSConstructor(window.Cango)
cobj = JSConstructor(window.Cobj)
shapedefs = window.shapeDefs
obj2d = JSConstructor(window.Obj2D)
cgo = cango("plotarea")
cgo.setWorldCoords(-250, -250, 500, 500)
# draw the axes
cgo.drawAxes(0, 240, 0, 240, {
"strokeColor":"#aaaaaa",
"fillColor": "#aaaaaa",
"xTickInterval": 20,
"xLabelInterval": 20,
"yTickInterval": 20,
"yLabelInterval": 20})
deg = math.pi/180
# wrap the chain-link outline drawing in a class
class chain():
    # the outline shape is stored as a class variable
chamber = "M -6.8397, -1.4894 \
A 7, 7, 0, 1, 0, 6.8397, -1.4894 \
A 40, 40, 0, 0, 1, 6.8397, -18.511 \
A 7, 7, 0, 1, 0, -6.8397, -18.511 \
A 40, 40, 0, 0, 1, -6.8397, -1.4894 z"
#chamber = "M 0, 0 L 0, -20 z"
cgoChamber = window.svgToCgoSVG(chamber)
def __init__(self, fillcolor="green", border=True, strokecolor= "tan", linewidth=2, scale=1):
self.fillcolor = fillcolor
self.border = border
self.strokecolor = strokecolor
self.linewidth = linewidth
self.scale = scale
    # draw a link defined by its start and end points
def basic(self, x1, y1, x2, y2):
self.x1 = x1
self.y1 = y1
self.x2 = x2
self.y2 = y2
        # note: cgoChamber is a class member
cmbr = cobj(self.cgoChamber, "SHAPE", {
"fillColor": self.fillcolor,
"border": self.border,
"strokeColor": self.strokecolor,
"lineWidth": self.linewidth })
        # hole marks the origin position
hole = cobj(shapedefs.circle(4*self.scale), "PATH")
cmbr.appendPath(hole)
        # duplicate cmbr and name the copy basic1
basic1 = cmbr.dup()
        # the link hangs straight down from the origin, so rotate 90 degrees plus the atan2 angle
basic1.rotate(math.atan2(y2-y1, x2-x1)/deg+90)
        # scale up by the given factor
cgo.render(basic1, x1, y1, self.scale, 0)
    # draw a link from a start point and a rotation angle, using the default color, border and linewidth settings
def basic_rot(self, x1, y1, rot, v=False):
        # if v is True this is a virtual chain and is not rendered
self.x1 = x1
self.y1 = y1
self.rot = rot
self.v = v
        # note: cgoChamber is a class member
cmbr = cobj(self.cgoChamber, "SHAPE", {
"fillColor": self.fillcolor,
"border": self.border,
"strokeColor": self.strokecolor,
"lineWidth": self.linewidth })
        # hole marks the origin position
hole = cobj(shapedefs.circle(4*self.scale), "PATH")
cmbr.appendPath(hole)
        # compute x2 and y2 from the rotation angle
x2 = x1 + 20*math.cos(rot*deg)*self.scale
y2 = y1 + 20*math.sin(rot*deg)*self.scale
        # duplicate cmbr and name the copy basic1
basic1 = cmbr.dup()
        # the link hangs straight down from the origin, so rotate 90 degrees plus the atan2 angle
basic1.rotate(rot+90)
        # scale up by the given factor
        if not v:
cgo.render(basic1, x1, y1, self.scale, 0)
return x2, y2
'''
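# Composition sketch (added for clarity): each letter function below returns
# Brython source as a plain string, so a page is assembled by concatenation,
# exactly as the route handlers at the bottom of this file do:
#
#   page = head_str + chain_str + a(0, 0) + tail_str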
# returns the drawing script for letter A
def a(x, y, scale=1, color="green"):
outstring = '''
# instantiate the chain class and bind it to mychain
mychain = chain(scale='''+str(scale)+''', fillcolor="'''+str(color)+'''")
# draw A
# two vertical links on the left edge
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+''', 90)
x2, y2 = mychain.basic_rot(x1, y1, 90)
# two links on the left slanted edge
x3, y3 = mychain.basic_rot(x2, y2, 80)
x4, y4 = mychain.basic_rot(x3, y3, 71)
# topmost horizontal link
x5, y5 = mychain.basic_rot(x4, y4, 0)
# two links on the right slanted edge
x6, y6 = mychain.basic_rot(x5, y5, -71)
x7, y7 = mychain.basic_rot(x6, y6, -80)
# two vertical links on the right edge
x8, y8 = mychain.basic_rot(x7, y7, -90)
x9, y9 = mychain.basic_rot(x8, y8, -90)
# two horizontal links in the middle
x10, y10 = mychain.basic_rot(x8, y8, -180)
mychain.basic(x10, y10, x1, y1)
'''
return outstring
# returns the drawing script for letter B
def b(x, y):
outstring = '''
# instantiate the chain class and bind it to mychain
mychain = chain()
# draw B
# four vertical links on the left edge
# characters are spaced 65 pixels apart
#x1, y1 = mychain.basic_rot(0+ 65, 0, 90)
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+''', 90)
x2, y2 = mychain.basic_rot(x1, y1, 90)
x3, y3 = mychain.basic_rot(x2, y2, 90)
x4, y4 = mychain.basic_rot(x3, y3, 90)
# one horizontal link at the top
x5, y5 = mychain.basic_rot(x4, y4, 0)
# slant right -30 degrees
x6, y6 = mychain.basic_rot(x5, y5, -30)
# upper-right link pointing straight down
x7, y7 = mychain.basic_rot(x6, y6, -90)
# slant right 210 degrees
x8, y8 = mychain.basic_rot(x7, y7, 210)
# middle horizontal bar
mychain.basic(x8, y8, x2, y2)
# lower-right slant of -30 degrees
x10, y10 = mychain.basic_rot(x8, y8, -30)
# lower-right link pointing straight down
x11, y11 = mychain.basic_rot(x10, y10, -90)
# lower-right slant of 210 degrees
x12, y12 = mychain.basic_rot(x11, y11, 210)
# horizontal link back to the start point
mychain.basic(x12,y12, '''+str(x)+","+str(y)+''')
'''
return outstring
# returns the drawing script for letter C
def c(x, y):
outstring = '''
# instantiate the chain class and bind it to mychain
mychain = chain()
# upper half
# vertical start at mid-left; the center sits at the middle of the segment, shifted up in y by two link-pitch units
#x1, y1 = mychain.basic_rot(0+65*2, -10+10+20*math.sin(80*deg)+20*math.sin(30*deg), 90)
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+'''-10+10+20*math.sin(80*deg)+20*math.sin(30*deg), 90)
# turn 80 degrees at the top
x2, y2 = mychain.basic_rot(x1, y1, 80)
# turn 30 degrees at the top
x3, y3 = mychain.basic_rot(x2, y2, 30)
# horizontal link at the top
x4, y4 = mychain.basic_rot(x3, y3, 0)
# lower half, starting from the start point at -80 degrees
#x5, y5 = mychain.basic_rot(0+65*2, -10+10+20*math.sin(80*deg)+20*math.sin(30*deg), -80)
x5, y5 = mychain.basic_rot('''+str(x)+","+str(y)+'''-10+10+20*math.sin(80*deg)+20*math.sin(30*deg), -80)
# slant down -30 degrees
x6, y6 = mychain.basic_rot(x5, y5, -30)
# horizontal link at the bottom
x7, y7 = mychain.basic_rot(x6, y6, -0)
'''
return outstring
# returns the drawing script for letter D
def d(x, y):
outstring = '''
# instantiate the chain class and bind it to mychain
mychain = chain()
# four vertical links on the left edge
#x1, y1 = mychain.basic_rot(0+65*3, 0, 90)
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+''', 90)
x2, y2 = mychain.basic_rot(x1, y1, 90)
x3, y3 = mychain.basic_rot(x2, y2, 90)
x4, y4 = mychain.basic_rot(x3, y3, 90)
# one horizontal link at the top
x5, y5 = mychain.basic_rot(x4, y4, 0)
# slant right -40 degrees
x6, y6 = mychain.basic_rot(x5, y5, -40)
x7, y7 = mychain.basic_rot(x6, y6, -60)
# middle-right link pointing straight down
x8, y8 = mychain.basic_rot(x7, y7, -90)
# -120 degrees
x9, y9 = mychain.basic_rot(x8, y8, -120)
# -140
x10, y10 = mychain.basic_rot(x9, y9, -140)
# horizontal link back to the origin
#mychain.basic(x10, y10, 0+65*3, 0, color="red")
mychain.basic(x10, y10, '''+str(x)+","+str(y)+''')
'''
return outstring
def circle(x, y):
outstring = '''
mychain = chain()
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+''', 50)
'''
for i in range(2, 10):
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+", 90-"+str(i*40)+") \n"
return outstring
def circle1(x, y, degree=10):
    # 20 is the center-to-center pitch of one chain link
    # the radius of the circle enclosed by the chain is 20/2/math.sin(degree*math.pi/180/2)
    # conversely, degree = 2*math.asin(20/2/radius)*180/math.pi
#degree = 10
first_degree = 90 - degree
repeat = 360 / degree
outstring = '''
mychain = chain()
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+", "+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+", 90-"+str(i*degree)+") \n"
return outstring
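# Worked example (added for clarity): with degree=10 the loop above emits
# repeat = 360/10 = 36 links, and the enclosed radius is
# 20/2/math.sin(10*math.pi/180/2) ~= 114.7 units.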
def circle2(x, y, degree=10):
    # 20 is the center-to-center pitch of one chain link
    # the radius of the circle enclosed by the chain is 20/2/math.sin(degree*math.pi/180/2)
    # conversely, degree = 2*math.asin(20/2/radius)*180/math.pi
#degree = 10
first_degree = 90 - degree
repeat = 360 / degree
outstring = '''
mychain = chain()
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+", "+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+", 90-"+str(i*degree)+") \n"
return outstring
def twocircle(x, y):
    # 20 is the center-to-center pitch of one chain link
    # the radius of the circle enclosed by the chain is 20/2/math.sin(degree*math.pi/180/2)
    # conversely, degree = 2*math.asin(20/2/radius)*180/math.pi
x = 50
y = 0
degree = 12
    # 78, 66, 54, 42, 30, 18, 6 degrees
    # some links must have their coordinates computed but not be rendered
first_degree = 90 - degree
repeat = 360 / degree
    # the first link is also a virtual chain
outstring = '''
mychain = chain()
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+", "+str(first_degree)+''', True)
#x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+", "+str(first_degree)+''')
'''
    # leave one extra virtual link at both top and bottom so (x7, y7) and (x22, y22) can be joined at the end
for i in range(2, int(repeat)+1):
#if i < 7 or i > 23:
if i <= 7 or i >= 23:
            # virtual chain
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+", 90-"+str(i*degree)+", True) \n"
#outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+", 90-"+str(i*degree)+") \n"
else:
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+", 90-"+str(i*degree)+") \n"
p = -150
k = 0
degree = 20
# 70, 50, 30, 10
    # from i=5 on, the links are virtual chains
first_degree = 90 - degree
repeat = 360 / degree
    # the first link is not a virtual chain
outstring += '''
#mychain = chain()
p1, k1 = mychain.basic_rot('''+str(p)+","+str(k)+", "+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
if i >= 5 and i <= 13:
            # virtual chain
outstring += "p"+str(i)+", k"+str(i)+"=mychain.basic_rot(p"+str(i-1)+", k"+str(i-1)+", 90-"+str(i*degree)+", True) \n"
#outstring += "p"+str(i)+", k"+str(i)+"=mychain.basic_rot(p"+str(i-1)+", k"+str(i-1)+", 90-"+str(i*degree)+") \n"
else:
outstring += "p"+str(i)+", k"+str(i)+"=mychain.basic_rot(p"+str(i-1)+", k"+str(i-1)+", 90-"+str(i*degree)+") \n"
    # upper connecting straight run
    # starting from p4, k4
first_degree = 10
repeat = 11
outstring += '''
m1, n1 = mychain.basic_rot(p4, k4, '''+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
outstring += "m"+str(i)+", n"+str(i)+"=mychain.basic_rot(m"+str(i-1)+", n"+str(i-1)+", "+str(first_degree)+")\n"
    # lower connecting straight run
    # starting from p13, k13
first_degree = -10
repeat = 11
outstring += '''
r1, s1 = mychain.basic_rot(p13, k13, '''+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
outstring += "r"+str(i)+", s"+str(i)+"=mychain.basic_rot(r"+str(i-1)+", s"+str(i-1)+", "+str(first_degree)+")\n"
    # the upper run joins x7, y7 on the right and m11, n11 on the left
outstring += "mychain.basic(x7, y7, m11, n11)\n"
    # the lower run joins x22, y22 on the right and r11, s11 on the left
outstring += "mychain.basic(x22, y22, r11, s11)\n"
return outstring
def eighteenthirty(x, y):
    '''
    Coordinates of the two external tangent lines, obtained both graphically
    and symbolically:
    (-203.592946177111, 0.0), (0.0, 0.0), (-214.364148466539, 56.5714145924675), (-17.8936874260919, 93.9794075692901)
    (-203.592946177111, 0.0), (0.0, 0.0), (-214.364148466539, -56.5714145924675), (-17.8936874260919, -93.9794075692901)
    Left key link: start (-233.06, 49.48), angle 20.78, center (-203.593, 0.0)
    Right key link: start (-17.89, 93.9), angle 4.78, center (0, 0)
    '''
    # 20 is the center-to-center pitch of one chain link
    # the radius of the circle enclosed by the chain is 20/2/math.sin(degree*math.pi/180/2)
    # conversely, degree = 2*math.asin(20/2/radius)*180/math.pi
#x = 50
#y = 0
degree = 20
first_degree = 20.78
startx = -233.06+100+x
starty = 49.48+y
repeat = 360 / degree
    # draw the first key link on the left
outstring = '''
mychain = chain()
x1, y1 = mychain.basic_rot('''+str(startx)+","+str(starty)+", "+str(first_degree)+''')
'''
    # then draw the non-virtual links on the left
for i in range(2, int(repeat)+1):
if i >=2 and i <=11:
            # virtual chain
#outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+","+str(first_degree+degree-i*degree)+") \n"
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+","+str(first_degree+degree-i*degree)+", True) \n"
else:
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+","+str(first_degree+degree-i*degree)+") \n"
    # now handle the non-virtual links on the right
    # draw the first key link on the right
p = -17.89+100+x
k = 93.98+y
degree = 12
first_degree = 4.78
repeat = 360 / degree
    # the first link is not a virtual chain
outstring += '''
#mychain = chain()
p1, k1 = mychain.basic_rot('''+str(p)+","+str(k)+", "+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
if i >=18:
            # virtual chain
outstring += "p"+str(i)+", k"+str(i)+"=mychain.basic_rot(p"+str(i-1)+", k"+str(i-1)+","+str(first_degree+degree-i*degree)+", True) \n"
#outstring += "p"+str(i)+", k"+str(i)+"=mychain.basic_rot(p"+str(i-1)+", k"+str(i-1)+","+str(first_degree+degree-i*degree)+") \n"
else:
outstring += "p"+str(i)+", k"+str(i)+"=mychain.basic_rot(p"+str(i-1)+", k"+str(i-1)+","+str(first_degree+degree-i*degree)+") \n"
    # upper connecting straight run
    # starting from x1, y1
first_degree = 10.78
repeat = 10
outstring += '''
m1, n1 = mychain.basic_rot(x1, y1, '''+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
outstring += "m"+str(i)+", n"+str(i)+"=mychain.basic_rot(m"+str(i-1)+", n"+str(i-1)+", "+str(first_degree)+")\n"
    # lower connecting straight run
    # starting from x11, y11
first_degree = -10.78
repeat = 10
outstring += '''
r1, s1 = mychain.basic_rot(x11, y11, '''+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
outstring += "r"+str(i)+", s"+str(i)+"=mychain.basic_rot(r"+str(i-1)+", s"+str(i-1)+", "+str(first_degree)+")\n"
return outstring
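# Consistency check (added): the tangent endpoints in the docstring above are
# 200 units apart -- for the upper tangent,
# dx = -17.8936874260919 - (-214.364148466539) ~= 196.47 and
# dy = 93.9794075692901 - 56.5714145924675 ~= 37.41, so
# math.hypot(dx, dy) ~= 200.0, matching the 10 connecting links of pitch 20
# drawn in each straight run.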
@bg101.route('/a')
def draw_a():
return head_str + chain_str + a(0, 0) + tail_str
@bg101.route('/b')
def draw_b():
    # horizontal spacing between characters is 65 pixels; vertical spacing is 110 pixels
return head_str + chain_str + b(0+65, 0) + tail_str
@bg101.route('/c')
def draw_c():
    # horizontal spacing between characters is 65 pixels
return head_str + chain_str + c(0+65*2, 0) + tail_str
@bg101.route('/d')
def draw_d():
return head_str + chain_str + d(0+65*3, 0) + tail_str
@bg101.route('/ab')
def draw_ab():
#return head_str + chain_str + a(0, 0) + b(0+65, 0) + tail_str
return head_str + chain_str + a(0, 0) + b(0, 0-110) + tail_str
@bg101.route('/ac')
def draw_ac():
return head_str + chain_str + a(0, 0) + c(0+65, 0) + tail_str
@bg101.route('/bc')
def draw_bc():
return head_str + chain_str + b(0, 0) + c(0+65, 0) + tail_str
@bg101.route('/abc')
def draw_abc():
return head_str + chain_str + a(0, 0) + b(0+65, 0) + c(0+65*2, 0) + tail_str
@bg101.route('/aaaa')
def draw_aaaa():
outstring = head_str + chain_str
scale = 2
for i in range(20):
scale = scale*0.9
outstring += a(0+10*i, 0, scale=scale)
return outstring + tail_str
#return head_str + chain_str + a(0, 0, scale=1) + a(0+65, 0, scale=0.8, color="red") + a(0+65*2, 0, scale=0.6) + a(0+65*3, 0, scale=0.4, color="red") + tail_str
@bg101.route('/badc')
def draw_badc():
return head_str + chain_str + b(0, 0) + a(0+65, 0) + d(0+65*2, 0) + c(0+65*3, 0) + tail_str
@bg101.route('/abcd')
def draw_abcd():
#return head_str + chain_str + a(0, 0) + b(0+65, 0) + c(0+65*2, 0) + d(0+65*3, 0) + tail_str
return head_str + chain_str + a(0, 110) + b(0, 110-110) + c(0, 110-110*2) + d(0, 110-110*3) + tail_str
@bg101.route('/circle')
def drawcircle():
return head_str + chain_str + circle(0, 0) + tail_str
@bg101.route('/circle1/<degree>', defaults={'x': 0, 'y': 0})
@bg101.route('/circle1/<x>/<degree>', defaults={'y': 0})
@bg101.route('/circle1/<x>/<y>/<degree>')
#@bg101.route('/circle1/<int:x>/<int:y>/<int:degree>')
def drawcircle1(x,y,degree):
return head_str + chain_str + circle1(int(x), int(y), int(degree)) + tail_str
@bg101.route('/circle2/<degree>', defaults={'x': 0, 'y': 0})
@bg101.route('/circle2/<x>/<degree>', defaults={'y': 0})
@bg101.route('/circle2/<x>/<y>/<degree>')
#@bg101.route('/circle2/<int:x>/<int:y>/<int:degree>')
def drawcircle2(x,y,degree):
return head_str + chain_str + circle2(int(x), int(y), int(degree)) + tail_str
@bg101.route('/twocircle/<x>/<y>')
@bg101.route('/twocircle', defaults={'x':0, 'y':0})
def drawtwocircle(x,y):
return head_str + chain_str + twocircle(int(x), int(y)) + tail_str
@bg101.route('/eighteenthirty/<x>/<y>')
@bg101.route('/eighteenthirty', defaults={'x':0, 'y':0})
def draw_eighteenthirty(x, y):
return head_str + chain_str + eighteenthirty(int(x), int(y)) + tail_str
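# Route usage sketch (added): with the blueprint mounted at /bg101, these
# views can be exercised directly, e.g.
#   GET /bg101/abcd           -> the four letters stacked vertically
#   GET /bg101/circle1/20     -> a closed loop of 360/20 = 18 links
#   GET /bg101/eighteenthirty -> 18- and 30-link sprockets plus tangent runs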
@bg101.route('/snap')
# http://svg.dabbles.info/snaptut-base
def snap():
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Web snap drawing</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="/static/snap.svg-min.js"></script>
<script>
window.onload=function(){
brython(1);
}
</script>
</head>
<body>
<svg width="800" height="800" viewBox="0 0 800 800" id="svgout"></svg>
<script type="text/python">
from javascript import JSConstructor
from browser import alert
from browser import window, document
# use window and JSConstructor to pull the Snap object out of Brython
snap = JSConstructor(window.Snap)
s = snap("#svgout")
# set the id attribute when creating each object
r = s.rect(10,10,100,100).attr({'id': 'rect'})
c = s.circle(100,100,50).attr({'id': 'circle'})
r.attr('fill', 'red')
c.attr({ 'fill': 'blue', 'stroke': 'black', 'strokeWidth': 10 })
r.attr({ 'stroke': '#123456', 'strokeWidth': 20 })
s.text(180,100, 'Click a shape').attr({'fill' : 'blue', 'stroke': 'blue', 'stroke-width': 0.2 })
g = s.group().attr({'id': 'tux'})
def hoverover(ev):
g.animate({'transform': 's1.5r45,t180,20'}, 1000, window.mina.bounce)
def hoverout(ev):
g.animate({'transform': 's1r0,t180,20'}, 1000, window.mina.bounce)
# callback function
def onSVGLoaded(data):
#s.append(data)
g.append(data)
#g.hover(hoverover, hoverout )
g.text(300,100, 'Hover over me')
# load the svg file with window.Snap.load
tux = window.Snap.load("/static/Dreaming_tux.svg", onSVGLoaded)
g.transform('t180,20')
# handlers bound to window events
def rtoyellow(ev):
r.attr('fill', 'yellow')
def ctogreen(ev):
c.attr('fill', 'green')
# bind mouse events to handlers by object id
document['rect'].bind('click', rtoyellow)
document['circle'].bind('click', ctogreen)
document['tux'].bind('mouseover', hoverover)
document['tux'].bind('mouseleave', hoverout)
</script>
</body>
</html>
'''
return outstring
@bg101.route('/snap_link')
# http://svg.dabbles.info/
def snap_link():
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Web snap drawing</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="/static/snap.svg-min.js"></script>
<script>
window.onload=function(){
brython(1);
}
</script>
</head>
<body>
<svg width="800" height="800" viewBox="0 0 800 800" id="svgout"></svg>
<script type="text/python">
from javascript import JSConstructor
from browser import alert
from browser import window, document
# use window and JSConstructor to pull the Snap object out of Brython
snap = JSConstructor(window.Snap)
# draw on the svg element whose id is "svgout"
s = snap("#svgout")
offsetY = 50
# optionally outline the drawing area
#borderRect = s.rect(0,0,800,640,10,10).attr({ 'stroke': "silver", 'fill': "silver", 'strokeWidth': "3" })
g = s.group().transform('t250,120')
r0 = s.rect(150,150,100,100,20,20).attr({ 'fill': "orange", 'opacity': "0.8", 'stroke': "black", 'strokeWidth': "2" })
c0 = s.circle(225,225,10).attr({ 'fill': "silver", 'stroke': "black", 'strokeWidth': "4" }).attr({ 'id': 'c0' })
g0 = s.group( r0,c0 ).attr({ 'id': 'g0' })
#g0.animate({ 'transform' : 't250,120r360,225,225' },4000)
g0.appendTo( g )
g0.animate({ 'transform' : 'r360,225,225' },4000)
# make g0 draggable
g0.drag()
r1 = s.rect(100,100,100,100,20,20).attr({ 'fill': "red", 'opacity': "0.8", 'stroke': "black", 'strokeWidth': "2" })
c1 = s.circle(175,175,10).attr({ 'fill': "silver", 'stroke': "black" , 'strokeWidth': "4"}).attr({ 'id': 'c1' })
g1 = s.group( r1,c1 ).attr({ 'id': 'g1' })
g1.appendTo( g0 ).attr({ 'id': 'g1' })
g1.animate({ 'transform' : 'r360,175,175' },4000)
r2 = s.rect(50,50,100,100,20,20).attr({ 'fill': "blue", 'opacity': "0.8", 'stroke': "black", 'strokeWidth': "2" })
c2 = s.circle(125,125,10).attr({ 'fill': "silver", 'stroke': "black", 'strokeWidth': "4" }).attr({ 'id': 'c2' })
g2 = s.group(r2,c2).attr({ 'id': 'g2' })
g2.appendTo( g1 )
g2.animate( { 'transform' : 'r360,125,125' },4000)
r3 = s.rect(0,0,100,100,20,20).attr({ 'fill': "yellow", 'opacity': "0.8", 'stroke': "black", 'strokeWidth': "2" })
c3 = s.circle(75,75,10).attr({ 'fill': "silver", 'stroke': "black", 'strokeWidth': "4" }).attr({ 'id': 'c3' })
g3 = s.group(r3,c3).attr({ 'id': 'g3' })
g3.appendTo( g2 )
g3.animate( { 'transform' : 'r360,75,75' },4000)
r4 = s.rect(-50,-50,100,100,20,20).attr({ 'fill': "green", 'opacity': "0.8", 'stroke': "black", 'strokeWidth': "2" })
c4 = s.circle(25,25,10).attr({ 'fill': "silver", 'stroke': "black", 'strokeWidth': "4" }).attr({ 'id': 'c4' })
g4 = s.group(r4,c4).attr({ 'id': 'g4' })
g4.appendTo( g3 )
g4.animate( { 'transform' : 'r360,25,25' },4000)
</script>
</body>
</html>
'''
return outstring
@bg101.route('/snap_gear')
def snap_gear():
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Web snap drawing</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="/static/snap.svg-min.js"></script>
<script>
window.onload=function(){
brython(1);
}
</script>
</head>
<body>
<svg width="800" height="800" viewBox="0 0 800 800" id="svgout"></svg>
<script type="text/python">
from javascript import JSConstructor
from browser import alert
from browser import window, document
# use window and JSConstructor to pull the Snap object out of Brython
snap = JSConstructor(window.Snap)
s = snap("#svgout")
# draw a straight line
s.line(0, 0, 100, 100).attr({ 'fill': "silver", 'stroke': "black", 'strokeWidth': "1" }).attr({ 'id': 'line1' })
</script>
</body>
</html>
'''
return outstring
|
cervinko/calibre-web
|
refs/heads/master
|
vendor/sqlalchemy/events.py
|
13
|
# sqlalchemy/events.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Core event interfaces."""
from . import event, exc, util
engine = util.importlater('sqlalchemy', 'engine')
pool = util.importlater('sqlalchemy', 'pool')
class DDLEvents(event.Events):
"""
Define event listeners for schema objects,
    that is, :class:`.SchemaItem` and :class:`.SchemaEventTarget`
subclasses, including :class:`.MetaData`, :class:`.Table`,
:class:`.Column`.
:class:`.MetaData` and :class:`.Table` support events
specifically regarding when CREATE and DROP
DDL is emitted to the database.
Attachment events are also provided to customize
behavior whenever a child schema element is associated
with a parent, such as, when a :class:`.Column` is associated
with its :class:`.Table`, when a :class:`.ForeignKeyConstraint`
is associated with a :class:`.Table`, etc.
Example using the ``after_create`` event::
from sqlalchemy import event
        from sqlalchemy import Table, Column, MetaData, Integer
m = MetaData()
some_table = Table('some_table', m, Column('data', Integer))
def after_create(target, connection, **kw):
connection.execute("ALTER TABLE %s SET name=foo_%s" %
(target.name, target.name))
event.listen(some_table, "after_create", after_create)
DDL events integrate closely with the
:class:`.DDL` class and the :class:`.DDLElement` hierarchy
of DDL clause constructs, which are themselves appropriate
as listener callables::
from sqlalchemy import DDL
event.listen(
some_table,
"after_create",
DDL("ALTER TABLE %(table)s SET name=foo_%(table)s")
)
The methods here define the name of an event as well
as the names of members that are passed to listener
functions.
See also:
:ref:`event_toplevel`
:class:`.DDLElement`
:class:`.DDL`
:ref:`schema_ddl_sequences`
"""
def before_create(self, target, connection, **kw):
"""Called before CREATE statments are emitted.
:param target: the :class:`.MetaData` or :class:`.Table`
object which is the target of the event.
:param connection: the :class:`.Connection` where the
CREATE statement or statements will be emitted.
:param \**kw: additional keyword arguments relevant
to the event. The contents of this dictionary
may vary across releases, and include the
list of tables being generated for a metadata-level
event, the checkfirst flag, and other
elements used by internal events.
"""
def after_create(self, target, connection, **kw):
"""Called after CREATE statments are emitted.
:param target: the :class:`.MetaData` or :class:`.Table`
object which is the target of the event.
:param connection: the :class:`.Connection` where the
CREATE statement or statements have been emitted.
:param \**kw: additional keyword arguments relevant
to the event. The contents of this dictionary
may vary across releases, and include the
list of tables being generated for a metadata-level
event, the checkfirst flag, and other
elements used by internal events.
"""
def before_drop(self, target, connection, **kw):
"""Called before DROP statments are emitted.
:param target: the :class:`.MetaData` or :class:`.Table`
object which is the target of the event.
:param connection: the :class:`.Connection` where the
DROP statement or statements will be emitted.
:param \**kw: additional keyword arguments relevant
to the event. The contents of this dictionary
may vary across releases, and include the
list of tables being generated for a metadata-level
event, the checkfirst flag, and other
elements used by internal events.
"""
def after_drop(self, target, connection, **kw):
"""Called after DROP statments are emitted.
:param target: the :class:`.MetaData` or :class:`.Table`
object which is the target of the event.
:param connection: the :class:`.Connection` where the
DROP statement or statements have been emitted.
:param \**kw: additional keyword arguments relevant
to the event. The contents of this dictionary
may vary across releases, and include the
list of tables being generated for a metadata-level
event, the checkfirst flag, and other
elements used by internal events.
"""
def before_parent_attach(self, target, parent):
"""Called before a :class:`.SchemaItem` is associated with
a parent :class:`.SchemaItem`.
:param target: the target object
:param parent: the parent to which the target is being attached.
:func:`.event.listen` also accepts a modifier for this event:
:param propagate=False: When True, the listener function will
be established for any copies made of the target object,
i.e. those copies that are generated when
:meth:`.Table.tometadata` is used.
"""
def after_parent_attach(self, target, parent):
"""Called after a :class:`.SchemaItem` is associated with
a parent :class:`.SchemaItem`.
:param target: the target object
:param parent: the parent to which the target is being attached.
:func:`.event.listen` also accepts a modifier for this event:
:param propagate=False: When True, the listener function will
be established for any copies made of the target object,
i.e. those copies that are generated when
:meth:`.Table.tometadata` is used.
"""
def column_reflect(self, inspector, table, column_info):
"""Called for each unit of 'column info' retrieved when
a :class:`.Table` is being reflected.
The dictionary of column information as returned by the
dialect is passed, and can be modified. The dictionary
is that returned in each element of the list returned
by :meth:`.reflection.Inspector.get_columns`.
The event is called before any action is taken against
this dictionary, and the contents can be modified.
The :class:`.Column` specific arguments ``info``, ``key``,
and ``quote`` can also be added to the dictionary and
will be passed to the constructor of :class:`.Column`.
Note that this event is only meaningful if either
associated with the :class:`.Table` class across the
board, e.g.::
from sqlalchemy.schema import Table
from sqlalchemy import event
def listen_for_reflect(inspector, table, column_info):
"receive a column_reflect event"
# ...
event.listen(
Table,
'column_reflect',
listen_for_reflect)
...or with a specific :class:`.Table` instance using
the ``listeners`` argument::
def listen_for_reflect(inspector, table, column_info):
"receive a column_reflect event"
# ...
t = Table(
'sometable',
autoload=True,
listeners=[
('column_reflect', listen_for_reflect)
])
        This is because the reflection process initiated by ``autoload=True``
completes within the scope of the constructor for :class:`.Table`.
"""
class SchemaEventTarget(object):
"""Base class for elements that are the targets of :class:`.DDLEvents`
events.
This includes :class:`.SchemaItem` as well as :class:`.SchemaType`.
"""
dispatch = event.dispatcher(DDLEvents)
def _set_parent(self, parent):
"""Associate with this SchemaEvent's parent object."""
raise NotImplementedError()
def _set_parent_with_dispatch(self, parent):
self.dispatch.before_parent_attach(self, parent)
self._set_parent(parent)
self.dispatch.after_parent_attach(self, parent)
class PoolEvents(event.Events):
"""Available events for :class:`.Pool`.
The methods here define the name of an event as well
as the names of members that are passed to listener
functions.
e.g.::
from sqlalchemy import event
def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
"handle an on checkout event"
event.listen(Pool, 'checkout', my_on_checkout)
In addition to accepting the :class:`.Pool` class and
:class:`.Pool` instances, :class:`.PoolEvents` also accepts
:class:`.Engine` objects and the :class:`.Engine` class as
targets, which will be resolved to the ``.pool`` attribute of the
given engine or the :class:`.Pool` class::
engine = create_engine("postgresql://scott:tiger@localhost/test")
# will associate with engine.pool
event.listen(engine, 'checkout', my_on_checkout)
"""
@classmethod
def _accept_with(cls, target):
if isinstance(target, type):
if issubclass(target, engine.Engine):
return pool.Pool
elif issubclass(target, pool.Pool):
return target
elif isinstance(target, engine.Engine):
return target.pool
else:
return target
def connect(self, dbapi_connection, connection_record):
"""Called once for each new DB-API connection or Pool's ``creator()``.
        :param dbapi_connection:
A newly connected raw DB-API connection (not a SQLAlchemy
``Connection`` wrapper).
        :param connection_record:
The ``_ConnectionRecord`` that persistently manages the connection
"""
def first_connect(self, dbapi_connection, connection_record):
"""Called exactly once for the first DB-API connection.
        :param dbapi_connection:
A newly connected raw DB-API connection (not a SQLAlchemy
``Connection`` wrapper).
        :param connection_record:
The ``_ConnectionRecord`` that persistently manages the connection
"""
def checkout(self, dbapi_connection, connection_record, connection_proxy):
"""Called when a connection is retrieved from the Pool.
        :param dbapi_connection:
A raw DB-API connection
        :param connection_record:
The ``_ConnectionRecord`` that persistently manages the connection
        :param connection_proxy:
The ``_ConnectionFairy`` which manages the connection for the span of
the current checkout.
If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current
connection will be disposed and a fresh connection retrieved.
Processing of all checkout listeners will abort and restart
using the new connection.
"""
def checkin(self, dbapi_connection, connection_record):
"""Called when a connection returns to the pool.
Note that the connection may be closed, and may be None if the
connection has been invalidated. ``checkin`` will not be called
for detached connections. (They do not return to the pool.)
        :param dbapi_connection:
A raw DB-API connection
        :param connection_record:
The ``_ConnectionRecord`` that persistently manages the connection
"""
def reset(self, dbapi_con, con_record):
"""Called before the "reset" action occurs for a pooled connection.
This event represents
when the ``rollback()`` method is called on the DBAPI connection
before it is returned to the pool. The behavior of "reset" can
be controlled, including disabled, using the ``reset_on_return``
pool argument.
        The :meth:`.PoolEvents.reset` event is usually followed by the
        :meth:`.PoolEvents.checkin` event, except in those cases where the
        connection is discarded immediately after reset.
:param dbapi_con:
A raw DB-API connection
:param con_record:
The ``_ConnectionRecord`` that persistently manages the connection
.. versionadded:: 0.8
.. seealso::
:meth:`.ConnectionEvents.rollback`
:meth:`.ConnectionEvents.commit`
"""
class ConnectionEvents(event.Events):
"""Available events for :class:`.Connectable`, which includes
:class:`.Connection` and :class:`.Engine`.
The methods here define the name of an event as well as the names of
members that are passed to listener functions.
An event listener can be associated with any :class:`.Connectable`
class or instance, such as an :class:`.Engine`, e.g.::
from sqlalchemy import event, create_engine
def before_cursor_execute(conn, cursor, statement, parameters, context,
executemany):
log.info("Received statement: %s" % statement)
engine = create_engine('postgresql://scott:tiger@localhost/test')
event.listen(engine, "before_cursor_execute", before_cursor_execute)
or with a specific :class:`.Connection`::
with engine.begin() as conn:
@event.listens_for(conn, 'before_cursor_execute')
def before_cursor_execute(conn, cursor, statement, parameters,
context, executemany):
log.info("Received statement: %s" % statement)
The :meth:`.before_execute` and :meth:`.before_cursor_execute`
events can also be established with the ``retval=True`` flag, which
allows modification of the statement and parameters to be sent
to the database. The :meth:`.before_cursor_execute` event is
particularly useful here to add ad-hoc string transformations, such
as comments, to all executions::
from sqlalchemy.engine import Engine
from sqlalchemy import event
@event.listens_for(Engine, "before_cursor_execute", retval=True)
def comment_sql_calls(conn, cursor, statement, parameters,
context, executemany):
statement = statement + " -- some comment"
return statement, parameters
.. note:: :class:`.ConnectionEvents` can be established on any
combination of :class:`.Engine`, :class:`.Connection`, as well
as instances of each of those classes. Events across all
four scopes will fire off for a given instance of
:class:`.Connection`. However, for performance reasons, the
:class:`.Connection` object determines at instantiation time
whether or not its parent :class:`.Engine` has event listeners
established. Event listeners added to the :class:`.Engine`
class or to an instance of :class:`.Engine` *after* the instantiation
of a dependent :class:`.Connection` instance will usually
*not* be available on that :class:`.Connection` instance. The newly
added listeners will instead take effect for :class:`.Connection`
instances created subsequent to those event listeners being
established on the parent :class:`.Engine` class or instance.
:param retval=False: Applies to the :meth:`.before_execute` and
:meth:`.before_cursor_execute` events only. When True, the
user-defined event function must have a return value, which
is a tuple of parameters that replace the given statement
and parameters. See those methods for a description of
specific return arguments.
.. versionchanged:: 0.8 :class:`.ConnectionEvents` can now be associated
with any :class:`.Connectable` including :class:`.Connection`,
in addition to the existing support for :class:`.Engine`.
"""
@classmethod
def _listen(cls, target, identifier, fn, retval=False):
target._has_events = True
if not retval:
if identifier == 'before_execute':
orig_fn = fn
def wrap_before_execute(conn, clauseelement,
multiparams, params):
orig_fn(conn, clauseelement, multiparams, params)
return clauseelement, multiparams, params
fn = wrap_before_execute
elif identifier == 'before_cursor_execute':
orig_fn = fn
def wrap_before_cursor_execute(conn, cursor, statement,
parameters, context, executemany):
orig_fn(conn, cursor, statement,
parameters, context, executemany)
return statement, parameters
fn = wrap_before_cursor_execute
elif retval and \
identifier not in ('before_execute', 'before_cursor_execute'):
raise exc.ArgumentError(
"Only the 'before_execute' and "
"'before_cursor_execute' engine "
"event listeners accept the 'retval=True' "
"argument.")
event.Events._listen(target, identifier, fn)
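    # Illustrative note (added): without ``retval=True`` a listener's return
    # value is discarded; the wrappers above re-return the original arguments
    # so the execution path always receives a well-formed tuple, e.g.:
    #
    #   @event.listens_for(engine, "before_cursor_execute")
    #   def log_sql(conn, cursor, statement, parameters, context, executemany):
    #       log.info(statement)   # return value ignored by the wrapper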
def before_execute(self, conn, clauseelement, multiparams, params):
"""Intercept high level execute() events, receiving uncompiled
SQL constructs and other objects prior to rendering into SQL.
This event is good for debugging SQL compilation issues as well
as early manipulation of the parameters being sent to the database,
as the parameter lists will be in a consistent format here.
This event can be optionally established with the ``retval=True``
flag. The ``clauseelement``, ``multiparams``, and ``params``
arguments should be returned as a three-tuple in this case::
@event.listens_for(Engine, "before_execute", retval=True)
            def before_execute(conn, clauseelement, multiparams, params):
# do something with clauseelement, multiparams, params
return clauseelement, multiparams, params
:param conn: :class:`.Connection` object
:param clauseelement: SQL expression construct, :class:`.Compiled`
instance, or string statement passed to :meth:`.Connection.execute`.
:param multiparams: Multiple parameter sets, a list of dictionaries.
:param params: Single parameter set, a single dictionary.
See also:
:meth:`.before_cursor_execute`
"""
def after_execute(self, conn, clauseelement, multiparams, params, result):
"""Intercept high level execute() events after execute.
:param conn: :class:`.Connection` object
:param clauseelement: SQL expression construct, :class:`.Compiled`
instance, or string statement passed to :meth:`.Connection.execute`.
:param multiparams: Multiple parameter sets, a list of dictionaries.
:param params: Single parameter set, a single dictionary.
:param result: :class:`.ResultProxy` generated by the execution.
"""
def before_cursor_execute(self, conn, cursor, statement,
parameters, context, executemany):
"""Intercept low-level cursor execute() events before execution,
receiving the string
SQL statement and DBAPI-specific parameter list to be invoked
against a cursor.
This event is a good choice for logging as well as late modifications
to the SQL string. It's less ideal for parameter modifications except
for those which are specific to a target backend.
This event can be optionally established with the ``retval=True``
flag. The ``statement`` and ``parameters`` arguments should be
returned as a two-tuple in this case::
@event.listens_for(Engine, "before_cursor_execute", retval=True)
def before_cursor_execute(conn, cursor, statement,
parameters, context, executemany):
# do something with statement, parameters
return statement, parameters
See the example at :class:`.ConnectionEvents`.
:param conn: :class:`.Connection` object
:param cursor: DBAPI cursor object
:param statement: string SQL statement
:param parameters: Dictionary, tuple, or list of parameters being
passed to the ``execute()`` or ``executemany()`` method of the
DBAPI ``cursor``. In some cases may be ``None``.
:param context: :class:`.ExecutionContext` object in use. May
be ``None``.
:param executemany: boolean, if ``True``, this is an ``executemany()``
call, if ``False``, this is an ``execute()`` call.
See also:
:meth:`.before_execute`
:meth:`.after_cursor_execute`
"""
def after_cursor_execute(self, conn, cursor, statement,
parameters, context, executemany):
"""Intercept low-level cursor execute() events after execution.
:param conn: :class:`.Connection` object
:param cursor: DBAPI cursor object. Will have results pending
if the statement was a SELECT, but these should not be consumed
as they will be needed by the :class:`.ResultProxy`.
:param statement: string SQL statement
:param parameters: Dictionary, tuple, or list of parameters being
passed to the ``execute()`` or ``executemany()`` method of the
DBAPI ``cursor``. In some cases may be ``None``.
:param context: :class:`.ExecutionContext` object in use. May
be ``None``.
:param executemany: boolean, if ``True``, this is an ``executemany()``
call, if ``False``, this is an ``execute()`` call.
"""
def dbapi_error(self, conn, cursor, statement, parameters,
context, exception):
"""Intercept a raw DBAPI error.
This event is called with the DBAPI exception instance
received from the DBAPI itself, *before* SQLAlchemy wraps the
        exception with its own exception wrappers, and before any
other operations are performed on the DBAPI cursor; the
existing transaction remains in effect as well as any state
on the cursor.
The use case here is to inject low-level exception handling
into an :class:`.Engine`, typically for logging and
debugging purposes. In general, user code should **not** modify
any state or throw any exceptions here as this will
interfere with SQLAlchemy's cleanup and error handling
routines.
Subsequent to this hook, SQLAlchemy may attempt any
number of operations on the connection/cursor, including
closing the cursor, rolling back of the transaction in the
case of connectionless execution, and disposing of the entire
connection pool if a "disconnect" was detected. The
exception is then wrapped in a SQLAlchemy DBAPI exception
wrapper and re-thrown.
:param conn: :class:`.Connection` object
:param cursor: DBAPI cursor object
:param statement: string SQL statement
:param parameters: Dictionary, tuple, or list of parameters being
passed to the ``execute()`` or ``executemany()`` method of the
DBAPI ``cursor``. In some cases may be ``None``.
:param context: :class:`.ExecutionContext` object in use. May
be ``None``.
:param exception: The **unwrapped** exception emitted directly from the
DBAPI. The class here is specific to the DBAPI module in use.
.. versionadded:: 0.7.7
"""
def begin(self, conn):
"""Intercept begin() events.
:param conn: :class:`.Connection` object
"""
def rollback(self, conn):
"""Intercept rollback() events, as initiated by a
:class:`.Transaction`.
Note that the :class:`.Pool` also "auto-rolls back"
a DBAPI connection upon checkin, if the ``reset_on_return``
flag is set to its default value of ``'rollback'``.
To intercept this
rollback, use the :meth:`.PoolEvents.reset` hook.
:param conn: :class:`.Connection` object
.. seealso::
:meth:`.PoolEvents.reset`
"""
def commit(self, conn):
"""Intercept commit() events, as initiated by a
:class:`.Transaction`.
Note that the :class:`.Pool` may also "auto-commit"
a DBAPI connection upon checkin, if the ``reset_on_return``
flag is set to the value ``'commit'``. To intercept this
commit, use the :meth:`.PoolEvents.reset` hook.
:param conn: :class:`.Connection` object
"""
def savepoint(self, conn, name=None):
"""Intercept savepoint() events.
:param conn: :class:`.Connection` object
:param name: specified name used for the savepoint.
"""
def rollback_savepoint(self, conn, name, context):
"""Intercept rollback_savepoint() events.
:param conn: :class:`.Connection` object
:param name: specified name used for the savepoint.
:param context: :class:`.ExecutionContext` in use. May be ``None``.
"""
def release_savepoint(self, conn, name, context):
"""Intercept release_savepoint() events.
:param conn: :class:`.Connection` object
:param name: specified name used for the savepoint.
:param context: :class:`.ExecutionContext` in use. May be ``None``.
"""
def begin_twophase(self, conn, xid):
"""Intercept begin_twophase() events.
:param conn: :class:`.Connection` object
:param xid: two-phase XID identifier
"""
def prepare_twophase(self, conn, xid):
"""Intercept prepare_twophase() events.
:param conn: :class:`.Connection` object
:param xid: two-phase XID identifier
"""
def rollback_twophase(self, conn, xid, is_prepared):
"""Intercept rollback_twophase() events.
:param conn: :class:`.Connection` object
:param xid: two-phase XID identifier
:param is_prepared: boolean, indicates if
:meth:`.TwoPhaseTransaction.prepare` was called.
"""
def commit_twophase(self, conn, xid, is_prepared):
"""Intercept commit_twophase() events.
:param conn: :class:`.Connection` object
:param xid: two-phase XID identifier
:param is_prepared: boolean, indicates if
:meth:`.TwoPhaseTransaction.prepare` was called.
"""
|
b3j0f/middleware
|
refs/heads/master
|
b3j0f/middleware/cls.py
|
1
|
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2016 Jonathan Labéjof <jonathan.labejof@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# --------------------------------------------------------------------
"""Middleware class module.
This module provides class tools in order to ease the development of middleware.
"""
__all__ = ['Middleware']
from .core import register
from six import add_metaclass
class _MetaMiddleware(type):
"""Handle middleware definition."""
def __init__(cls, name, bases, attrs):
super(_MetaMiddleware, cls).__init__(name, bases, attrs)
register(protocols=cls.protocols(), middleware=cls)
@add_metaclass(_MetaMiddleware)
class Middleware(object):
"""Class to use such as a super class in order to automatically register it
to specific protocols.
This class is instanciated with uri parameters."""
__protocols__ = [] #: protocol registration definition.
@classmethod
def protocols(cls):
"""Get all class protocols.
:rtype: set"""
baseclasses = cls.mro()
result = set()
for baseclass in baseclasses:
if hasattr(baseclass, '__protocols__'):
result |= set(baseclass.__protocols__)
else:
break
return result
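# Minimal usage sketch (added; not part of the module): merely defining a
# subclass registers it, because _MetaMiddleware calls register() at class
# creation time.
#
#   class HTTPMiddleware(Middleware):
#       __protocols__ = ['http']
#
#   HTTPMiddleware.protocols()  # {'http'} (plus any inherited protocols)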
|
mbayon/TFG-MachineLearning
|
refs/heads/master
|
venv/lib/python3.6/site-packages/scipy/sparse/extract.py
|
27
|
"""Functions to extract parts of sparse matrices
"""
from __future__ import division, print_function, absolute_import
__docformat__ = "restructuredtext en"
__all__ = ['find', 'tril', 'triu']
from .coo import coo_matrix
def find(A):
"""Return the indices and values of the nonzero elements of a matrix
Parameters
----------
A : dense or sparse matrix
Matrix whose nonzero elements are desired.
Returns
-------
(I,J,V) : tuple of arrays
I,J, and V contain the row indices, column indices, and values
of the nonzero matrix entries.
Examples
--------
>>> from scipy.sparse import csr_matrix, find
>>> A = csr_matrix([[7.0, 8.0, 0],[0, 0, 9.0]])
>>> find(A)
(array([0, 0, 1], dtype=int32), array([0, 1, 2], dtype=int32), array([ 7., 8., 9.]))
"""
A = coo_matrix(A, copy=True)
A.sum_duplicates()
# remove explicit zeros
nz_mask = A.data != 0
return A.row[nz_mask], A.col[nz_mask], A.data[nz_mask]
def tril(A, k=0, format=None):
"""Return the lower triangular portion of a matrix in sparse format
Returns the elements on or below the k-th diagonal of the matrix A.
- k = 0 corresponds to the main diagonal
- k > 0 is above the main diagonal
- k < 0 is below the main diagonal
Parameters
----------
A : dense or sparse matrix
        Matrix whose lower triangular portion is desired.
    k : int, optional
The top-most diagonal of the lower triangle.
format : string
Sparse format of the result, e.g. format="csr", etc.
Returns
-------
L : sparse matrix
Lower triangular portion of A in sparse format.
See Also
--------
triu : upper triangle in sparse format
Examples
--------
>>> from scipy.sparse import csr_matrix, tril
>>> A = csr_matrix([[1, 2, 0, 0, 3], [4, 5, 0, 6, 7], [0, 0, 8, 9, 0]],
... dtype='int32')
>>> A.toarray()
array([[1, 2, 0, 0, 3],
[4, 5, 0, 6, 7],
[0, 0, 8, 9, 0]])
>>> tril(A).toarray()
array([[1, 0, 0, 0, 0],
[4, 5, 0, 0, 0],
[0, 0, 8, 0, 0]])
>>> tril(A).nnz
4
>>> tril(A, k=1).toarray()
array([[1, 2, 0, 0, 0],
[4, 5, 0, 0, 0],
[0, 0, 8, 9, 0]])
>>> tril(A, k=-1).toarray()
array([[0, 0, 0, 0, 0],
[4, 0, 0, 0, 0],
[0, 0, 0, 0, 0]])
>>> tril(A, format='csc')
<3x5 sparse matrix of type '<class 'numpy.int32'>'
with 4 stored elements in Compressed Sparse Column format>
"""
# convert to COOrdinate format where things are easy
A = coo_matrix(A, copy=False)
mask = A.row + k >= A.col
return _masked_coo(A, mask).asformat(format)
def triu(A, k=0, format=None):
"""Return the upper triangular portion of a matrix in sparse format
Returns the elements on or above the k-th diagonal of the matrix A.
- k = 0 corresponds to the main diagonal
- k > 0 is above the main diagonal
- k < 0 is below the main diagonal
Parameters
----------
A : dense or sparse matrix
        Matrix whose upper triangular portion is desired.
    k : int, optional
The bottom-most diagonal of the upper triangle.
format : string
Sparse format of the result, e.g. format="csr", etc.
Returns
-------
L : sparse matrix
Upper triangular portion of A in sparse format.
See Also
--------
tril : lower triangle in sparse format
Examples
--------
>>> from scipy.sparse import csr_matrix, triu
>>> A = csr_matrix([[1, 2, 0, 0, 3], [4, 5, 0, 6, 7], [0, 0, 8, 9, 0]],
... dtype='int32')
>>> A.toarray()
array([[1, 2, 0, 0, 3],
[4, 5, 0, 6, 7],
[0, 0, 8, 9, 0]])
>>> triu(A).toarray()
array([[1, 2, 0, 0, 3],
[0, 5, 0, 6, 7],
[0, 0, 8, 9, 0]])
>>> triu(A).nnz
8
>>> triu(A, k=1).toarray()
array([[0, 2, 0, 0, 3],
[0, 0, 0, 6, 7],
[0, 0, 0, 9, 0]])
>>> triu(A, k=-1).toarray()
array([[1, 2, 0, 0, 3],
[4, 5, 0, 6, 7],
[0, 0, 8, 9, 0]])
>>> triu(A, format='csc')
<3x5 sparse matrix of type '<class 'numpy.int32'>'
with 8 stored elements in Compressed Sparse Column format>
"""
# convert to COOrdinate format where things are easy
A = coo_matrix(A, copy=False)
mask = A.row + k <= A.col
return _masked_coo(A, mask).asformat(format)
def _masked_coo(A, mask):
row = A.row[mask]
col = A.col[mask]
data = A.data[mask]
return coo_matrix((data, (row, col)), shape=A.shape, dtype=A.dtype)
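# Illustrative sketch (added; not part of scipy): _masked_coo works with any
# boolean mask over COO coordinates, not just triangles -- e.g. extracting
# the main diagonal:
#
#   A = coo_matrix(A)
#   D = _masked_coo(A, A.row == A.col)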
|
asedunov/intellij-community
|
refs/heads/master
|
python/testData/copyPaste/TopLevelIfStatementWithMultilineCondition.after.py
|
35
|
if (True or (True or
False)):
x = 1
y = 2
|
robertnishihara/ray
|
refs/heads/master
|
rllib/agents/ars/__init__.py
|
3
|
from ray.rllib.agents.ars.ars import ARSTrainer, DEFAULT_CONFIG
from ray.rllib.agents.ars.ars_tf_policy import ARSTFPolicy
from ray.rllib.agents.ars.ars_torch_policy import ARSTorchPolicy
__all__ = [
"ARSTFPolicy",
"ARSTorchPolicy",
"ARSTrainer",
"DEFAULT_CONFIG",
]
|
Jionglun/-w16b_test
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/site.py
|
805
|
import sys
|
lancezlin/pyjs
|
refs/heads/master
|
examples/libtest/ArgsTest.py
|
6
|
from UnitTest import UnitTest
def aArgs(*args):
return args
def ftest(a, b):
return [a, b]
class ArgsTest(UnitTest):
def testNaming1(self):
values = ftest(1, 2)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
def testNaming2(self):
values = ftest(a=1, b=2)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
def testNaming3(self):
values = ftest(1, b=2)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
def testNaming4(self):
exc_raised = False
try:
values = ftest(1, c=2)
except TypeError, t:
exc_raised = True
self.assertTrue(exc_raised, "TypeError 'c' unexpected arg not raised")
def testNaming5(self):
exc_raised = False
try:
values = ftest()
except TypeError, t:
exc_raised = True
self.assertTrue(exc_raised, "TypeError 'ftest() takes exactly 2 arguments (0 given)' not raised")
def testSimpleCall(self):
values = foo(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = foo2(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testKeywordCall1(self):
values = foo2(c=3, b=2, a=1)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testKeywordCall2(self):
values = foo2(b=2, a=1, c=3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testKeywordCall3(self):
values = foo2(1, c=3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], None)
self.assertEquals(values[2], 3)
def testKeywordCall4(self):
values = foo2()
self.assertEquals(values[0], None)
self.assertEquals(values[1], None)
self.assertEquals(values[2], None)
def testKeywordCall5(self):
values = foo2(c=True)
self.assertEquals(values[0], None)
self.assertEquals(values[1], None)
self.assertEquals(values[2], True)
def testStarArgs(self):
args = (1,2)
res = aArgs(*args)
self.assertEquals(args, res)
args = "123"
try:
res = aArgs(*args)
called = True
exc = None
except TypeError, e:
called = False
exc = e
# weird one: a string is a sequence, so it gets away with being
# called on its own as *args! eeugh.
self.assertTrue(called,
"exception not expected but function called:" + repr(res) + repr(exc))
self.assertEquals(res, ("1", "2", "3"))
args = 1
try:
res = aArgs(*args)
called = True
except TypeError:
called = False
self.assertFalse(called,
"exception expected but not raised - TypeError: aArgs() argument after * must be a sequence")
args = (1,)
res = aArgs(*args)
self.assertEquals(args, res)
args = (1,)
res = aArgs(args)
self.assertEquals((args,), res)
def testDefaultValuesCall(self):
values = foo3(b=7)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 7)
self.assertEquals(values[2], 3)
values = foo3(a=9)
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = foo3()
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testVarargsCall(self):
values = foo4(9, 8, 7, 2, 3, 4)
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 8)
self.assertEquals(values[2], 7)
self.assertEquals(values[3][0], 2)
self.assertEquals(values[3][1], 3)
self.assertEquals(values[3][2], 4)
values = foo4(9, 8, 7, 3, 2, 1)
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 8)
self.assertEquals(values[2], 7)
self.assertEquals(values[3][0], 3)
self.assertEquals(values[3][1], 2)
self.assertEquals(values[3][2], 1)
def testKwargsCall(self):
values = foo5(9, 8, 7, x=5, y=7)
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 8)
self.assertEquals(values[2], 7)
self.assertEquals(values[3]["x"], 5)
self.assertEquals(values[3]["y"], 7)
def testComboCall(self):
values = foo6(9, 8, 7, 1, 2, 3, x=4, y=5)
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 8)
self.assertEquals(values[2], 7)
self.assertEquals(values[3][0], 1)
self.assertEquals(values[3][1], 2)
self.assertEquals(values[3][2], 3)
self.assertEquals(values[4]["x"], 4)
self.assertEquals(values[4]["y"], 5)
def testEdgeCall(self):
values = foo7(1,2,3,b=2)
self.assertEqual(values[0], 1)
self.assertEqual(values[1], (2,3))
self.assertEqual(values[2], {'b':2})
values = foo7(1, 2, 3, {'b':2})
self.assertEqual(values[0], 1)
self.assertEqual(values[1], (2,3,{'b':2}))
self.assertEqual(values[2], {})
values = foo8(1, b=2)
self.assertEqual(values[0], 1)
self.assertEqual(values[1], {'b':2})
values = foo8({'b':2})
self.assertEqual(values[0], {'b':2})
self.assertEqual(values[1], {})
def testSimpleCtorCall(self):
values = ArgsTestClass_foo(1, 2, 3).x
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass_foo2(1, 2, 3).x
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testKeywordCtorCall(self):
values = ArgsTestClass_foo2(c=3, b=2, a=1).x
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass_foo2(b=2, a=1, c=3).x
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass_foo2().x
self.assertEquals(values[0], None)
self.assertEquals(values[1], None)
self.assertEquals(values[2], None)
values = ArgsTestClass_foo2(c=True).x
self.assertEquals(values[0], None)
self.assertEquals(values[1], None)
self.assertEquals(values[2], True)
def testDefaultValuesCtorCall(self):
values = ArgsTestClass_foo3(b=7).x
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 7)
self.assertEquals(values[2], 3)
values = ArgsTestClass_foo3(a=9).x
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass_foo3().x
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testVarargsCtorCall(self):
values = ArgsTestClass_foo4(9, 8, 7, 2, 3, 4).x
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 8)
self.assertEquals(values[2], 7)
self.assertEquals(values[3][0], 2)
self.assertEquals(values[3][1], 3)
self.assertEquals(values[3][2], 4)
values = ArgsTestClass_foo4(9, 8, 7, 3, 2, 1).x
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 8)
self.assertEquals(values[2], 7)
self.assertEquals(values[3][0], 3)
self.assertEquals(values[3][1], 2)
self.assertEquals(values[3][2], 1)
def testKwargsCtorCall(self):
values = ArgsTestClass_foo5(9, 8, 7, x=5, y=7).x
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 8)
self.assertEquals(values[2], 7)
self.assertEquals(values[3]["x"], 5)
self.assertEquals(values[3]["y"], 7)
def testComboCtorCall(self):
values = ArgsTestClass_foo6(9, 8, 7, 1, 2, 3, x=4, y=5).x
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 8)
self.assertEquals(values[2], 7)
self.assertEquals(values[3][0], 1)
self.assertEquals(values[3][1], 2)
self.assertEquals(values[3][2], 3)
self.assertEquals(values[4]["x"], 4)
self.assertEquals(values[4]["y"], 5)
def testSimpleMethodCall(self):
values = ArgsTestClass().foo(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass().foo2(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testKeywordMethodCall(self):
values = ArgsTestClass().foo2(c=3, b=2, a=1)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass().foo2(b=2, a=1, c=3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass().foo2()
self.assertEquals(values[0], None)
self.assertEquals(values[1], None)
self.assertEquals(values[2], None)
values = ArgsTestClass().foo2(c=True)
self.assertEquals(values[0], None)
self.assertEquals(values[1], None)
self.assertEquals(values[2], True)
def testDefaultValuesMethodCall(self):
values = ArgsTestClass().foo3(b=7)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 7)
self.assertEquals(values[2], 3)
values = ArgsTestClass().foo3(a=9)
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass().foo3()
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testVarargsMethodCall(self):
values = ArgsTestClass().foo4(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass().foo4(3, 2, 1)
self.assertEquals(values[0], 3)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 1)
def testKwargsMethodCall(self):
values = ArgsTestClass().foo5(x=5, y=7)
self.assertEquals(values["x"], 5)
self.assertEquals(values["y"], 7)
def testComboMethodCall(self):
values = ArgsTestClass().foo6(1, 2, 3, x=4, y=5)
self.assertEquals(values[0][0], 1)
self.assertEquals(values[0][1], 2)
self.assertEquals(values[0][2], 3)
self.assertEquals(values[1]["x"], 4)
self.assertEquals(values[1]["y"], 5)
def testEdgeMethodCall(self):
values = ArgsTestClass().foo7(1,2,3,b=2)
self.assertEqual(values[0], 1)
self.assertEqual(values[1], (2,3))
self.assertEqual(values[2], {'b':2})
values = ArgsTestClass().foo7(1, 2, 3, {'b':2})
self.assertEqual(values[0], 1)
self.assertEqual(values[1], (2,3,{'b':2}))
self.assertEqual(values[2], {})
values = ArgsTestClass().foo8(1, b=2)
self.assertEqual(values[0], 1)
self.assertEqual(values[1], {'b':2})
values = ArgsTestClass().foo8({'b':2})
self.assertEqual(values[0], {'b':2})
self.assertEqual(values[1], {})
def testSimpleStaticMethodCall(self):
values = ArgsTestClass2.foo(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass2.foo2(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testKeywordStaticMethodCall(self):
values = ArgsTestClass2.foo2(c=3, b=2, a=1)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass2.foo2(b=2, a=1, c=3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass2.foo2()
self.assertEquals(values[0], None)
self.assertEquals(values[1], None)
self.assertEquals(values[2], None)
values = ArgsTestClass2.foo2(c=True)
self.assertEquals(values[0], None)
self.assertEquals(values[1], None)
self.assertEquals(values[2], True)
def testDefaultValuesStaticMethodCall(self):
values = ArgsTestClass2.foo3(b=7)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 7)
self.assertEquals(values[2], 3)
values = ArgsTestClass2.foo3(a=9)
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass2.foo3()
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testVarargsStaticMethodCall(self):
values = ArgsTestClass2.foo4(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass2.foo4(3, 2, 1)
self.assertEquals(values[0], 3)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 1)
def testKwargsStaticMethodCall(self):
values = ArgsTestClass2.foo5(x=5, y=7)
self.assertEquals(values["x"], 5)
self.assertEquals(values["y"], 7)
def testComboStaticMethodCall(self):
values = ArgsTestClass2.foo6(1, 2, 3, x=4, y=5)
self.assertEquals(values[0][0], 1)
self.assertEquals(values[0][1], 2)
self.assertEquals(values[0][2], 3)
self.assertEquals(values[1]["x"], 4)
self.assertEquals(values[1]["y"], 5)
def testEdgeStaticMethodCall(self):
values = ArgsTestClass2.foo7(1,2,3,b=2)
self.assertEqual(values[0], 1)
self.assertEqual(values[1], (2,3))
self.assertEqual(values[2], {'b':2})
values = ArgsTestClass2.foo7(1, 2, 3, {'b':2})
self.assertEqual(values[0], 1)
self.assertEqual(values[1], (2,3,{'b':2}))
self.assertEqual(values[2], {})
values = ArgsTestClass2.foo8(1, b=2)
self.assertEqual(values[0], 1)
self.assertEqual(values[1], {'b':2})
values = ArgsTestClass2.foo8({'b':2})
self.assertEqual(values[0], {'b':2})
self.assertEqual(values[1], {})
def testSimpleClassMethodCall(self):
values = ArgsTestClass3.foo(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass3.foo2(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testKeywordClassMethodCall(self):
values = ArgsTestClass3.foo2(c=3, b=2, a=1)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass3.foo2(b=2, a=1, c=3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass3.foo2()
self.assertEquals(values[0], None)
self.assertEquals(values[1], None)
self.assertEquals(values[2], None)
values = ArgsTestClass3.foo2(c=True)
self.assertEquals(values[0], None)
self.assertEquals(values[1], None)
self.assertEquals(values[2], True)
def testDefaultValuesClassMethodCall(self):
values = ArgsTestClass3.foo3(b=7)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 7)
self.assertEquals(values[2], 3)
values = ArgsTestClass3.foo3(a=9)
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass3.foo3()
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testVarargsClassMethodCall(self):
values = ArgsTestClass3.foo4(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass3.foo4(3, 2, 1)
self.assertEquals(values[0], 3)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 1)
def testKwargsClassMethodCall(self):
values = ArgsTestClass3.foo5(x=5, y=7)
self.assertEquals(values["x"], 5)
self.assertEquals(values["y"], 7)
def testComboClassMethodCall(self):
values = ArgsTestClass3.foo6(1, 2, 3, x=4, y=5)
self.assertEquals(values[0][0], 1)
self.assertEquals(values[0][1], 2)
self.assertEquals(values[0][2], 3)
self.assertEquals(values[1]["x"], 4)
self.assertEquals(values[1]["y"], 5)
def testEdgeClassMethodCall(self):
values = ArgsTestClass3.foo7(1,2,3,b=2)
self.assertEqual(values[0], 1)
self.assertEqual(values[1], (2,3))
self.assertEqual(values[2], {'b':2})
values = ArgsTestClass3.foo7(1, 2, 3, {'b':2})
self.assertEqual(values[0], 1)
self.assertEqual(values[1], (2,3,{'b':2}))
self.assertEqual(values[2], {})
values = ArgsTestClass3.foo8(1, b=2)
self.assertEqual(values[0], 1)
self.assertEqual(values[1], {'b':2})
values = ArgsTestClass3.foo8({'b':2})
self.assertEqual(values[0], {'b':2})
self.assertEqual(values[1], {})
def testSimpleIndirectClassMethodCall(self):
values = ArgsTestClass3().foo(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass3().foo2(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testKeywordIndirectClassMethodCall(self):
values = ArgsTestClass3().foo2(c=3, b=2, a=1)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass3().foo2(b=2, a=1, c=3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass3().foo2()
self.assertEquals(values[0], None)
self.assertEquals(values[1], None)
self.assertEquals(values[2], None)
values = ArgsTestClass3().foo2(c=True)
self.assertEquals(values[0], None)
self.assertEquals(values[1], None)
self.assertEquals(values[2], True)
def testDefaultValuesIndirectClassMethodCall(self):
values = ArgsTestClass3().foo3(b=7)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 7)
self.assertEquals(values[2], 3)
values = ArgsTestClass3().foo3(a=9)
self.assertEquals(values[0], 9)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass3().foo3()
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
def testVarargsIndirectClassMethodCall(self):
values = ArgsTestClass3().foo4(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = ArgsTestClass3().foo4(3, 2, 1)
self.assertEquals(values[0], 3)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 1)
def testKwargsIndirectClassMethodCall(self):
values = ArgsTestClass3().foo5(x=5, y=7)
self.assertEquals(values["x"], 5)
self.assertEquals(values["y"], 7)
def testComboIndirectClassMethodCall(self):
values = ArgsTestClass3().foo6(1, 2, 3, x=4, y=5)
self.assertEquals(values[0][0], 1)
self.assertEquals(values[0][1], 2)
self.assertEquals(values[0][2], 3)
self.assertEquals(values[1]["x"], 4)
self.assertEquals(values[1]["y"], 5)
def testKwArgsRecurse(self):
kwa = kw_args(x=5, y=6)
if kwa:
self.assertEquals(kwa.get('x'), 5)
self.assertEquals(kwa.get('y'), 6)
kwa = kw_args2(x=5, y=6)
if kwa:
self.assertEquals(kwa.get('x'), 5)
self.assertEquals(kwa.get('y'), 6)
values = varargs_kwargs(1,2,3,4,c=3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], (3,4))
self.assertEquals(values[3]['c'], 3)
values = varargs_kwargs2(1,2,3,4,c=3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], (3,4))
self.assertEquals(values[3]['c'], 3)
values = varargs_kwargs2(1)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 3)
values = varargs_kwargs2(1, {'a':1}, {})
self.assertEquals(values[0], 1)
self.assertEquals(values[1]['a'], 1)
values = varargs_kwargs2(1, {'a':1})
self.assertEquals(values[0], 1)
try:
self.assertEquals(values[1], {'a':1})
except TypeError, e:
self.fail("Last arg in *args,**kwargs is dict problem")
def testKwArgsInherit(self):
c = KwArgs(x=5, y=6)
self.assertTrue(hasattr(c, 'kwargs'))
kwa = getattr(c, 'kwargs', None)
if kwa:
self.assertEquals(kwa.get('x'), 5)
self.assertEquals(kwa.get('y'), 6)
self.assertEquals(kwa.get('z'), 7)
try:
c = Kwargs2(x=5, y=6)
self.assertTrue(hasattr(c, 'kwargs'))
kwa = getattr(c, 'kwargs', None)
if kwa:
self.assertEquals(kwa.get('x'), 5)
self.assertEquals(kwa.get('y'), 6)
self.assertEquals(kwa.get('z'), 7)
except:
self.assertTrue(False, "runtime error in kwargs, needs investigating")
c.set_kwargs(x=5, y=6)
self.assertTrue(hasattr(c, 'kwargs'))
kwa = getattr(c, 'kwargs', None)
if kwa:
self.assertEquals(kwa.get('x'), 5)
self.assertEquals(kwa.get('y'), 6)
self.assertEquals(kwa.get('z'), 8)
c.set_kwargs2(x=5, y=6)
self.assertTrue(hasattr(c, 'kwargs'))
kwa = getattr(c, 'kwargs', None)
if kwa:
self.assertEquals(kwa.get('x'), 5)
self.assertEquals(kwa.get('y'), 6)
self.assertEquals(kwa.get('z'), 8)
c.set_kwargs3(x=5, y=6)
self.assertTrue(hasattr(c, 'kwargs'))
kwa = getattr(c, 'kwargs', None)
if kwa:
self.assertEquals(kwa.get('x'), 5)
self.assertEquals(kwa.get('y'), 6)
self.assertEquals(kwa.get('z'), 8)
def testKwArgsNameMapping(self):
kwargs = dict(comment='Comment', name='Name')
def fn(comment=None, name=None):
return dict(comment=comment, name=name)
kwargs_out = fn(**kwargs)
self.assertEquals(kwargs, kwargs_out)
kwargs = {'comment': 'Comment', 'name': 'Name'}
kwargs_out = fn(**kwargs)
self.assertEquals(kwargs, kwargs_out)
def testLookupOrder(self):
def fn(fint = int):
return fint(1.2);
class A:
def fn(self, fint = int):
return fint(1.2);
self.assertEqual(fn(), 1)
self.assertEqual(A().fn(), 1)
def testArgIsModuleName(self):
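# fn's parameter deliberately shadows this module's own name ('ArgsTest');
# name lookup must prefer the local binding over the module object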
def fn(ArgsTest):
return foo(ArgsTest, 2, 3)
self.assertEqual(__name__, 'ArgsTest', "Argument to fn must be equal to module name")
self.assertEqual(fn('foo'), ['foo', 2, 3])
def testGetattr(self):
instance = ArgsTestClass()
foo = instance.foo
values = foo(1, 2, 3)
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
values = foo(*(1, 2, 3))
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
try:
values = foo(*(1, 2), **dict(c=3))
self.assertEquals(values[0], 1)
self.assertEquals(values[1], 2)
self.assertEquals(values[2], 3)
except TypeError:
self.fail('foo() takes exactly 4 arguments (5 given), bug #503')
def testArgsUnpack(self):
def func(a, (b, c), d):
return a + b + c + d
try:
self.assertEqual(func(1, (2, 3), 4), 10, 'Function args unpacking not supported, #527')
except:
self.fail('Bug #527 Function args unpacking not supported')
def foo(a, b, c):
return [a, b, c]
def foo2(a=None, b=None, c=None):
return [a, b, c]
def foo3(a=1, b=2, c=3):
return [a, b, c]
def foo4(a, b, c, *args):
return a, b, c, args
def foo5(a, b, c, **kwargs):
return a, b, c, kwargs
def foo6(a, b, c, *args, **kwargs):
return (a, b, c, args, kwargs)
def foo7(a, *args, **kwargs):
return (a, args, kwargs)
def foo8(a, **kwargs):
return (a, kwargs)
class ArgsTestClass_foo:
def __init__(self, a, b, c):
self.x = [a, b, c]
class ArgsTestClass_foo2:
def __init__(self, a=None, b=None, c=None):
self.x = [a, b, c]
class ArgsTestClass_foo3:
def __init__(self, a=1, b=2, c=3):
self.x = [a, b, c]
class ArgsTestClass_foo4:
def __init__(self, a, b, c, *args):
self.x = a, b, c, args
class ArgsTestClass_foo5:
def __init__(self, a, b, c, **kwargs):
self.x = a, b, c, kwargs
class ArgsTestClass_foo6:
def __init__(self, a, b, c, *args, **kwargs):
self.x = (a, b, c, args, kwargs)
class ArgsTestClass:
def foo(self, a, b, c):
return [a, b, c]
def foo2(self, a=None, b=None, c=None):
return [a, b, c]
def foo3(self, a=1, b=2, c=3):
return [a, b, c]
def foo4(self, *args):
return args
def foo5(self, **kwargs):
return kwargs
def foo6(self, *args, **kwargs):
return (args, kwargs)
def foo7(self, a, *args, **kwargs):
return (a, args, kwargs)
def foo8(self, a, **kwargs):
return (a, kwargs)
class ArgsTestClass2:
@staticmethod
def foo(a, b, c):
return [a, b, c]
@staticmethod
def foo2(a=None, b=None, c=None):
return [a, b, c]
@staticmethod
def foo3(a=1, b=2, c=3):
return [a, b, c]
@staticmethod
def foo4(*args):
return args
@staticmethod
def foo5(**kwargs):
return kwargs
@staticmethod
def foo6(*args, **kwargs):
return (args, kwargs)
@staticmethod
def foo7(a, *args, **kwargs):
return (a, args, kwargs)
@staticmethod
def foo8(a, **kwargs):
return (a, kwargs)
class ArgsTestClass3:
@classmethod
def foo(self, a, b, c):
return [a, b, c]
@classmethod
def foo2(self, a=None, b=None, c=None):
return [a, b, c]
@classmethod
def foo3(self, a=1, b=2, c=3):
return [a, b, c]
@classmethod
def foo4(self, *args):
return args
@classmethod
def foo5(self, **kwargs):
return kwargs
@classmethod
def foo6(self, *args, **kwargs):
return (args, kwargs)
@classmethod
def foo7(self, a, *args, **kwargs):
return (a, args, kwargs)
@classmethod
def foo8(self, a, **kwargs):
return (a, kwargs)
class KwArgs:
def __init__(self, z=7, zz=77, **kwargs):
self.kwargs = kwargs
self.kwargs['z'] = z # XXX this causes problems: kwargs is undefined
def set_kwargs(self, z=8, **kwargs):
self.kwargs = kwargs
self.kwargs['z'] = z
class Kwargs2(KwArgs):
def __init__(self, **kwargs):
KwArgs.__init__(self, **kwargs)
def set_kwargs2(self, **kwargs):
KwArgs.set_kwargs(self, **kwargs)
def set_kwargs3(self, **kwargs):
skw = getattr(self, "set_kwargs")
skw(**kwargs)
def kw_args(**kwargs):
return kwargs
def kw_args2(**kwargs):
return kw_args(**kwargs)
def varargs_kwargs(arg1, arg2=2, *args, **kwargs):
return (arg1, arg2, args, kwargs)
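# forwards everything through unchanged; the different arg2 default (3 vs 2)
# lets the tests above detect which function supplied the default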
def varargs_kwargs2(arg1, arg2=3, *args, **kwargs):
return varargs_kwargs(arg1, arg2, *args, **kwargs)
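# --- illustrative sketch (not part of the original test module) ---
# plain-Python check of the combined unpacking the tests above exercise:
# positionals bind first, the surplus goes to *args, keywords to **kwargs
_demo = foo6(9, 8, 7, 1, 2, 3, x=4, y=5)
assert _demo[0:3] == (9, 8, 7)
assert _demo[3] == (1, 2, 3)
assert _demo[4] == {'x': 4, 'y': 5}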
|
Mbrownshoes/ckanext-bcgov
|
refs/heads/master
|
ckanext/bcgov/logic/__init__.py
|
6
|
# Copyright 2015, Province of British Columbia
# License: https://github.com/bcgov/ckanext-bcgov/blob/master/license
|
lache/RacingKingLee
|
refs/heads/master
|
monitor/engine.win64/2.74/python/lib/encodings/cp1255.py
|
272
|
""" Python Character Mapping Codec cp1255 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1255.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp1255',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\u20ac' # 0x80 -> EURO SIGN
'\ufffe' # 0x81 -> UNDEFINED
'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
'\u2020' # 0x86 -> DAGGER
'\u2021' # 0x87 -> DOUBLE DAGGER
'\u02c6' # 0x88 -> MODIFIER LETTER CIRCUMFLEX ACCENT
'\u2030' # 0x89 -> PER MILLE SIGN
'\ufffe' # 0x8A -> UNDEFINED
'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
'\ufffe' # 0x8C -> UNDEFINED
'\ufffe' # 0x8D -> UNDEFINED
'\ufffe' # 0x8E -> UNDEFINED
'\ufffe' # 0x8F -> UNDEFINED
'\ufffe' # 0x90 -> UNDEFINED
'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
'\u2022' # 0x95 -> BULLET
'\u2013' # 0x96 -> EN DASH
'\u2014' # 0x97 -> EM DASH
'\u02dc' # 0x98 -> SMALL TILDE
'\u2122' # 0x99 -> TRADE MARK SIGN
'\ufffe' # 0x9A -> UNDEFINED
'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
'\ufffe' # 0x9C -> UNDEFINED
'\ufffe' # 0x9D -> UNDEFINED
'\ufffe' # 0x9E -> UNDEFINED
'\ufffe' # 0x9F -> UNDEFINED
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
'\xa2' # 0xA2 -> CENT SIGN
'\xa3' # 0xA3 -> POUND SIGN
'\u20aa' # 0xA4 -> NEW SHEQEL SIGN
'\xa5' # 0xA5 -> YEN SIGN
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\xd7' # 0xAA -> MULTIPLICATION SIGN
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\xaf' # 0xAF -> MACRON
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\xb2' # 0xB2 -> SUPERSCRIPT TWO
'\xb3' # 0xB3 -> SUPERSCRIPT THREE
'\xb4' # 0xB4 -> ACUTE ACCENT
'\xb5' # 0xB5 -> MICRO SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\xb8' # 0xB8 -> CEDILLA
'\xb9' # 0xB9 -> SUPERSCRIPT ONE
'\xf7' # 0xBA -> DIVISION SIGN
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
'\xbf' # 0xBF -> INVERTED QUESTION MARK
'\u05b0' # 0xC0 -> HEBREW POINT SHEVA
'\u05b1' # 0xC1 -> HEBREW POINT HATAF SEGOL
'\u05b2' # 0xC2 -> HEBREW POINT HATAF PATAH
'\u05b3' # 0xC3 -> HEBREW POINT HATAF QAMATS
'\u05b4' # 0xC4 -> HEBREW POINT HIRIQ
'\u05b5' # 0xC5 -> HEBREW POINT TSERE
'\u05b6' # 0xC6 -> HEBREW POINT SEGOL
'\u05b7' # 0xC7 -> HEBREW POINT PATAH
'\u05b8' # 0xC8 -> HEBREW POINT QAMATS
'\u05b9' # 0xC9 -> HEBREW POINT HOLAM
'\ufffe' # 0xCA -> UNDEFINED
'\u05bb' # 0xCB -> HEBREW POINT QUBUTS
'\u05bc' # 0xCC -> HEBREW POINT DAGESH OR MAPIQ
'\u05bd' # 0xCD -> HEBREW POINT METEG
'\u05be' # 0xCE -> HEBREW PUNCTUATION MAQAF
'\u05bf' # 0xCF -> HEBREW POINT RAFE
'\u05c0' # 0xD0 -> HEBREW PUNCTUATION PASEQ
'\u05c1' # 0xD1 -> HEBREW POINT SHIN DOT
'\u05c2' # 0xD2 -> HEBREW POINT SIN DOT
'\u05c3' # 0xD3 -> HEBREW PUNCTUATION SOF PASUQ
'\u05f0' # 0xD4 -> HEBREW LIGATURE YIDDISH DOUBLE VAV
'\u05f1' # 0xD5 -> HEBREW LIGATURE YIDDISH VAV YOD
'\u05f2' # 0xD6 -> HEBREW LIGATURE YIDDISH DOUBLE YOD
'\u05f3' # 0xD7 -> HEBREW PUNCTUATION GERESH
'\u05f4' # 0xD8 -> HEBREW PUNCTUATION GERSHAYIM
'\ufffe' # 0xD9 -> UNDEFINED
'\ufffe' # 0xDA -> UNDEFINED
'\ufffe' # 0xDB -> UNDEFINED
'\ufffe' # 0xDC -> UNDEFINED
'\ufffe' # 0xDD -> UNDEFINED
'\ufffe' # 0xDE -> UNDEFINED
'\ufffe' # 0xDF -> UNDEFINED
'\u05d0' # 0xE0 -> HEBREW LETTER ALEF
'\u05d1' # 0xE1 -> HEBREW LETTER BET
'\u05d2' # 0xE2 -> HEBREW LETTER GIMEL
'\u05d3' # 0xE3 -> HEBREW LETTER DALET
'\u05d4' # 0xE4 -> HEBREW LETTER HE
'\u05d5' # 0xE5 -> HEBREW LETTER VAV
'\u05d6' # 0xE6 -> HEBREW LETTER ZAYIN
'\u05d7' # 0xE7 -> HEBREW LETTER HET
'\u05d8' # 0xE8 -> HEBREW LETTER TET
'\u05d9' # 0xE9 -> HEBREW LETTER YOD
'\u05da' # 0xEA -> HEBREW LETTER FINAL KAF
'\u05db' # 0xEB -> HEBREW LETTER KAF
'\u05dc' # 0xEC -> HEBREW LETTER LAMED
'\u05dd' # 0xED -> HEBREW LETTER FINAL MEM
'\u05de' # 0xEE -> HEBREW LETTER MEM
'\u05df' # 0xEF -> HEBREW LETTER FINAL NUN
'\u05e0' # 0xF0 -> HEBREW LETTER NUN
'\u05e1' # 0xF1 -> HEBREW LETTER SAMEKH
'\u05e2' # 0xF2 -> HEBREW LETTER AYIN
'\u05e3' # 0xF3 -> HEBREW LETTER FINAL PE
'\u05e4' # 0xF4 -> HEBREW LETTER PE
'\u05e5' # 0xF5 -> HEBREW LETTER FINAL TSADI
'\u05e6' # 0xF6 -> HEBREW LETTER TSADI
'\u05e7' # 0xF7 -> HEBREW LETTER QOF
'\u05e8' # 0xF8 -> HEBREW LETTER RESH
'\u05e9' # 0xF9 -> HEBREW LETTER SHIN
'\u05ea' # 0xFA -> HEBREW LETTER TAV
'\ufffe' # 0xFB -> UNDEFINED
'\ufffe' # 0xFC -> UNDEFINED
'\u200e' # 0xFD -> LEFT-TO-RIGHT MARK
'\u200f' # 0xFE -> RIGHT-TO-LEFT MARK
'\ufffe' # 0xFF -> UNDEFINED
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
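# --- round-trip sketch (illustrative; not emitted by gencodec.py) ---
# CP1255 bytes 0xE0/0xE1 decode to HEBREW ALEF/BET via the table above and
# encode back to the same bytes through the derived encoding_table
_text, _consumed = Codec().decode(b'\xe0\xe1')
assert _text == '\u05d0\u05d1' and _consumed == 2
assert Codec().encode(_text)[0] == b'\xe0\xe1'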
|
henry0312/LightGBM
|
refs/heads/master
|
examples/python-guide/dask/ranking.py
|
2
|
import os
import dask.array as da
import numpy as np
from distributed import Client, LocalCluster
from sklearn.datasets import load_svmlight_file
import lightgbm as lgb
if __name__ == "__main__":
print("loading data")
X, y = load_svmlight_file(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'../../lambdarank/rank.train'))
group = np.loadtxt(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'../../lambdarank/rank.train.query'))
print("initializing a Dask cluster")
cluster = LocalCluster(n_workers=2)
client = Client(cluster)
print("created a Dask LocalCluster")
print("distributing training data on the Dask cluster")
# split training data into two partitions
rows_in_part1 = int(np.sum(group[:100]))
rows_in_part2 = X.shape[0] - rows_in_part1
num_features = X.shape[1]
# make this array dense because we're splitting across
# a sparse boundary to partition the data
X = X.todense()
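# da.from_array takes explicit per-axis chunk sizes: two row blocks (one per
# worker partition) and a single block spanning all feature columns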
dX = da.from_array(
x=X,
chunks=[
(rows_in_part1, rows_in_part2),
(num_features,)
]
)
dy = da.from_array(
x=y,
chunks=[
(rows_in_part1, rows_in_part2),
]
)
dg = da.from_array(
x=group,
chunks=[
(100, group.size - 100)
]
)
print("beginning training")
dask_model = lgb.DaskLGBMRanker(n_estimators=10)
dask_model.fit(dX, dy, group=dg)
assert dask_model.fitted_
print("done training")
|
vseledkin/neon
|
refs/heads/master
|
neon/data/dataiterator.py
|
10
|
# ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
Defines basic input dataset types.
"""
import logging
import numpy as np
from neon import NervanaObject
logger = logging.getLogger(__name__)
class DataIterator(NervanaObject):
"""
This generic class defines an interface to iterate over minibatches of
data that has been preloaded into memory. This may be used when the
entire dataset is small enough to fit within memory.
"""
def __init__(self, X, y=None, nclass=None, lshape=None, make_onehot=True):
"""
Implements loading of given data into backend tensor objects. If the
backend is specific to an accelerator device, the data is copied over
to that device.
Args:
X (ndarray, shape: [# examples, feature size]): Input features within the
dataset.
y (ndarray, shape:[# examples, 1], optional): Labels corresponding to the
input features.
If absent, the input features themselves will be returned as
target values (AutoEncoder)
nclass (int, optional): The number of possible types of labels.
(not necessary if not providing labels)
lshape (tuple, optional): Local shape for the input features
(e.g. height, width, channel for images)
make_onehot (bool, optional): True if y is a label that has to be converted to one-hot;
False if y doesn't need to be converted to one-hot
(e.g. in a CAE)
"""
# Treat singletons like list so that iteration follows same syntax
X = X if isinstance(X, list) else [X]
self.ndata = len(X[0])
self.start = 0
# on device tensor with full dataset
self.Xdev = [self.be.array(x) for x in X]
# mini-batch sized buffer
self.Xbuf = [self.be.iobuf(x.shape[1]) for x in X]
if lshape is not None:
for xbuf in self.Xbuf:
xbuf.lshape = lshape
assert self.ndata > self.be.bsz
self.ybuf = None
self.make_onehot = make_onehot
if y is not None:
if make_onehot:
assert nclass is not None
self.ydev = self.be.array(y.reshape((-1, 1)), dtype=np.int32)
self.ybuf = self.be.iobuf(nclass)
else:
self.ydev = self.be.array(y)
self.ybuf = self.be.iobuf(y.shape[1])
@property
def nbatches(self):
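# ceil((ndata - start) / bsz), written as negated floor division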
return -((self.start - self.ndata) // self.be.bsz)
def reset(self):
"""
For resetting the starting index of this dataset back to zero.
Relevant for when one wants to call repeated evaluations on the dataset
but doesn't want to wrap around for the last uneven minibatch.
Not necessary when ndata is divisible by the batch size.
"""
self.start = 0
def __iter__(self):
"""
Defines a generator that can be used to iterate over this dataset.
Yields:
tuple: The next minibatch. A minibatch includes both features and
labels.
"""
for i1 in range(self.start, self.ndata, self.be.bsz):
i2 = min(i1 + self.be.bsz, self.ndata)
bsz = i2 - i1
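# when the final minibatch is short, advance start past zero and pad the
# buffers below with rows taken from the beginning of the dataset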
if i2 == self.ndata:
self.start = self.be.bsz - bsz
for xbuf, xdev in zip(self.Xbuf, self.Xdev):
xbuf[:, :bsz] = xdev[i1:i2].T
if self.be.bsz > bsz:
xbuf[:, bsz:] = xdev[:(self.be.bsz - bsz)].T
if self.ybuf is not None:
if self.make_onehot:
self.ybuf[:, :bsz] = self.be.onehot(
self.ydev[i1:i2], axis=0)
if self.be.bsz > bsz:
self.ybuf[:, bsz:] = self.be.onehot(
self.ydev[:(self.be.bsz - bsz)], axis=0)
else:
self.ybuf[:, :bsz] = self.ydev[i1:i2].T
if self.be.bsz > bsz:
self.ybuf[:, bsz:] = self.ydev[:(self.be.bsz - bsz)].T
inputs = self.Xbuf[0] if len(self.Xbuf) == 1 else self.Xbuf
targets = self.ybuf if self.ybuf else inputs
yield (inputs, targets)
if __name__ == '__main__':
from neon.data import load_mnist
(X_train, y_train), (X_test, y_test) = load_mnist()
from neon.backends.nervanagpu import NervanaGPU
ng = NervanaGPU(0, device_id=1)
NervanaObject.be = ng
ng.bsz = 128
train_set = DataIterator(
[X_test[:1000], X_test[:1000]], y_test[:1000], nclass=10)
for i in range(3):
for bidx, (X_batch, y_batch) in enumerate(train_set):
print bidx, train_set.start
pass
|
rohitwaghchaure/alec_frappe5_erpnext
|
refs/heads/develop
|
erpnext/config/accounts.py
|
22
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"label": _("Documents"),
"icon": "icon-star",
"items": [
{
"type": "doctype",
"name": "Journal Entry",
"description": _("Accounting journal entries.")
},
{
"type": "doctype",
"name": "Sales Invoice",
"description": _("Bills raised to Customers.")
},
{
"type": "doctype",
"name": "Purchase Invoice",
"description": _("Bills raised by Suppliers.")
},
{
"type": "doctype",
"name": "Customer",
"description": _("Customer database.")
},
{
"type": "doctype",
"name": "Supplier",
"description": _("Supplier database.")
},
{
"type": "page",
"name": "Accounts Browser",
"icon": "icon-sitemap",
"label": _("Chart of Accounts"),
"route": "Accounts Browser/Account",
"description": _("Tree of finanial accounts."),
"doctype": "Account",
},
]
},
{
"label": _("Tools"),
"icon": "icon-wrench",
"items": [
{
"type": "doctype",
"name": "Bank Reconciliation",
"description": _("Update bank payment dates with journals.")
},
{
"type": "doctype",
"name": "Payment Reconciliation",
"description": _("Match non-linked Invoices and Payments.")
},
{
"type": "doctype",
"name": "Period Closing Voucher",
"description": _("Close Balance Sheet and book Profit or Loss.")
},
{
"type": "doctype",
"name": "Payment Tool",
"description": _("Create Payment Entries against Orders or Invoices.")
},
]
},
{
"label": _("Setup"),
"icon": "icon-cog",
"items": [
{
"type": "doctype",
"name": "Company",
"description": _("Company (not Customer or Supplier) master.")
},
{
"type": "doctype",
"name": "Fiscal Year",
"description": _("Financial / accounting year.")
},
{
"type": "page",
"name": "Accounts Browser",
"icon": "icon-sitemap",
"label": _("Chart of Accounts"),
"route": "Accounts Browser/Account",
"description": _("Tree of finanial accounts."),
"doctype": "Account",
},
{
"type": "page",
"name": "Accounts Browser",
"icon": "icon-sitemap",
"label": _("Chart of Cost Centers"),
"route": "Accounts Browser/Cost Center",
"description": _("Tree of finanial Cost Centers."),
"doctype": "Cost Center",
},
{
"type": "doctype",
"name": "Accounts Settings",
"description": _("Default settings for accounting transactions.")
},
{
"type": "doctype",
"name": "Sales Taxes and Charges Template",
"description": _("Tax template for selling transactions.")
},
{
"type": "doctype",
"name": "Purchase Taxes and Charges Template",
"description": _("Tax template for buying transactions.")
},
{
"type": "doctype",
"name": "POS Profile",
"label": _("Point-of-Sale Profile"),
"description": _("Rules to calculate shipping amount for a sale")
},
{
"type": "doctype",
"name": "Shipping Rule",
"description": _("Rules for adding shipping costs.")
},
{
"type": "doctype",
"name": "Pricing Rule",
"description": _("Rules for applying pricing and discount.")
},
{
"type": "doctype",
"name": "Currency",
"description": _("Enable / disable currencies.")
},
{
"type": "doctype",
"name": "Currency Exchange",
"description": _("Currency exchange rate master.")
},
{
"type":"doctype",
"name": "Monthly Distribution",
"description": _("Seasonality for setting budgets, targets etc.")
},
{
"type": "doctype",
"name":"Terms and Conditions",
"label": _("Terms and Conditions Template"),
"description": _("Template of terms or contract.")
},
{
"type": "doctype",
"name":"Mode of Payment",
"description": _("e.g. Bank, Cash, Credit Card")
},
{
"type": "doctype",
"name":"C-Form",
"description": _("C-Form records"),
"country": "India"
}
]
},
{
"label": _("Main Reports"),
"icon": "icon-table",
"items": [
{
"type": "report",
"name":"General Ledger",
"doctype": "GL Entry",
"is_query_report": True,
},
{
"type": "report",
"name": "Trial Balance",
"doctype": "GL Entry",
"is_query_report": True,
},
{
"type": "report",
"name": "Gross Profit",
"doctype": "Sales Invoice",
"is_query_report": True
},
{
"type": "report",
"name": "Accounts Receivable",
"doctype": "Sales Invoice",
"is_query_report": True
},
{
"type": "report",
"name": "Accounts Payable",
"doctype": "Purchase Invoice",
"is_query_report": True
},
{
"type": "report",
"name": "Sales Register",
"doctype": "Sales Invoice",
"is_query_report": True
},
{
"type": "report",
"name": "Purchase Register",
"doctype": "Purchase Invoice",
"is_query_report": True
},
{
"type": "report",
"name": "Balance Sheet",
"doctype": "GL Entry",
"is_query_report": True
},
{
"type": "report",
"name": "Profit and Loss Statement",
"doctype": "GL Entry",
"is_query_report": True
},
{
"type": "page",
"name": "financial-analytics",
"label": _("Financial Analytics"),
"icon": "icon-bar-chart",
}
]
},
{
"label": _("Standard Reports"),
"icon": "icon-list",
"items": [
{
"type": "report",
"name": "Bank Reconciliation Statement",
"is_query_report": True,
"doctype": "Journal Entry"
},
{
"type": "report",
"name": "Ordered Items To Be Billed",
"is_query_report": True,
"doctype": "Sales Invoice"
},
{
"type": "report",
"name": "Delivered Items To Be Billed",
"is_query_report": True,
"doctype": "Sales Invoice"
},
{
"type": "report",
"name": "Purchase Order Items To Be Billed",
"is_query_report": True,
"doctype": "Purchase Invoice"
},
{
"type": "report",
"name": "Received Items To Be Billed",
"is_query_report": True,
"doctype": "Purchase Invoice"
},
{
"type": "report",
"name": "Bank Clearance Summary",
"is_query_report": True,
"doctype": "Journal Entry"
},
{
"type": "report",
"name": "Payment Period Based On Invoice Date",
"is_query_report": True,
"doctype": "Journal Entry"
},
{
"type": "report",
"name": "Sales Partners Commission",
"is_query_report": True,
"doctype": "Sales Invoice"
},
{
"type": "report",
"name": "Item-wise Sales Register",
"is_query_report": True,
"doctype": "Sales Invoice"
},
{
"type": "report",
"name": "Item-wise Purchase Register",
"is_query_report": True,
"doctype": "Purchase Invoice"
},
{
"type": "report",
"name": "Budget Variance Report",
"is_query_report": True,
"doctype": "Cost Center"
},
{
"type": "report",
"name": "Purchase Invoice Trends",
"is_query_report": True,
"doctype": "Purchase Invoice"
},
{
"type": "report",
"name": "Sales Invoice Trends",
"is_query_report": True,
"doctype": "Sales Invoice"
},
{
"type": "report",
"name": "Accounts Receivable Summary",
"doctype": "Sales Invoice",
"is_query_report": True
},
{
"type": "report",
"name": "Accounts Payable Summary",
"doctype": "Purchase Invoice",
"is_query_report": True
},
{
"type": "report",
"is_query_report": True,
"name": "Customer Credit Balance",
"doctype": "Customer"
},
]
},
{
"label": _("Help"),
"icon": "icon-facetime-video",
"items": [
{
"type": "help",
"label": _("Chart of Accounts"),
"youtube_id": "DyR-DST-PyA"
},
{
"type": "help",
"label": _("Opening Accounting Balance"),
"youtube_id": "kdgM20Q-q68"
},
{
"type": "help",
"label": _("Setting up Taxes"),
"youtube_id": "nQ1zZdPgdaQ"
}
]
}
]
|
naparuba/opsbro
|
refs/heads/master
|
opsbro/misc/internalcherrypy/cherrypy/wsgiserver/__init__.py
|
238
|
__all__ = ['HTTPRequest', 'HTTPConnection', 'HTTPServer',
'SizeCheckWrapper', 'KnownLengthRFile', 'ChunkedRFile',
'MaxSizeExceeded', 'NoSSLError', 'FatalSSLAlert',
'WorkerThread', 'ThreadPool', 'SSLAdapter',
'CherryPyWSGIServer',
'Gateway', 'WSGIGateway', 'WSGIGateway_10', 'WSGIGateway_u0',
'WSGIPathInfoDispatcher', 'get_ssl_adapter_class']
import sys
if sys.version_info < (3, 0):
from wsgiserver2 import *
else:
# Le sigh. Boo for backward-incompatible syntax.
exec('from .wsgiserver3 import *')
|
etherkit/OpenBeacon2
|
refs/heads/master
|
macos/venv/lib/python3.8/site-packages/_pyinstaller_hooks_contrib/hooks/stdhooks/hook-rdflib.py
|
3
|
# ------------------------------------------------------------------
# Copyright (c) 2020 PyInstaller Development Team.
#
# This file is distributed under the terms of the GNU General Public
# License (version 2.0 or later).
#
# The full license is available in LICENSE.GPL.txt, distributed with
# this software.
#
# SPDX-License-Identifier: GPL-2.0-or-later
# ------------------------------------------------------------------
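# rdflib discovers its parser/serializer plugins dynamically at runtime, so
# PyInstaller's static import analysis cannot see them; collect them all.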
from PyInstaller.utils.hooks import collect_submodules
hiddenimports = collect_submodules('rdflib.plugins')
|
kojiagile/CLAtoolkit
|
refs/heads/koji
|
clatoolkit_project/xapi/tincan/typed_list.py
|
7
|
# Copyright 2014 Rustici Software
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tincan.serializable_base import SerializableBase
"""
.. module:: typed_list
:synopsis: A wrapper for a list that ensures the list consists of only one type
"""
class TypedList(list, SerializableBase):
_cls = None
def __init__(self, *args, **kwargs):
self._check_cls()
new_args = [self._make_cls(v) for v in list(*args, **kwargs)]
super(TypedList, self).__init__(new_args)
def __setitem__(self, ind, value):
self._check_cls()
value = self._make_cls(value)
super(TypedList, self).__setitem__(ind, value)
def _check_cls(self):
"""If self._cls is not set, raises ValueError.
:raises: ValueError
"""
if self._cls is None:
raise ValueError("_cls has not been set")
def _make_cls(self, value):
"""If value is not instance of self._cls, converts and returns
it. Otherwise, returns value.
:param value: the thing to make a self._cls from
:rtype self._cls
"""
if isinstance(value, self._cls):
return value
return self._cls(value)
def append(self, value):
self._check_cls()
value = self._make_cls(value)
super(TypedList, self).append(value)
def extend(self, value):
self._check_cls()
new_args = [self._make_cls(v) for v in value]
super(TypedList, self).extend(new_args)
def insert(self, ind, value):
self._check_cls()
value = self._make_cls(value)
super(TypedList, self).insert(ind, value)
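# illustrative usage (hypothetical subclass, not part of tincan): binding _cls
# makes every mutation coerce its input to that type
_IntList = type('_IntList', (TypedList,), {'_cls': int})
_il = _IntList(['1', 2])
_il.append('3')
assert _il == [1, 2, 3]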
|
petchat/streamparse
|
refs/heads/master
|
streamparse/cli/common.py
|
6
|
"""
Functions for adding common CLI arguments to argparse sub-commands.
"""
def add_ackers(parser):
""" Add --ackers option to parser """
parser.add_argument('-a', '--ackers',
help='Set number of acker bolts. Takes precedence over '
'--par if both set.')
def add_debug(parser):
""" Add --debug option to parser """
parser.add_argument('-d', '--debug',
action='store_true',
help='Set topology.debug and produce debugging output.')
def add_environment(parser):
""" Add --environment option to parser """
parser.add_argument('-e', '--environment',
help='The environment to use for the command. '
'Corresponds to an environment in your '
'"envs" dictionary in config.json. If you '
'only have one environment specified, '
'streamparse will automatically use this.')
def add_name(parser):
""" Add --name option to parser """
parser.add_argument('-n', '--name',
help='The name of the topology to act on. If you have '
'only one topology defined in your "topologies" '
'directory, streamparse will use it '
'automatically.')
def add_options(parser):
""" Add --option options to parser """
parser.add_argument('-o', '--option',
dest='options',
action='append',
help='Topology option to use upon submit. For example,'
' "-o topology.debug=true" is equivalent to '
'"--debug". May be repeated multiple for multiple'
' options.')
def add_par(parser):
""" Add --par option to parser """
parser.add_argument('-p', '--par',
default=2,
type=int,
help='Parallelism of topology; conveniently sets '
'number of Storm workers and acker bolts at once '
'to the passed value. (default: %(default)s)')
def add_pattern(parser):
""" Add --pattern option to parser """
parser.add_argument('--pattern',
help='Pattern of log files to operate on.')
def add_simple_jar(parser):
""" Add --simple_jar option to parser. """
parser.add_argument("-s", "--simple_jar",
action='store_true',
help='Instead of creating an Uber-JAR for the '
'topology, which contains all of its JVM '
'dependencies, create a simple JAR with just the '
'code for the project. This is useful when your '
'project is pure Python and has no JVM '
'dependencies.')
def add_wait(parser):
""" Add --wait option to parser """
parser.add_argument('--wait',
type=int,
default=5,
help='Seconds to wait before killing topology. '
'(default: %(default)s)')
def add_workers(parser):
""" Add --workers option to parser """
parser.add_argument('-w', '--workers',
type=int,
help='Set number of Storm workers. Takes precedence '
'over --par if both set.')
def resolve_ackers_workers(args):
""" Set --ackers and --workers to --par if they're None. """
if args.ackers is None:
args.ackers = args.par
if args.workers is None:
args.workers = args.par
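# wiring sketch (illustrative; the prog name and argv here are assumptions):
# the helpers compose onto any argparse parser or sub-parser
import argparse
_parser = argparse.ArgumentParser(prog='sparse')
add_par(_parser)
add_ackers(_parser)
add_workers(_parser)
_args = _parser.parse_args(['--par', '4'])
resolve_ackers_workers(_args)
assert _args.ackers == 4 and _args.workers == 4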
|
efortuna/AndroidSDKClone
|
refs/heads/master
|
ndk/prebuilt/linux-x86_64/lib/python2.7/test/test_exceptions.py
|
46
|
# Python test set -- part 5, built-in exceptions
import os
import sys
import unittest
import pickle, cPickle
from test.test_support import (TESTFN, unlink, run_unittest, captured_output,
check_warnings, cpython_only)
from test.test_pep352 import ignore_deprecation_warnings
# XXX This is not really enough, each *operation* should be tested!
class ExceptionTests(unittest.TestCase):
def testReload(self):
# Reloading the built-in exceptions module failed prior to Py2.2, while it
# should act the same as reloading built-in sys.
try:
from imp import reload
import exceptions
reload(exceptions)
except ImportError, e:
self.fail("reloading exceptions: %s" % e)
def raise_catch(self, exc, excname):
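# both the old-style ("raise exc, arg") and new-style ("raise exc(arg)")
# spellings must stringify to the same message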
try:
raise exc, "spam"
except exc, err:
buf1 = str(err)
try:
raise exc("spam")
except exc, err:
buf2 = str(err)
self.assertEqual(buf1, buf2)
self.assertEqual(exc.__name__, excname)
def testRaising(self):
self.raise_catch(AttributeError, "AttributeError")
self.assertRaises(AttributeError, getattr, sys, "undefined_attribute")
self.raise_catch(EOFError, "EOFError")
fp = open(TESTFN, 'w')
fp.close()
fp = open(TESTFN, 'r')
savestdin = sys.stdin
try:
try:
sys.stdin = fp
x = raw_input()
except EOFError:
pass
finally:
sys.stdin = savestdin
fp.close()
unlink(TESTFN)
self.raise_catch(IOError, "IOError")
self.assertRaises(IOError, open, 'this file does not exist', 'r')
self.raise_catch(ImportError, "ImportError")
self.assertRaises(ImportError, __import__, "undefined_module")
self.raise_catch(IndexError, "IndexError")
x = []
self.assertRaises(IndexError, x.__getitem__, 10)
self.raise_catch(KeyError, "KeyError")
x = {}
self.assertRaises(KeyError, x.__getitem__, 'key')
self.raise_catch(KeyboardInterrupt, "KeyboardInterrupt")
self.raise_catch(MemoryError, "MemoryError")
self.raise_catch(NameError, "NameError")
try: x = undefined_variable
except NameError: pass
self.raise_catch(OverflowError, "OverflowError")
x = 1
for dummy in range(128):
x += x # this simply shouldn't blow up
self.raise_catch(RuntimeError, "RuntimeError")
self.raise_catch(SyntaxError, "SyntaxError")
try: exec '/\n'
except SyntaxError: pass
self.raise_catch(IndentationError, "IndentationError")
self.raise_catch(TabError, "TabError")
# can only be tested under -tt, and is the only test for -tt
#try: compile("try:\n\t1/0\n \t1/0\nfinally:\n pass\n", '<string>', 'exec')
#except TabError: pass
#else: self.fail("TabError not raised")
self.raise_catch(SystemError, "SystemError")
self.raise_catch(SystemExit, "SystemExit")
self.assertRaises(SystemExit, sys.exit, 0)
self.raise_catch(TypeError, "TypeError")
try: [] + ()
except TypeError: pass
self.raise_catch(ValueError, "ValueError")
self.assertRaises(ValueError, chr, 10000)
self.raise_catch(ZeroDivisionError, "ZeroDivisionError")
try: x = 1 // 0
except ZeroDivisionError: pass
self.raise_catch(Exception, "Exception")
try: x = 1 // 0
except Exception, e: pass
def testSyntaxErrorMessage(self):
# make sure the right exception message is raised for each of
# these code fragments
def ckmsg(src, msg):
try:
compile(src, '<fragment>', 'exec')
except SyntaxError, e:
if e.msg != msg:
self.fail("expected %s, got %s" % (msg, e.msg))
else:
self.fail("failed to get expected SyntaxError")
s = '''while 1:
try:
pass
finally:
continue'''
if not sys.platform.startswith('java'):
ckmsg(s, "'continue' not supported inside 'finally' clause")
s = '''if 1:
try:
continue
except:
pass'''
ckmsg(s, "'continue' not properly in loop")
ckmsg("continue\n", "'continue' not properly in loop")
@cpython_only
def testSettingException(self):
# test that setting an exception at the C level works even if the
# exception object can't be constructed.
class BadException:
def __init__(self_):
raise RuntimeError, "can't instantiate BadException"
def test_capi1():
import _testcapi
try:
_testcapi.raise_exception(BadException, 1)
except TypeError, err:
exc, err, tb = sys.exc_info()
co = tb.tb_frame.f_code
self.assertEqual(co.co_name, "test_capi1")
self.assertTrue(co.co_filename.endswith('test_exceptions'+os.extsep+'py'))
else:
self.fail("Expected exception")
def test_capi2():
import _testcapi
try:
_testcapi.raise_exception(BadException, 0)
except RuntimeError, err:
exc, err, tb = sys.exc_info()
co = tb.tb_frame.f_code
self.assertEqual(co.co_name, "__init__")
self.assertTrue(co.co_filename.endswith('test_exceptions'+os.extsep+'py'))
co2 = tb.tb_frame.f_back.f_code
self.assertEqual(co2.co_name, "test_capi2")
else:
self.fail("Expected exception")
if not sys.platform.startswith('java'):
test_capi1()
test_capi2()
def test_WindowsError(self):
try:
WindowsError
except NameError:
pass
else:
self.assertEqual(str(WindowsError(1001)),
"1001")
self.assertEqual(str(WindowsError(1001, "message")),
"[Error 1001] message")
self.assertEqual(WindowsError(1001, "message").errno, 22)
self.assertEqual(WindowsError(1001, "message").winerror, 1001)
@ignore_deprecation_warnings
def testAttributes(self):
# test that exception attributes are happy
exceptionList = [
(BaseException, (), {'message' : '', 'args' : ()}),
(BaseException, (1, ), {'message' : 1, 'args' : (1,)}),
(BaseException, ('foo',),
{'message' : 'foo', 'args' : ('foo',)}),
(BaseException, ('foo', 1),
{'message' : '', 'args' : ('foo', 1)}),
(SystemExit, ('foo',),
{'message' : 'foo', 'args' : ('foo',), 'code' : 'foo'}),
(IOError, ('foo',),
{'message' : 'foo', 'args' : ('foo',), 'filename' : None,
'errno' : None, 'strerror' : None}),
(IOError, ('foo', 'bar'),
{'message' : '', 'args' : ('foo', 'bar'), 'filename' : None,
'errno' : 'foo', 'strerror' : 'bar'}),
(IOError, ('foo', 'bar', 'baz'),
{'message' : '', 'args' : ('foo', 'bar'), 'filename' : 'baz',
'errno' : 'foo', 'strerror' : 'bar'}),
(IOError, ('foo', 'bar', 'baz', 'quux'),
{'message' : '', 'args' : ('foo', 'bar', 'baz', 'quux')}),
(EnvironmentError, ('errnoStr', 'strErrorStr', 'filenameStr'),
{'message' : '', 'args' : ('errnoStr', 'strErrorStr'),
'strerror' : 'strErrorStr', 'errno' : 'errnoStr',
'filename' : 'filenameStr'}),
(EnvironmentError, (1, 'strErrorStr', 'filenameStr'),
{'message' : '', 'args' : (1, 'strErrorStr'), 'errno' : 1,
'strerror' : 'strErrorStr', 'filename' : 'filenameStr'}),
(SyntaxError, (), {'message' : '', 'msg' : None, 'text' : None,
'filename' : None, 'lineno' : None, 'offset' : None,
'print_file_and_line' : None}),
(SyntaxError, ('msgStr',),
{'message' : 'msgStr', 'args' : ('msgStr',), 'text' : None,
'print_file_and_line' : None, 'msg' : 'msgStr',
'filename' : None, 'lineno' : None, 'offset' : None}),
(SyntaxError, ('msgStr', ('filenameStr', 'linenoStr', 'offsetStr',
'textStr')),
{'message' : '', 'offset' : 'offsetStr', 'text' : 'textStr',
'args' : ('msgStr', ('filenameStr', 'linenoStr',
'offsetStr', 'textStr')),
'print_file_and_line' : None, 'msg' : 'msgStr',
'filename' : 'filenameStr', 'lineno' : 'linenoStr'}),
(SyntaxError, ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr',
'textStr', 'print_file_and_lineStr'),
{'message' : '', 'text' : None,
'args' : ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr',
'textStr', 'print_file_and_lineStr'),
'print_file_and_line' : None, 'msg' : 'msgStr',
'filename' : None, 'lineno' : None, 'offset' : None}),
(UnicodeError, (), {'message' : '', 'args' : (),}),
(UnicodeEncodeError, ('ascii', u'a', 0, 1, 'ordinal not in range'),
{'message' : '', 'args' : ('ascii', u'a', 0, 1,
'ordinal not in range'),
'encoding' : 'ascii', 'object' : u'a',
'start' : 0, 'reason' : 'ordinal not in range'}),
(UnicodeDecodeError, ('ascii', '\xff', 0, 1, 'ordinal not in range'),
{'message' : '', 'args' : ('ascii', '\xff', 0, 1,
'ordinal not in range'),
'encoding' : 'ascii', 'object' : '\xff',
'start' : 0, 'reason' : 'ordinal not in range'}),
(UnicodeTranslateError, (u"\u3042", 0, 1, "ouch"),
{'message' : '', 'args' : (u'\u3042', 0, 1, 'ouch'),
'object' : u'\u3042', 'reason' : 'ouch',
'start' : 0, 'end' : 1}),
]
try:
exceptionList.append(
(WindowsError, (1, 'strErrorStr', 'filenameStr'),
{'message' : '', 'args' : (1, 'strErrorStr'),
'strerror' : 'strErrorStr', 'winerror' : 1,
'errno' : 22, 'filename' : 'filenameStr'})
)
except NameError:
pass
for exc, args, expected in exceptionList:
try:
raise exc(*args)
except BaseException, e:
if type(e) is not exc:
raise
# Verify module name
self.assertEqual(type(e).__module__, 'exceptions')
# Verify no ref leaks in Exc_str()
s = str(e)
for checkArgName in expected:
self.assertEqual(repr(getattr(e, checkArgName)),
repr(expected[checkArgName]),
'exception "%s", attribute "%s"' %
(repr(e), checkArgName))
# test for pickling support
for p in pickle, cPickle:
for protocol in range(p.HIGHEST_PROTOCOL + 1):
new = p.loads(p.dumps(e, protocol))
for checkArgName in expected:
got = repr(getattr(new, checkArgName))
want = repr(expected[checkArgName])
self.assertEqual(got, want,
'pickled "%r", attribute "%s"' %
(e, checkArgName))
def testDeprecatedMessageAttribute(self):
# Accessing BaseException.message and relying on its value set by
# BaseException.__init__ triggers a deprecation warning.
exc = BaseException("foo")
with check_warnings(("BaseException.message has been deprecated "
"as of Python 2.6", DeprecationWarning)) as w:
self.assertEqual(exc.message, "foo")
self.assertEqual(len(w.warnings), 1)
def testRegularMessageAttribute(self):
# Accessing BaseException.message after explicitly setting a value
# for it does not trigger a deprecation warning.
exc = BaseException("foo")
exc.message = "bar"
with check_warnings(quiet=True) as w:
self.assertEqual(exc.message, "bar")
self.assertEqual(len(w.warnings), 0)
# Deleting the message is supported, too.
del exc.message
with self.assertRaises(AttributeError):
exc.message
@ignore_deprecation_warnings
def testPickleMessageAttribute(self):
# Pickling with message attribute must work, as well.
e = Exception("foo")
f = Exception("foo")
f.message = "bar"
for p in pickle, cPickle:
ep = p.loads(p.dumps(e))
self.assertEqual(ep.message, "foo")
fp = p.loads(p.dumps(f))
self.assertEqual(fp.message, "bar")
@ignore_deprecation_warnings
def testSlicing(self):
# Test that you can slice an exception directly instead of requiring
# going through the 'args' attribute.
args = (1, 2, 3)
exc = BaseException(*args)
self.assertEqual(exc[:], args)
self.assertEqual(exc.args[:], args)
def testKeywordArgs(self):
        # test that builtin exceptions don't take keyword args,
# but user-defined subclasses can if they want
self.assertRaises(TypeError, BaseException, a=1)
class DerivedException(BaseException):
def __init__(self, fancy_arg):
BaseException.__init__(self)
self.fancy_arg = fancy_arg
x = DerivedException(fancy_arg=42)
self.assertEqual(x.fancy_arg, 42)
def testInfiniteRecursion(self):
def f():
return f()
self.assertRaises(RuntimeError, f)
def g():
try:
return g()
except ValueError:
return -1
# The test prints an unraisable recursion error when
# doing "except ValueError", this is because subclass
# checking has recursion checking too.
with captured_output("stderr"):
try:
g()
except RuntimeError:
pass
except:
                self.fail("Should have raised RuntimeError")
            else:
                self.fail("Should have raised RuntimeError")
def testUnicodeStrUsage(self):
# Make sure both instances and classes have a str and unicode
# representation.
self.assertTrue(str(Exception))
self.assertTrue(unicode(Exception))
self.assertTrue(str(Exception('a')))
self.assertTrue(unicode(Exception(u'a')))
self.assertTrue(unicode(Exception(u'\xe1')))
def testUnicodeChangeAttributes(self):
# See issue 7309. This was a crasher.
u = UnicodeEncodeError('baz', u'xxxxx', 1, 5, 'foo')
self.assertEqual(str(u), "'baz' codec can't encode characters in position 1-4: foo")
u.end = 2
self.assertEqual(str(u), "'baz' codec can't encode character u'\\x78' in position 1: foo")
u.end = 5
u.reason = 0x345345345345345345
self.assertEqual(str(u), "'baz' codec can't encode characters in position 1-4: 965230951443685724997")
u.encoding = 4000
self.assertEqual(str(u), "'4000' codec can't encode characters in position 1-4: 965230951443685724997")
u.start = 1000
self.assertEqual(str(u), "'4000' codec can't encode characters in position 1000-4: 965230951443685724997")
u = UnicodeDecodeError('baz', 'xxxxx', 1, 5, 'foo')
self.assertEqual(str(u), "'baz' codec can't decode bytes in position 1-4: foo")
u.end = 2
self.assertEqual(str(u), "'baz' codec can't decode byte 0x78 in position 1: foo")
u.end = 5
u.reason = 0x345345345345345345
self.assertEqual(str(u), "'baz' codec can't decode bytes in position 1-4: 965230951443685724997")
u.encoding = 4000
self.assertEqual(str(u), "'4000' codec can't decode bytes in position 1-4: 965230951443685724997")
u.start = 1000
self.assertEqual(str(u), "'4000' codec can't decode bytes in position 1000-4: 965230951443685724997")
u = UnicodeTranslateError(u'xxxx', 1, 5, 'foo')
self.assertEqual(str(u), "can't translate characters in position 1-4: foo")
u.end = 2
self.assertEqual(str(u), "can't translate character u'\\x78' in position 1: foo")
u.end = 5
u.reason = 0x345345345345345345
self.assertEqual(str(u), "can't translate characters in position 1-4: 965230951443685724997")
u.start = 1000
self.assertEqual(str(u), "can't translate characters in position 1000-4: 965230951443685724997")
def test_badisinstance(self):
# Bug #2542: if issubclass(e, MyException) raises an exception,
# it should be ignored
class Meta(type):
def __subclasscheck__(cls, subclass):
raise ValueError()
class MyException(Exception):
__metaclass__ = Meta
pass
with captured_output("stderr") as stderr:
try:
raise KeyError()
except MyException, e:
self.fail("exception should not be a MyException")
except KeyError:
pass
except:
self.fail("Should have raised KeyError")
else:
self.fail("Should have raised KeyError")
with captured_output("stderr") as stderr:
def g():
try:
return g()
except RuntimeError:
return sys.exc_info()
e, v, tb = g()
self.assertTrue(e is RuntimeError, e)
self.assertIn("maximum recursion depth exceeded", str(v))
def test_new_returns_invalid_instance(self):
# See issue #11627.
class MyException(Exception):
def __new__(cls, *args):
return object()
with self.assertRaises(TypeError):
raise MyException
def test_assert_with_tuple_arg(self):
try:
assert False, (3,)
except AssertionError as e:
self.assertEqual(str(e), "(3,)")
def test_bad_exception_clearing(self):
# See issue 16445: use of Py_XDECREF instead of Py_CLEAR in
# BaseException_set_message gave a possible way to segfault the
# interpreter.
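        # A deliberately nasty str subclass: its __del__ (whose sole parameter
        # plays the role of 'self') deletes e.message again while the attribute
        # is being cleared, which used to touch already-freed memory.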
class Nasty(str):
def __del__(message):
del e.message
e = ValueError(Nasty("msg"))
e.args = ()
del e.message
# Helper class used by TestSameStrAndUnicodeMsg
class ExcWithOverriddenStr(Exception):
"""Subclass of Exception that accepts a keyword 'msg' arg that is
returned by __str__. 'msg' won't be included in self.args"""
def __init__(self, *args, **kwargs):
self.msg = kwargs.pop('msg') # msg should always be present
super(ExcWithOverriddenStr, self).__init__(*args, **kwargs)
def __str__(self):
return self.msg
class TestSameStrAndUnicodeMsg(unittest.TestCase):
"""unicode(err) should return the same message of str(err). See #6108"""
def check_same_msg(self, exc, msg):
"""Helper function that checks if str(exc) == unicode(exc) == msg"""
self.assertEqual(str(exc), msg)
self.assertEqual(str(exc), unicode(exc))
def test_builtin_exceptions(self):
"""Check same msg for built-in exceptions"""
# These exceptions implement a __str__ method that uses the args
# to create a better error message. unicode(e) should return the same
# message.
exceptions = [
SyntaxError('invalid syntax', ('<string>', 1, 3, '2+*3')),
IOError(2, 'No such file or directory'),
KeyError('both should have the same quotes'),
UnicodeDecodeError('ascii', '\xc3\xa0', 0, 1,
'ordinal not in range(128)'),
UnicodeEncodeError('ascii', u'\u1234', 0, 1,
'ordinal not in range(128)')
]
for exception in exceptions:
self.assertEqual(str(exception), unicode(exception))
def test_0_args(self):
"""Check same msg for Exception with 0 args"""
# str() and unicode() on an Exception with no args should return an
# empty string
self.check_same_msg(Exception(), '')
def test_0_args_with_overridden___str__(self):
"""Check same msg for exceptions with 0 args and overridden __str__"""
# str() and unicode() on an exception with overridden __str__ that
# returns an ascii-only string should return the same string
for msg in ('foo', u'foo'):
self.check_same_msg(ExcWithOverriddenStr(msg=msg), msg)
# if __str__ returns a non-ascii unicode string str() should fail
# but unicode() should return the unicode string
e = ExcWithOverriddenStr(msg=u'f\xf6\xf6') # no args
self.assertRaises(UnicodeEncodeError, str, e)
self.assertEqual(unicode(e), u'f\xf6\xf6')
def test_1_arg(self):
"""Check same msg for Exceptions with 1 arg"""
for arg in ('foo', u'foo'):
self.check_same_msg(Exception(arg), arg)
# if __str__ is not overridden and self.args[0] is a non-ascii unicode
# string, str() should try to return str(self.args[0]) and fail.
# unicode() should return unicode(self.args[0]) and succeed.
e = Exception(u'f\xf6\xf6')
self.assertRaises(UnicodeEncodeError, str, e)
self.assertEqual(unicode(e), u'f\xf6\xf6')
def test_1_arg_with_overridden___str__(self):
"""Check same msg for exceptions with overridden __str__ and 1 arg"""
# when __str__ is overridden and __unicode__ is not implemented
# unicode(e) returns the same as unicode(e.__str__()).
for msg in ('foo', u'foo'):
self.check_same_msg(ExcWithOverriddenStr('arg', msg=msg), msg)
# if __str__ returns a non-ascii unicode string, str() should fail
# but unicode() should succeed.
e = ExcWithOverriddenStr('arg', msg=u'f\xf6\xf6') # 1 arg
self.assertRaises(UnicodeEncodeError, str, e)
self.assertEqual(unicode(e), u'f\xf6\xf6')
def test_many_args(self):
"""Check same msg for Exceptions with many args"""
argslist = [
(3, 'foo'),
(1, u'foo', 'bar'),
(4, u'f\xf6\xf6', u'bar', 'baz')
]
# both str() and unicode() should return a repr() of the args
for args in argslist:
self.check_same_msg(Exception(*args), repr(args))
def test_many_args_with_overridden___str__(self):
"""Check same msg for exceptions with overridden __str__ and many args"""
# if __str__ returns an ascii string / ascii unicode string
# both str() and unicode() should succeed
for msg in ('foo', u'foo'):
e = ExcWithOverriddenStr('arg1', u'arg2', u'f\xf6\xf6', msg=msg)
self.check_same_msg(e, msg)
# if __str__ returns a non-ascii unicode string, str() should fail
# but unicode() should succeed
e = ExcWithOverriddenStr('arg1', u'f\xf6\xf6', u'arg3', # 3 args
msg=u'f\xf6\xf6')
self.assertRaises(UnicodeEncodeError, str, e)
self.assertEqual(unicode(e), u'f\xf6\xf6')
@cpython_only
def test_exception_with_doc(self):
import _testcapi
doc2 = "This is a test docstring."
doc4 = "This is another test docstring."
self.assertRaises(SystemError, _testcapi.make_exception_with_doc,
"error1")
# test basic usage of PyErr_NewException
error1 = _testcapi.make_exception_with_doc("_testcapi.error1")
self.assertIs(type(error1), type)
self.assertTrue(issubclass(error1, Exception))
self.assertIsNone(error1.__doc__)
# test with given docstring
error2 = _testcapi.make_exception_with_doc("_testcapi.error2", doc2)
self.assertEqual(error2.__doc__, doc2)
# test with explicit base (without docstring)
error3 = _testcapi.make_exception_with_doc("_testcapi.error3",
base=error2)
self.assertTrue(issubclass(error3, error2))
# test with explicit base tuple
class C(object):
pass
error4 = _testcapi.make_exception_with_doc("_testcapi.error4", doc4,
(error3, C))
self.assertTrue(issubclass(error4, error3))
self.assertTrue(issubclass(error4, C))
self.assertEqual(error4.__doc__, doc4)
# test with explicit dictionary
error5 = _testcapi.make_exception_with_doc("_testcapi.error5", "",
error4, {'a': 1})
self.assertTrue(issubclass(error5, error4))
self.assertEqual(error5.a, 1)
self.assertEqual(error5.__doc__, "")
def test_main():
run_unittest(ExceptionTests, TestSameStrAndUnicodeMsg)
if __name__ == '__main__':
test_main()
|
quamilek/django
|
refs/heads/master
|
tests/postgres_tests/test_hstore.py
|
70
|
import json
from django.core import exceptions, serializers
from django.forms import Form
from . import PostgreSQLTestCase
from .models import HStoreModel
try:
from django.contrib.postgres import forms
from django.contrib.postgres.fields import HStoreField
from django.contrib.postgres.validators import KeysValidator
except ImportError:
pass
class SimpleTests(PostgreSQLTestCase):
apps = ['django.contrib.postgres']
def test_save_load_success(self):
value = {'a': 'b'}
instance = HStoreModel(field=value)
instance.save()
reloaded = HStoreModel.objects.get()
self.assertEqual(reloaded.field, value)
def test_null(self):
instance = HStoreModel(field=None)
instance.save()
reloaded = HStoreModel.objects.get()
self.assertEqual(reloaded.field, None)
def test_value_null(self):
value = {'a': None}
instance = HStoreModel(field=value)
instance.save()
reloaded = HStoreModel.objects.get()
self.assertEqual(reloaded.field, value)
class TestQuerying(PostgreSQLTestCase):
def setUp(self):
self.objs = [
HStoreModel.objects.create(field={'a': 'b'}),
HStoreModel.objects.create(field={'a': 'b', 'c': 'd'}),
HStoreModel.objects.create(field={'c': 'd'}),
HStoreModel.objects.create(field={}),
HStoreModel.objects.create(field=None),
]
def test_exact(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__exact={'a': 'b'}),
self.objs[:1]
)
def test_contained_by(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__contained_by={'a': 'b', 'c': 'd'}),
self.objs[:4]
)
def test_contains(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__contains={'a': 'b'}),
self.objs[:2]
)
def test_has_key(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__has_key='c'),
self.objs[1:3]
)
def test_has_keys(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__has_keys=['a', 'c']),
self.objs[1:2]
)
def test_has_any_keys(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__has_any_keys=['a', 'c']),
self.objs[:3]
)
def test_key_transform(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__a='b'),
self.objs[:2]
)
def test_keys(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__keys=['a']),
self.objs[:1]
)
def test_values(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__values=['b']),
self.objs[:1]
)
def test_field_chaining(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__a__contains='b'),
self.objs[:2]
)
def test_keys_contains(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__keys__contains=['a']),
self.objs[:2]
)
def test_values_overlap(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__values__overlap=['b', 'd']),
self.objs[:3]
)
def test_key_isnull(self):
obj = HStoreModel.objects.create(field={'a': None})
self.assertSequenceEqual(
HStoreModel.objects.filter(field__a__isnull=True),
self.objs[2:5] + [obj]
)
self.assertSequenceEqual(
HStoreModel.objects.filter(field__a__isnull=False),
self.objs[:2]
)
def test_usage_in_subquery(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(id__in=HStoreModel.objects.filter(field__a='b')),
self.objs[:2]
)
class TestSerialization(PostgreSQLTestCase):
test_data = '[{"fields": {"field": "{\\"a\\": \\"b\\"}"}, "model": "postgres_tests.hstoremodel", "pk": null}]'
def test_dumping(self):
instance = HStoreModel(field={'a': 'b'})
data = serializers.serialize('json', [instance])
self.assertEqual(json.loads(data), json.loads(self.test_data))
def test_loading(self):
instance = list(serializers.deserialize('json', self.test_data))[0].object
self.assertEqual(instance.field, {'a': 'b'})
class TestValidation(PostgreSQLTestCase):
def test_not_a_string(self):
field = HStoreField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean({'a': 1}, None)
self.assertEqual(cm.exception.code, 'not_a_string')
self.assertEqual(cm.exception.message % cm.exception.params, 'The value of "a" is not a string.')
class TestFormField(PostgreSQLTestCase):
def test_valid(self):
field = forms.HStoreField()
value = field.clean('{"a": "b"}')
self.assertEqual(value, {'a': 'b'})
def test_invalid_json(self):
field = forms.HStoreField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('{"a": "b"')
self.assertEqual(cm.exception.messages[0], 'Could not load JSON data.')
self.assertEqual(cm.exception.code, 'invalid_json')
def test_not_string_values(self):
field = forms.HStoreField()
value = field.clean('{"a": 1}')
self.assertEqual(value, {'a': '1'})
def test_empty(self):
field = forms.HStoreField(required=False)
value = field.clean('')
self.assertEqual(value, {})
def test_model_field_formfield(self):
model_field = HStoreField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, forms.HStoreField)
def test_empty_field_has_not_changed(self):
class HStoreFormTest(Form):
f1 = HStoreField()
form_w_hstore = HStoreFormTest()
self.assertFalse(form_w_hstore.has_changed())
class TestValidator(PostgreSQLTestCase):
def test_simple_valid(self):
validator = KeysValidator(keys=['a', 'b'])
validator({'a': 'foo', 'b': 'bar', 'c': 'baz'})
def test_missing_keys(self):
validator = KeysValidator(keys=['a', 'b'])
with self.assertRaises(exceptions.ValidationError) as cm:
validator({'a': 'foo', 'c': 'baz'})
self.assertEqual(cm.exception.messages[0], 'Some keys were missing: b')
self.assertEqual(cm.exception.code, 'missing_keys')
def test_strict_valid(self):
validator = KeysValidator(keys=['a', 'b'], strict=True)
validator({'a': 'foo', 'b': 'bar'})
def test_extra_keys(self):
validator = KeysValidator(keys=['a', 'b'], strict=True)
with self.assertRaises(exceptions.ValidationError) as cm:
validator({'a': 'foo', 'b': 'bar', 'c': 'baz'})
self.assertEqual(cm.exception.messages[0], 'Some unknown keys were provided: c')
self.assertEqual(cm.exception.code, 'extra_keys')
def test_custom_messages(self):
messages = {
'missing_keys': 'Foobar',
}
validator = KeysValidator(keys=['a', 'b'], strict=True, messages=messages)
with self.assertRaises(exceptions.ValidationError) as cm:
validator({'a': 'foo', 'c': 'baz'})
self.assertEqual(cm.exception.messages[0], 'Foobar')
self.assertEqual(cm.exception.code, 'missing_keys')
with self.assertRaises(exceptions.ValidationError) as cm:
validator({'a': 'foo', 'b': 'bar', 'c': 'baz'})
self.assertEqual(cm.exception.messages[0], 'Some unknown keys were provided: c')
self.assertEqual(cm.exception.code, 'extra_keys')
def test_deconstruct(self):
messages = {
'missing_keys': 'Foobar',
}
validator = KeysValidator(keys=['a', 'b'], strict=True, messages=messages)
path, args, kwargs = validator.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.validators.KeysValidator')
self.assertEqual(args, ())
self.assertEqual(kwargs, {'keys': ['a', 'b'], 'strict': True, 'messages': messages})
|
webnotes/wnframework
|
refs/heads/develop
|
core/doctype/workflow_transition/workflow_transition.py
|
578
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
class DocType:
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
|
marcoarruda/MissionPlanner
|
refs/heads/master
|
Lib/site-packages/scipy/cluster/tests/vq_test.py
|
63
|
import numpy as np
from scipy.cluster import vq
def python_vq(all_data,code_book):
import time
t1 = time.time()
codes1,dist1 = vq.vq(all_data,code_book)
t2 = time.time()
#print 'fast (double):', t2 - t1
#print ' first codes:', codes1[:5]
#print ' first dist:', dist1[:5]
#print ' last codes:', codes1[-5:]
#print ' last dist:', dist1[-5:]
float_obs = all_data.astype(np.float32)
float_code = code_book.astype(np.float32)
t1 = time.time()
codes1,dist1 = vq.vq(float_obs,float_code)
t2 = time.time()
#print 'fast (float):', t2 - t1
#print ' first codes:', codes1[:5]
#print ' first dist:', dist1[:5]
#print ' last codes:', codes1[-5:]
#print ' last dist:', dist1[-5:]
return codes1,dist1
def read_data(name):
f = open(name,'r')
data = []
for line in f.readlines():
        data.append(map(float, line.split()))
    f.close()
    return np.array(data)
def main():
np.random.seed((1000,1000))
Ncodes = 40
Nfeatures = 16
Nobs = 4000
code_book = np.random.normal(0,1,(Ncodes,Nfeatures))
features = np.random.normal(0,1,(Nobs,Nfeatures))
codes,dist = python_vq(features,code_book)
if __name__ == '__main__':
main()
|
jmesteve/saas3
|
refs/heads/master
|
openerp/addons_extra/group_ibeacon/__openerp__.py
|
1
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2013 ErpAndCloud All Rights Reserved
# https://github.com/jmesteve
# https://github.com/escrichov
# <engineering@erpandcloud.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'group ibeacon',
'version': '1.0',
'author': 'ErpAndCloud',
'category': 'Hidden',
'description': """
[ENG] Groups ibeacon
""",
'website': 'http://www.erpandcloud.com',
'license': 'AGPL-3',
'images': [],
'depends': ['web_extra',
'ibeacon',
],
'data' : [],
'demo': [],
'installable': True,
'application': True,
'auto_install': False,
}
|
pselle/calibre
|
refs/heads/master
|
src/calibre/ebooks/pdf/render/gradients.py
|
14
|
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import sys, copy
from future_builtins import map
from collections import namedtuple
import sip
from PyQt5.Qt import QLinearGradient, QPointF
from calibre.ebooks.pdf.render.common import Name, Array, Dictionary
Stop = namedtuple('Stop', 't color')
class LinearGradientPattern(Dictionary):
def __init__(self, brush, matrix, pdf, pixel_page_width, pixel_page_height):
self.matrix = (matrix.m11(), matrix.m12(), matrix.m21(), matrix.m22(),
matrix.dx(), matrix.dy())
gradient = sip.cast(brush.gradient(), QLinearGradient)
start, stop, stops = self.spread_gradient(gradient, pixel_page_width,
pixel_page_height, matrix)
# TODO: Handle colors with different opacities
self.const_opacity = stops[0].color[-1]
funcs = Array()
bounds = Array()
encode = Array()
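        # Each adjacent pair of stops becomes a PDF Type 2 (exponential
        # interpolation) function below; a single Type 3 (stitching) function
        # then joins them over the full stop domain.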
for i, current_stop in enumerate(stops):
if i < len(stops) - 1:
next_stop = stops[i+1]
func = Dictionary({
'FunctionType': 2,
'Domain': Array([0, 1]),
'C0': Array(current_stop.color[:3]),
'C1': Array(next_stop.color[:3]),
'N': 1,
})
funcs.append(func)
encode.extend((0, 1))
if i+1 < len(stops) - 1:
bounds.append(next_stop.t)
func = Dictionary({
'FunctionType': 3,
'Domain': Array([stops[0].t, stops[-1].t]),
'Functions': funcs,
'Bounds': bounds,
'Encode': encode,
})
shader = Dictionary({
'ShadingType': 2,
'ColorSpace': Name('DeviceRGB'),
'AntiAlias': True,
'Coords': Array([start.x(), start.y(), stop.x(), stop.y()]),
'Function': func,
'Extend': Array([True, True]),
})
Dictionary.__init__(self, {
'Type': Name('Pattern'),
'PatternType': 2,
'Shading': shader,
'Matrix': Array(self.matrix),
})
self.cache_key = (self.__class__.__name__, self.matrix,
tuple(shader['Coords']), stops)
def spread_gradient(self, gradient, pixel_page_width, pixel_page_height,
matrix):
start = gradient.start()
stop = gradient.finalStop()
stops = list(map(lambda x: [x[0], x[1].getRgbF()], gradient.stops()))
spread = gradient.spread()
if spread != gradient.PadSpread:
inv = matrix.inverted()[0]
page_rect = tuple(map(inv.map, (
QPointF(0, 0), QPointF(pixel_page_width, 0), QPointF(0, pixel_page_height),
QPointF(pixel_page_width, pixel_page_height))))
maxx = maxy = -sys.maxint-1
minx = miny = sys.maxint
for p in page_rect:
minx, maxx = min(minx, p.x()), max(maxx, p.x())
miny, maxy = min(miny, p.y()), max(maxy, p.y())
def in_page(point):
return (minx <= point.x() <= maxx and miny <= point.y() <= maxy)
offset = stop - start
llimit, rlimit = start, stop
reflect = False
base_stops = copy.deepcopy(stops)
reversed_stops = list(reversed(stops))
do_reflect = spread == gradient.ReflectSpread
totl = abs(stops[-1][0] - stops[0][0])
intervals = [abs(stops[i+1][0] - stops[i][0])/totl
for i in xrange(len(stops)-1)]
while in_page(llimit):
reflect ^= True
llimit -= offset
estops = reversed_stops if (reflect and do_reflect) else base_stops
stops = copy.deepcopy(estops) + stops
first_is_reflected = reflect
reflect = False
while in_page(rlimit):
reflect ^= True
rlimit += offset
estops = reversed_stops if (reflect and do_reflect) else base_stops
stops = stops + copy.deepcopy(estops)
start, stop = llimit, rlimit
num = len(stops) // len(base_stops)
if num > 1:
# Adjust the stop parameter values
t = base_stops[0][0]
rlen = totl/num
reflect = first_is_reflected ^ True
intervals = [i*rlen for i in intervals]
rintervals = list(reversed(intervals))
for i in xrange(num):
reflect ^= True
pos = i * len(base_stops)
tvals = [t]
for ival in (rintervals if reflect and do_reflect else
intervals):
tvals.append(tvals[-1] + ival)
for j in xrange(len(base_stops)):
stops[pos+j][0] = tvals[j]
t = tvals[-1]
# In case there were rounding errors
stops[-1][0] = base_stops[-1][0]
return start, stop, tuple(Stop(s[0], s[1]) for s in stops)
|
ericawright/bedrock
|
refs/heads/master
|
lib/fluent_migrations/newsletter/includes/__init__.py
|
12133432
| |
vmanoria/bluemix-hue-filebrowser
|
refs/heads/master
|
hue-3.8.1-bluemix/desktop/core/ext-py/Django-1.6.10/tests/test_runner_invalid_app/models/__init__.py
|
12133432
| |
ftomassetti/intellij-community
|
refs/heads/master
|
python/testData/refactoring/move/usageFromFunctionResolvesToDunderAll/before/src/c.py
|
12133432
| |
jesseditson/rethinkdb
|
refs/heads/next
|
test/rql_test/connections/http_support/flask/testsuite/test_apps/flaskext/__init__.py
|
12133432
| |
hkariti/ansible
|
refs/heads/devel
|
lib/ansible/modules/commands/__init__.py
|
12133432
| |
t0in4/django
|
refs/heads/master
|
tests/migrations/test_migrations_squashed_complex_multi_apps/__init__.py
|
12133432
| |
Ubuntu-Solutions-Engineering/glance-simplestreams-sync-charm
|
refs/heads/master
|
hooks/charmhelpers/contrib/__init__.py
|
12133432
| |
gumpcs/FETK
|
refs/heads/master
|
src/data_preprocessing/class_imbalance_processing.py
|
12133432
| |
datapackages/tabulator-py
|
refs/heads/master
|
tabulator/validate.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from . import config
from . import helpers
from . import exceptions
# Module API
def validate(source, scheme=None, format=None):
'''Check if tabulator is able to load the source.
Args:
source (Union[str, IO]): The source path or IO object.
scheme (str, optional): The source scheme. Auto-detect by default.
format (str, optional): The source file format. Auto-detect by default.
Returns:
bool: Whether tabulator is able to load the source file.
Raises:
`tabulator.exceptions.SchemeError`: The file scheme is not supported.
`tabulator.exceptions.FormatError`: The file format is not supported.
'''
# Get scheme and format
detected_scheme, detected_format = helpers.detect_scheme_and_format(source)
scheme = scheme or detected_scheme
format = format or detected_format
# Validate scheme and format
if scheme is not None:
if scheme not in config.LOADERS:
raise exceptions.SchemeError('Scheme "%s" is not supported' % scheme)
if format not in config.PARSERS:
raise exceptions.FormatError('Format "%s" is not supported' % format)
return True
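# A minimal usage sketch (the file names here are hypothetical):
#
#     from tabulator import validate, exceptions
#
#     validate('data.csv')    # True: local files and CSV are supported
#     try:
#         validate('data.unknown')
#     except exceptions.FormatError as error:
#         print(error)        # Format "unknown" is not supported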
|
sharbison3/python-docs-samples
|
refs/heads/master
|
appengine/standard/ndb/modeling/keyproperty_models_test.py
|
8
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes for code snippet for modeling article."""
import pytest
import keyproperty_models as models
def test_models(testbed):
name = 'Takashi Matsuo'
contact = models.Contact(name=name)
contact.put()
contact = contact.key.get()
assert contact.name == name
# This test fails because of the eventually consistent nature of
# HRD. We configure HRD consistency for the test datastore stub to
# match the production behavior.
@pytest.mark.xfail
# [START failing_test]
def test_fails(testbed):
    contact = models.Contact(name='Example')
    contact.put()
    models.PhoneNumber(
        contact=contact.key,
phone_type='home',
number='(650) 555 - 2200').put()
numbers = contact.phone_numbers.fetch()
assert 1 == len(numbers)
# [END failing_test]
|
ModdedPA/android_external_chromium_org
|
refs/heads/kitkat
|
media/PRESUBMIT.py
|
51
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for Chromium media component.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
def _CheckForUseOfWrongClock(input_api, output_api):
"""Make sure new lines of media code don't use a clock susceptible to skew."""
def FilterFile(affected_file):
"""Return true if the file could contain code referencing base::Time."""
return affected_file.LocalPath().endswith(
('.h', '.cc', '.cpp', '.cxx', '.mm'))
# Regular expression that should detect any explicit references to the
# base::Time type (or base::Clock/DefaultClock), whether in using decls,
# typedefs, or to call static methods.
base_time_type_pattern = r'(^|\W)base::(Time|Clock|DefaultClock)(\W|$)'
# Regular expression that should detect references to the base::Time class
# members, such as a call to base::Time::Now.
base_time_member_pattern = r'(^|\W)(Time|Clock|DefaultClock)::'
# Regular expression to detect "using base::Time" declarations. We want to
  # prevent these from triggering a warning. For example, it's perfectly
# reasonable for code to be written like this:
#
# using base::Time;
# ...
# int64 foo_us = foo_s * Time::kMicrosecondsPerSecond;
using_base_time_decl_pattern = r'^\s*using\s+(::)?base::Time\s*;'
# Regular expression to detect references to the kXXX constants in the
  # base::Time class. We want to prevent these from triggering a warning.
base_time_konstant_pattern = r'(^|\W)Time::k\w+'
problem_re = input_api.re.compile(
r'(' + base_time_type_pattern + r')|(' + base_time_member_pattern + r')')
exception_re = input_api.re.compile(
r'(' + using_base_time_decl_pattern + r')|(' +
base_time_konstant_pattern + r')')
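  # For example, 'base::Time now = base::Time::Now();' matches problem_re and
  # is reported, while 'using base::Time;' and 'Time::kMicrosecondsPerSecond'
  # also match exception_re and are therefore ignored.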
problems = []
for f in input_api.AffectedSourceFiles(FilterFile):
for line_number, line in f.ChangedContents():
if problem_re.search(line):
if not exception_re.search(line):
problems.append(
' %s:%d\n %s' % (f.LocalPath(), line_number, line.strip()))
if problems:
return [output_api.PresubmitPromptOrNotify(
'You added one or more references to the base::Time class and/or one\n'
'of its member functions (or base::Clock/DefaultClock). In media\n'
'code, it is rarely correct to use a clock susceptible to time skew!\n'
'Instead, could you use base::TimeTicks to track the passage of\n'
'real-world time?\n\n' +
'\n'.join(problems))]
else:
return []
def _CheckChange(input_api, output_api):
results = []
results.extend(_CheckForUseOfWrongClock(input_api, output_api))
return results
def CheckChangeOnUpload(input_api, output_api):
return _CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _CheckChange(input_api, output_api)
|
40223232/final-test-6-22
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/sys.py
|
408
|
# hack to return special attributes
from _sys import *
from javascript import JSObject
has_local_storage=__BRYTHON__.has_local_storage
has_session_storage = __BRYTHON__.has_session_storage
has_json=__BRYTHON__.has_json
brython_debug_mode = __BRYTHON__.debug
argv = ['__main__']
base_exec_prefix = __BRYTHON__.brython_path
base_prefix = __BRYTHON__.brython_path
builtin_module_names=__BRYTHON__.builtin_module_names
byteorder='little'
def exc_info():
exc = __BRYTHON__.exception_stack[-1]
return (exc.__class__,exc,exc.traceback)
exec_prefix = __BRYTHON__.brython_path
executable = __BRYTHON__.brython_path+'/brython.js'
def exit(i=None):
    raise SystemExit(i)
class flag_class:
def __init__(self):
self.debug=0
self.inspect=0
self.interactive=0
self.optimize=0
self.dont_write_bytecode=0
self.no_user_site=0
self.no_site=0
self.ignore_environment=0
self.verbose=0
self.bytes_warning=0
self.quiet=0
self.hash_randomization=1
flags=flag_class()
def getfilesystemencoding(*args,**kw):
"""getfilesystemencoding() -> string
Return the encoding used to convert Unicode filenames in
operating system filenames."""
return 'utf-8'
maxsize=2147483647
maxunicode=1114111
path = __BRYTHON__.path
#path_hooks = list(JSObject(__BRYTHON__.path_hooks))
meta_path=__BRYTHON__.meta_path
platform="brython"
prefix = __BRYTHON__.brython_path
version = '.'.join(str(x) for x in __BRYTHON__.version_info[:3])
version += " (default, %s) \n[Javascript 1.5] on Brython" % __BRYTHON__.compiled_date
hexversion = 0x03000000 # python 3.0
class __version_info(object):
def __init__(self, version_info):
self.version_info = version_info
self.major = version_info[0]
self.minor = version_info[1]
self.micro = version_info[2]
self.releaselevel = version_info[3]
self.serial = version_info[4]
def __getitem__(self, index):
if isinstance(self.version_info[index], list):
return tuple(self.version_info[index])
return self.version_info[index]
def hexversion(self):
try:
return '0%d0%d0%d' % (self.major, self.minor, self.micro)
        except (TypeError, ValueError):
            # probably some invalid char in minor (rc, etc)
return '0%d0000' % (self.major)
def __str__(self):
        _s = "sys.version_info(major=%d, minor=%d, micro=%d, releaselevel='%s', serial=%d)"
return _s % (self.major, self.minor, self.micro,
self.releaselevel, self.serial)
#return str(self.version_info)
    def __eq__(self, other):
        if isinstance(other, tuple):
            return (self.major, self.minor, self.micro) == other
        raise TypeError("Error! I don't know how to compare!")
    def __ge__(self, other):
        if isinstance(other, tuple):
            return (self.major, self.minor, self.micro) >= other
        raise TypeError("Error! I don't know how to compare!")
    def __gt__(self, other):
        if isinstance(other, tuple):
            return (self.major, self.minor, self.micro) > other
        raise TypeError("Error! I don't know how to compare!")
    def __le__(self, other):
        if isinstance(other, tuple):
            return (self.major, self.minor, self.micro) <= other
        raise TypeError("Error! I don't know how to compare!")
    def __lt__(self, other):
        if isinstance(other, tuple):
            return (self.major, self.minor, self.micro) < other
        raise TypeError("Error! I don't know how to compare!")
    def __ne__(self, other):
        if isinstance(other, tuple):
            return (self.major, self.minor, self.micro) != other
        raise TypeError("Error! I don't know how to compare!")
#eventually this needs to be the real python version such as 3.0, 3.1, etc
version_info=__version_info(__BRYTHON__.version_info)
class _implementation:
def __init__(self):
self.name='brython'
self.version = __version_info(__BRYTHON__.implementation)
self.hexversion = self.version.hexversion()
self.cache_tag=None
def __repr__(self):
return "namespace(name='%s' version=%s hexversion='%s')" % (self.name, self.version, self.hexversion)
def __str__(self):
return "namespace(name='%s' version=%s hexversion='%s')" % (self.name, self.version, self.hexversion)
implementation=_implementation()
class _hash_info:
def __init__(self):
        self.width = 32
self.modulus=2147483647
self.inf=314159
self.nan=0
self.imag=1000003
self.algorithm='siphash24'
self.hash_bits=64
self.seed_bits=128
        self.cutoff = 0
    def __repr__(self):
        # fix me
        return "sys.hash_info(width=32, modulus=2147483647, inf=314159, nan=0, imag=1000003, algorithm='siphash24', hash_bits=64, seed_bits=128, cutoff=0)"
hash_info=_hash_info()
warnoptions=[]
#delete objects not in python sys module namespace
del JSObject
del _implementation
|
eaplatanios/tensorflow
|
refs/heads/master
|
tensorflow/python/saved_model/utils.py
|
55
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SavedModel utility functions.
Utility functions to assist with setup and construction of the SavedModel proto.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.python.saved_model.utils_impl import build_tensor_info
from tensorflow.python.saved_model.utils_impl import get_tensor_from_tensor_info
# pylint: enable=unused-import
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = ["build_tensor_info", "get_tensor_from_tensor_info"]
remove_undocumented(__name__, _allowed_symbols)
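# Illustrative sketch (assumes an in-graph tensor `x`; not part of this module):
#
#     info = build_tensor_info(x)             # TensorInfo proto (name/dtype/shape)
#     x2 = get_tensor_from_tensor_info(info)  # looks the tensor back up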
|
smunaut/gnuradio
|
refs/heads/master
|
gr-trellis/examples/python/test_cpm.py
|
11
|
#!/usr/bin/env python
##################################################
# Gnuradio Python Flow Graph
# Title: CPM test
# Author: Achilleas Anastasopoulos
# Description: gnuradio flow graph
# Generated: Thu Feb 19 23:16:23 2009
##################################################
from gnuradio import gr
from gnuradio import trellis, digital, filter, blocks
from grc_gnuradio import blks2 as grc_blks2
import math
import numpy
import sys
import fsm_utils
try:
    from gnuradio import analog
except ImportError:
    sys.stderr.write("Error: Program requires gr-analog.\n")
    sys.exit(1)
try:
    import scipy.stats
    import scipy.special
except ImportError:
print "Error: Program requires scipy (see: www.scipy.org)."
sys.exit(1)
def run_test(seed,blocksize):
tb = gr.top_block()
##################################################
# Variables
##################################################
M = 2
K = 1
P = 2
h = (1.0*K)/P
L = 3
Q = 4
frac = 0.99
f = trellis.fsm(P,M,L)
# CPFSK signals
#p = numpy.ones(Q)/(2.0)
#q = numpy.cumsum(p)/(1.0*Q)
# GMSK signals
    BT = 0.3
    tt = numpy.arange(0,L*Q)/(1.0*Q)-L/2.0
    #print tt
    p = (0.5*scipy.special.erfc(2*math.pi*BT*(tt-0.5)/math.sqrt(math.log(2.0))/math.sqrt(2.0))-0.5*scipy.special.erfc(2*math.pi*BT*(tt+0.5)/math.sqrt(math.log(2.0))/math.sqrt(2.0)))/2.0
    p = p/sum(p)*Q/2.0
    #print p
    q = numpy.cumsum(p)/Q
    q = q/q[-1]/2.0
#print q
(f0T,SS,S,F,Sf,Ff,N) = fsm_utils.make_cpm_signals(K,P,M,L,q,frac)
#print N
#print Ff
Ffa = numpy.insert(Ff,Q,numpy.zeros(N),axis=0)
#print Ffa
MF = numpy.fliplr(numpy.transpose(Ffa))
#print MF
E = numpy.sum(numpy.abs(Sf)**2,axis=0)
Es = numpy.sum(E)/f.O()
#print Es
constellation = numpy.reshape(numpy.transpose(Sf),N*f.O())
#print Ff
#print Sf
#print constellation
#print numpy.max(numpy.abs(SS - numpy.dot(Ff , Sf)))
EsN0_db = 10.0
N0 = Es * 10.0**(-(1.0*EsN0_db)/10.0)
#N0 = 0.0
#print N0
head = 4
tail = 4
numpy.random.seed(seed*666)
data = numpy.random.randint(0, M, head+blocksize+tail+1)
#data = numpy.zeros(blocksize+1+head+tail,'int')
for i in range(head):
data[i]=0
for i in range(tail+1):
data[-i]=0
##################################################
# Blocks
##################################################
random_source_x_0 = blocks.vector_source_b(data.tolist(), False)
digital_chunks_to_symbols_xx_0 = digital.chunks_to_symbols_bf((-1, 1), 1)
filter_interp_fir_filter_xxx_0 = filter.interp_fir_filter_fff(Q, p)
analog_frequency_modulator_fc_0 = analog.frequency_modulator_fc(2*math.pi*h*(1.0/Q))
blocks_add_vxx_0 = blocks.add_vcc(1)
analog_noise_source_x_0 = analog.noise_source_c(analog.GR_GAUSSIAN, (N0/2.0)**0.5, -long(seed))
    blocks_multiply_vxx_0 = blocks.multiply_vcc(1)
analog_sig_source_x_0 = analog.sig_source_c(Q, analog.GR_COS_WAVE, -f0T, 1, 0)
# only works for N=2, do it manually for N>2...
filter_fir_filter_xxx_0_0 = filter.fir_filter_ccc(Q, MF[0].conjugate())
filter_fir_filter_xxx_0_0_0 = filter.fir_filter_ccc(Q, MF[1].conjugate())
blocks_streams_to_stream_0 = blocks.streams_to_stream(gr.sizeof_gr_complex*1, int(N))
blocks_skiphead_0 = blocks.skiphead(gr.sizeof_gr_complex*1, int(N*(1+0)))
viterbi = trellis.viterbi_combined_cb(f, head+blocksize+tail, 0, -1, int(N),
constellation, digital.TRELLIS_EUCLIDEAN)
blocks_vector_sink_x_0 = blocks.vector_sink_b()
##################################################
# Connections
##################################################
tb.connect((random_source_x_0, 0), (digital_chunks_to_symbols_xx_0, 0))
tb.connect((digital_chunks_to_symbols_xx_0, 0), (filter_interp_fir_filter_xxx_0, 0))
tb.connect((filter_interp_fir_filter_xxx_0, 0), (analog_frequency_modulator_fc_0, 0))
tb.connect((analog_frequency_modulator_fc_0, 0), (blocks_add_vxx_0, 0))
tb.connect((analog_noise_source_x_0, 0), (blocks_add_vxx_0, 1))
tb.connect((blocks_add_vxx_0, 0), (blocks_multiply_vxx_0, 0))
tb.connect((analog_sig_source_x_0, 0), (blocks_multiply_vxx_0, 1))
tb.connect((blocks_multiply_vxx_0, 0), (filter_fir_filter_xxx_0_0, 0))
tb.connect((blocks_multiply_vxx_0, 0), (filter_fir_filter_xxx_0_0_0, 0))
tb.connect((filter_fir_filter_xxx_0_0, 0), (blocks_streams_to_stream_0, 0))
tb.connect((filter_fir_filter_xxx_0_0_0, 0), (blocks_streams_to_stream_0, 1))
tb.connect((blocks_streams_to_stream_0, 0), (blocks_skiphead_0, 0))
tb.connect((blocks_skiphead_0, 0), (viterbi, 0))
tb.connect((viterbi, 0), (blocks_vector_sink_x_0, 0))
tb.run()
dataest = blocks_vector_sink_x_0.data()
#print data
#print numpy.array(dataest)
perr = 0
err = 0
for i in range(blocksize):
if data[head+i] != dataest[head+i]:
#print i
err += 1
if err != 0 :
perr = 1
return (err,perr)
if __name__ == '__main__':
blocksize = 1000
ss=0
ee=0
for i in range(10000):
(s,e) = run_test(i,blocksize)
ss += s
ee += e
if (i+1) % 100 == 0:
print i+1,ss,ee,(1.0*ss)/(i+1)/(1.0*blocksize),(1.0*ee)/(i+1)
print i+1,ss,ee,(1.0*ss)/(i+1)/(1.0*blocksize),(1.0*ee)/(i+1)
|
SAP/pyhdb
|
refs/heads/master
|
tests/types/test_meta.py
|
9
|
# Copyright 2014, 2015 SAP SE.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: //www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import pytest
from pyhdb.protocol import types
from pyhdb.exceptions import InterfaceError
def test_automated_mapping_by_type_code():
class DummyType(types.Type):
type_code = 127
assert types.by_type_code[127] == DummyType
assert DummyType not in types.by_python_type.values()
def test_automated_mapping_by_multiple_type_code():
class DummyType(types.Type):
type_code = (126, 127)
assert types.by_type_code[126] == DummyType
assert types.by_type_code[127] == DummyType
assert DummyType not in types.by_python_type.values()
def test_invalid_automated_mapping_by_type_code():
with pytest.raises(InterfaceError):
class DummyType(types.Type):
type_code = 999
def test_automated_mapping_by_python_type():
class DummyType(types.Type):
python_type = None
assert types.by_python_type[None] == DummyType
assert DummyType not in types.by_type_code.values()
def test_automated_mapping_by_multiple_python_type():
class DummyType(types.Type):
python_type = (int, None)
assert types.by_python_type[int] == DummyType
assert types.by_python_type[None] == DummyType
assert DummyType not in types.by_type_code.values()
def test_type_mapping_is_a_weakref():
class DummyType(types.Type):
type_code = 125
python_type = int
assert types.by_type_code[125] == DummyType
assert types.by_python_type[int] == DummyType
del DummyType
import gc
gc.collect()
assert 125 not in types.by_type_code
assert int not in types.by_python_type
def test_all_types_with_code_has_method_from_resultset():
for typ in types.by_type_code.values():
assert hasattr(typ, "from_resultset")
assert callable(typ.from_resultset)
def test_all_types_with_python_type_has_method_to_sql():
for typ in types.by_python_type.values():
assert hasattr(typ, "to_sql")
assert callable(typ.to_sql)
|
ahmed-mahran/hue
|
refs/heads/master
|
desktop/core/ext-py/guppy-0.1.10/guppy/gsl/XHTML.py
|
37
|
#._cv_part guppy.gsl.XHTML
class Node2XHTML:
def __init__(self, mod, node=None, error_report = None, encode_name=None
):
self.mod = mod
self.valid_html40 = False
self.encode = self.mod.encode
if encode_name is None:
encode_name = self.mod.encode_name
self.encode_name = encode_name
if error_report is not None:
self.error_report = error_report
self.document_lang = None
        self.header_nodes = []
        self.document_metas = []
self.indent = 0
self.indentstep = 1
self.set_out([])
# xxx where do this?
charset = 'utf-8'
self.header_nodes.append(self.mod.node_of_taci(
'meta', '', (
self.mod.node_of_taci('http-equiv=', 'Content-Type'),
self.mod.node_of_taci('content=',
'text/html; charset=%s'%charset))))
if node is not None:
node.accept(self)
def _visit_children(self, node):
node, attrs = node.split_attrs()
# xxx handle attrs?
E = self.mod.ReportedError
for ch in node.children:
try:
ch.accept(self)
except E:
pass
def begin(self, tag, arg=''):
t = '<'+tag
if arg:
t = t + ' ' + arg
if tag in self.mod._no_end_tag_elements:
# Quote from: http://gutenberg.hwg.org/markupXHTML.html
# N.B. The penultimate closing slash on empty elements such as the <img/>
# element can cause a problem in older browsers. For this reason it is
# recommended that you leave a space before the slash, namely <img />
t += ' /'
t += '>'
if tag in self.mod.line_break_allowed:
t = '\n'+self.indent * ' ' + t
self.append(t)
self.indent += self.indentstep
def chg_out(self, out):
oo = self.out
self.set_out(out)
return oo
def encode_link_name(self, name):
        # 1. Make the name better-looking from an HTML user's perspective
# 2. Encode it by HTML rules
if name.startswith(self.mod.tgt_prefix):
name = name[len(self.mod.tgt_prefix):]
else:
# Should not happen often or at all
assert 0
name = self.encode_name(name)
return name
def end(self, tag):
self.indent -= self.indentstep
self.append('</%s>'%tag)
def error(self, msg, *args, **kwds):
msg = 'Doc2XHTML: ' + msg
self.error_report(msg, *args, **kwds)
def error_report(self, msg, *args, **kwds):
print 'HTML ENCODING ERROR: ', msg, 'args=',args, 'kwds=',kwds
raise ValueError
def gen_document_header(self, lang, header_nodes):
# lang & title are nodes with text or char directives, to be encoded.
# metas is a list of nodes, with data to be encoded
        strict = 1 # we have alternatives; I just haven't yet decided how or if to let the user choose
if strict:
self.append("""\
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
""")
else:
self.append("""\
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
""")
self.begin('html',
'lang=%r xmlns="http://www.w3.org/1999/xhtml"'%self.get_encoded_text(lang),
)
self.begin('head')
for node in header_nodes:
self.gen_stdhtml(node)
self.end('head')
self.begin('body')
        # Work around the W3C restriction that character data is not allowed
        # directly in body; this makes it easier to write compliant code.
        # Arguably the restriction is there for a reason, but I don't know...
self.begin('div')
def gen_document_trailer(self):
self.end('div')
self.end('body')
self.end('html')
def gen_empty_elmt(self, tag, arg=''):
self.begin(tag, arg)
self.indent -= self.indentstep
def gen_generated_from_gsl(self):
self.gen_empty_elmt('hr')
self.append('Generated by ')
self.begin('a', 'href="http://guppy-pe.sourceforge.net/gsl.html"')
#self.begin('a', 'href="gsl.html"')
self.append('GSL-XHTML 0.1.7')
self.end('a')
self.append(' on '+self.mod.time.asctime(self.mod.time.localtime()))
def gen_meta(self, node, tag=None):
mknode = self.mod.node_of_taci
if tag is None:
tag = node.tag
self.header_nodes.append(
mknode('meta', '',
[mknode('name=', tag),
mknode('content=', node.arg, node.children)]))
def gen_stdhtml(self, node, tag=None, **options):
if tag is None:
tag = node.tag
node, attrs = node.split_attrs(tag)
self.begin(tag, ' '.join(['%s=%r'%(key, val) for (key, val) in attrs]))
if tag in self.mod._no_end_tag_elements:
if node.arg:
self.error('No enclosed text allowed for Html tag: %r.'%node.tag)
self.no_children(node)
self.indent -= self.indentstep
else:
node.arg_accept(self)
self.end(tag)
def get_encoded_text(self, node):
# From a node's arg and children that are text or characters
old_out = self.chg_out([])
self.append(self.encode(node.arg))
for ch in node.children:
if ch.tag in ('text', 'char'):
ch.accept(self)
else:
self.error('Only text and char allowed here, not %r.'%ch.tag, ch)
return ''.join(self.chg_out(old_out))
def get_html(self):
return ''.join(self.out)
def no_children(self, node):
if node.children:
self.error('No children allowed for %r. Got children nodes = %r.'%(
node.tag, node.children))
def set_out(self, out):
self.out = out
self.extend = out.extend
self.append = out.append
def visit_author(self, node):
self.gen_meta(node)
def visit_block(self, node):
self._visit_children(node)
def visit_char(self, node):
name = node.get_namearg()
if name in self.mod.name2codepoint:
name = '&%s;'%name
else:
if name[:2] == "0x":
char = int(name[2:], 16)
elif name.isdigit():
char = int(name)
else:
self.error('No such character: %r.'%name, node)
name = self.mod.codepoint2name.get(char)
if name is None:
name = '&#%d;'%char
else:
name = '&%s;'%name
self.append(name)
self._visit_children(node)
def visit_col_width(self, node):
self.append('<col width="%s" />'%node.arg)
def visit_comment(self, node):
return
# self.append('<!-- %s -->'%node.arg)
def visit_default(self, node):
if node.tag in self.mod.stdhtml:
if node.tag in self.mod._head_elements:
                self.header_nodes.append(node)
else:
self.gen_stdhtml(node)
else:
self.error('I don\'t know what to generate for the tag %r.'%node.tag, node)
def visit_define(self, node):
name = self.encode_link_name(node.arg)
self.begin('a', 'name=%r'%name)
self._visit_children(node)
self.end('a')
def visit_document(self, node):
self.indent = 2 # Known indentation of header to be generated later
oldout = self.chg_out([])
self._visit_children(node)
# self.gen_generated_from_gsl()
newout = self.chg_out(oldout)
mknode = self.mod.node_of_taci
lang = self.document_lang
if not lang:
lang = mknode('document_lang', 'en')
self.indent = 0
self.gen_document_header(lang, self.header_nodes)
self.out.extend(newout)
self.gen_document_trailer()
def visit_document_lang(self, node):
if self.document_lang is not None:
self.error('Duplicate document lang directive.', node)
self.document_lang = node
def visit_document_title(self, node):
self.header_nodes.append(self.mod.node_of_taci('title', node.arg))
def visit_enumerate(self, node):
self.begin('ol')
for c in node.children:
self.begin('li')
c.accept(self)
self.end('li')
self.end('ol')
def visit_exdefs(self, node):
self.symplace = {}
for ch in node.children:
syms = [x.strip() for x in ch.arg.split(',')]
for sym in syms:
self.symplace[sym] = ch.tag
def visit_generated_from_gsl(self, node):
self.gen_generated_from_gsl()
def visit_header(self, node):
self.header_nodes.extend(node.children)
def visit_itemize(self, node):
self.begin('ul')
for c in node.children:
self.begin('li')
c.accept(self)
self.end('li')
self.end('ul')
def visit_link_to_extern(self, node):
name = node.arg
docname = node.children[0].arg
children = node.children[1:]
uri = '%s.html#%s'%(docname, self.encode_link_name(name))
self.begin('a', 'href=%r'%uri)
if not children:
self.append(self.encode(name))
else:
for ch in children:
ch.accept(self)
self.end('a')
def visit_link_to_local(self, node):
name = node.arg
uri = '#%s'%self.encode_link_name(name)
self.begin('a', 'href=%r'%uri)
if not node.children:
self.append(self.encode(name))
else:
self._visit_children(node)
self.end('a')
def visit_link_to_unresolved(self, node):
name = node.arg
self.begin('em')
if not node.children:
self.append(self.encode(name))
else:
self._visit_children(node)
self.end('em')
def visit_literal_block(self, node):
self.gen_stdhtml(node, 'pre')
def visit_man_page_mode(self, node):
self._visit_children(node)
def visit_meta(self, node):
self.document_metas.append(node)
def visit_spc_colonkind(self, node):
#self.append(' <strong>:</strong> ')
#self.append(' <code>:</code> ')
self.append('<code>:</code> ')
def visit_spc_mapsto(self, node):
self.append(' <strong>-></strong> ')
def visit_string(self, node):
self._visit_children(node)
def visit_symbol(self, node):
self.visit_text(node)
def visit_text(self, node):
text = self.encode(node.arg)
if len(text) > 80 or '\n' in text:
self.append('\n')
self.append(text)
self._visit_children(node)
def visit_to_document_only(self, node):
self._visit_children(node)
def visit_to_html_only(self, node):
self._visit_children(node)
def visit_to_tester_only(self, node):
pass
def visit_valid_html40(self, node):
self.valid_html40 = node
node, attrs = self.valid_html40.split_attrs(attrdict=True)
# XXX check allowed attrs but in a GENERAL way
# Code taken from validator.w3.org
self.append("""\
<a href="http://validator.w3.org/check?uri=referer"><img
src="%s"
alt="Valid HTML 4.0 Strict" height="31" width="88" /></a>
"""%attrs.get('src', 'http://www.w3.org/Icons/valid-html40'))
def visit_with(self, node):
pass
def visit_word(self, node):
self._visit_children(node)
class _GLUECLAMP_:
_imports_ = (
'_parent:SpecNodes',
'_parent.SpecNodes:node_of_taci',
'_parent.Gsml:is_not_ascii',
'_parent.Main:tgt_prefix',
'_parent.Main:ReportedError',
'_root.htmlentitydefs:name2codepoint',
'_root.htmlentitydefs:codepoint2name',
'_root:re',
'_root:time',
)
_chgable_ = ('tag_uppercase_name_chars',)
# Set to make upper-case name characters tagged to make sure
# no names in a file differ only in case as stated in HTML spec.
# I believe this doesn't matter in practice in contemporary browsers,
# since references are also said to be case sensitive!
# -- I can't be bothered to solve this better now. See also Notes Aug 12 2005.
tag_uppercase_name_chars = 0
_html3_2 = (
'a', 'address', 'area',
'b', 'base', 'big', 'blockquote', 'body', 'br',
'caption', 'center', 'cite', 'code',
'dfn', 'dt','dl', 'dd','div',
'em', 'form',
'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'html',
'i', 'img', 'input', 'kbd',
'li',
'ol', 'option',
'p', 'param', 'pre',
'samp', 'select', 'small', 'strong', 'style', 'sub', 'sup',
'table', 'td', 'textarea', 'th', 'thead', 'title', 'tr', 'tt',
'ul',
'var')
# Included in Html 3.2 but 'deprecated' in Html 4.0
_html4_0_deprecated = (
'applet', 'basefont', 'dir', 'font', 'isindex',
'strike', 'u',
)
    # Included in 3.2, not deprecated in 4.0, but one may want to avoid them
_html_avoid = (
'script',
)
_html4_0 = (
'abbr', 'acronym',
'bdo','button',
'col', 'colgroup',
'del',
'fieldset', 'frame', 'frameset',
'iframe', 'ins',
'label', 'legend',
'noframes', 'noscript',
'object','optgroup',
'q','s', 'span',
'tbody', 'tfoot', 'thead')
_head_elements = (
'base','isindex','link','meta','script','style','title'
)
# The ones that can have no end tag
# xxx are there more -style etc- look it up!
_no_end_tag_elements = (
# Header elmts
'meta', 'link',
# Other
'img',
'hr', # CAN have end tag? but never has. This will self-close to generate valid XHTML.
)
# The ones that we may generate line-break before
# and hope it will not affect the insertion of spaces in rendering.
_line_break_allowed = (
'html','head','body','frameset',
# Head Elements
) + _head_elements + (
# Generic Block-level Elements
'address','blockquote','center','del','div',
'h1','h2','h3','h4','h5','h6','hr','ins','isindex','noscript','p','pre',
# Lists
'dir','dl','dt','dd','li','menu','ol','ul',
# Tables
'table','caption','colgroup','col','thead','tfoot','tbody','tr','td','th',
# Forms
'form','button','fieldset','legend','input','label',
'select','optgroup','option','textarea'
)
# The attributes allowed in META elements
meta_attributes = ('name', 'http-equiv', 'content', 'scheme', 'lang', 'dir')
# This returns a function checking if a character is allowed to be used
# as the first character in a NAME or ID attribute.
# (I don't think this is the same as .isalpha() with unicode.)
def _get_is_name_starter_char(self):
return self.re.compile(r"[A-Za-z]").match
# This returns a function checking if a character is allowed to be used
# after the first character in a NAME or ID attribute.
def _get_is_name_follower_char(self):
return self.re.compile(r"[A-Za-z0-9\-_:\.]").match
# A set of the ones we generate directly. This includes the ones from
# html 3.2; I have also included the deprecated and the 4.0-only elements.
def _get_stdhtml(self):
sh = {}
for x in self._html3_2 + self._html4_0_deprecated + self._html4_0:
sh[x] = 1
return sh
def _get_line_break_allowed(self):
sh = {}
for x in self._line_break_allowed:
sh[x] = 1
return sh
def doc2filer(self, doc, node, name, dir, opts, IO):
text = self.doc2text(doc, node)
path = IO.path.join(dir, '%s.html'%name)
node = self.node_of_taci('write_file', path, [self.node_of_taci('text', text)])
return node
def doc2text(self, doc, node):
d2h = Node2XHTML(self, node, doc.env.error)
return d2h.get_html()
def node2file(self, node, file):
text = self.node2text(node)
f = open(file, 'w')
f.write(text)
f.close()
def node2text(self, node):
text = Node2XHTML(self, node).get_html()
return text
# Adapted from html4css1.py in docutils
def encode(self, text):
"""Encode special characters in `text` & return."""
# @@@ A codec to do these and all other HTML entities would be nice.
text = text.replace("&", "&")
text = text.replace("<", "<")
text = text.replace('"', """)
text = text.replace(">", ">")
text = text.replace("@", "@") # may thwart some address harvesters
return text
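# Hedged illustration (not in the original source): with the replacements
# above, encode('a < b & "c"') yields 'a &lt; b &amp; &quot;c&quot;',
# and '@' becomes '&#64;'.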
# Encode a name according to HTML spec. See also Notes Aug 12 2005.
# From wdghtml40/values.html#cdata :
# Attribute values of type ID and NAME must begin with a letter in the
# range A-Z or a-z and may be followed by letters (A-Za-z), digits
# (0-9), hyphens ("-"), underscores ("_"), colons (":"), and periods
# ("."). These values are case-sensitive.
def encode_name(self, name):
is_name_follower_char = self.is_name_follower_char
ns = []
append = ns.append
upperstate = 0
ch = name[:1]
if ch == 'z' or not self.is_name_starter_char(ch):
append('z')
if ch == 'z':
append('z')
for ch in name:
if ch == '-' or not is_name_follower_char(ch):
if upperstate:
append('-')
upperstate = 0
append('-')
if ch != '-':
append('%d'%ord(ch))
append('-')
elif ch.isupper() and self.tag_uppercase_name_chars:
if not upperstate:
append('-')
upperstate = 1
append(ch)
else:
if upperstate:
append('-')
upperstate = 0
append(ch)
if upperstate:
append('-')
return ''.join(ns)
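# Hedged illustration (not part of the original module): by the rules
# implemented above, encode_name maps, for example:
#   'Foo'    -> 'Foo'     (valid starter; unchanged with the tag flag at 0)
#   '0start' -> 'z0start' ('0' cannot start a NAME, so 'z' is prefixed)
#   'zoo'    -> 'zzzoo'   (a leading 'z' is escaped by doubling the prefix)
#   'a b'    -> 'a-32-b'  (the space becomes its ordinal, '-'-delimited)
#   'a-b'    -> 'a--b'    (a literal '-' is doubled)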
|
scorpilix/Golemtest
|
refs/heads/develop
|
golem/ranking/helper/__init__.py
|
12133432
| |
diegoguimaraes/django
|
refs/heads/master
|
tests/admin_checks/__init__.py
|
12133432
| |
yencarnacion/jaikuengine
|
refs/heads/master
|
.google_appengine/lib/django_1_2/tests/regressiontests/string_lookup/__init__.py
|
12133432
| |
zhzhussupovkz/taxi-py
|
refs/heads/master
|
core/__init__.py
|
12133432
| |
tombstone/models
|
refs/heads/master
|
research/im2txt/im2txt/ops/image_embedding.py
|
34
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Image embedding ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.contrib.slim.python.slim.nets.inception_v3 import inception_v3_base
slim = tf.contrib.slim
def inception_v3(images,
trainable=True,
is_training=True,
weight_decay=0.00004,
stddev=0.1,
dropout_keep_prob=0.8,
use_batch_norm=True,
batch_norm_params=None,
add_summaries=True,
scope="InceptionV3"):
"""Builds an Inception V3 subgraph for image embeddings.
Args:
images: A float32 Tensor of shape [batch, height, width, channels].
trainable: Whether the inception submodel should be trainable or not.
is_training: Boolean indicating training mode or not.
weight_decay: Coefficient for weight regularization.
stddev: The standard deviation of the truncated normal weight initializer.
dropout_keep_prob: Dropout keep probability.
use_batch_norm: Whether to use batch normalization.
batch_norm_params: Parameters for batch normalization. See
tf.contrib.layers.batch_norm for details.
add_summaries: Whether to add activation summaries.
scope: Optional Variable scope.
Returns:
end_points: A dictionary of activations from inception_v3 layers.
"""
# Only consider the inception model to be in training mode if it's trainable.
is_inception_model_training = trainable and is_training
if use_batch_norm:
# Default parameters for batch normalization.
if not batch_norm_params:
batch_norm_params = {
"is_training": is_inception_model_training,
"trainable": trainable,
# Decay for the moving averages.
"decay": 0.9997,
# Epsilon to prevent 0s in variance.
"epsilon": 0.001,
# Collection containing the moving mean and moving variance.
"variables_collections": {
"beta": None,
"gamma": None,
"moving_mean": ["moving_vars"],
"moving_variance": ["moving_vars"],
}
}
else:
batch_norm_params = None
if trainable:
weights_regularizer = tf.contrib.layers.l2_regularizer(weight_decay)
else:
weights_regularizer = None
with tf.variable_scope(scope, "InceptionV3", [images]) as scope:
with slim.arg_scope(
[slim.conv2d, slim.fully_connected],
weights_regularizer=weights_regularizer,
trainable=trainable):
with slim.arg_scope(
[slim.conv2d],
weights_initializer=tf.truncated_normal_initializer(stddev=stddev),
activation_fn=tf.nn.relu,
normalizer_fn=slim.batch_norm,
normalizer_params=batch_norm_params):
net, end_points = inception_v3_base(images, scope=scope)
with tf.variable_scope("logits"):
shape = net.get_shape()
net = slim.avg_pool2d(net, shape[1:3], padding="VALID", scope="pool")
net = slim.dropout(
net,
keep_prob=dropout_keep_prob,
is_training=is_inception_model_training,
scope="dropout")
net = slim.flatten(net, scope="flatten")
# Add summaries.
if add_summaries:
for v in end_points.values():
tf.contrib.layers.summaries.summarize_activation(v)
return net
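# --- Hedged usage sketch (not part of the original file) ---
# A minimal way to wire the function above into a TF1-style graph. The
# 299x299 input size is the Inception V3 default and an assumption here;
# after the avg-pool and flatten above, the embedding width should be 2048.
if __name__ == "__main__":
  images = tf.placeholder(tf.float32, shape=[None, 299, 299, 3])
  embeddings = inception_v3(images, trainable=False, is_training=False)
  print(embeddings.get_shape())  # expected: (?, 2048)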
|
fnouama/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyMethodFirstArgAssignmentInspection/src/first-args.py
|
83
|
def self(): # ok
pass
self = 1 # ok
class A:
def foo(self, a):
(self, (a, b)) = 1, ((22, 23))
if 1:
a = {}
self = 23
for (self, a) in []:
pass
def boo():
self = 1
def moo(self):
def inner_moo():
self =1
def self():
pass
class self:
pass
@classmethod
def qoo(cls):
cls = 1
# no builtins detection -> can't test static methods :( where's mock Python SDK?
@staticmethod
def stat(first):
first = 1 # ok
|
yongshengwang/hue
|
refs/heads/master
|
build/env/lib/python2.7/site-packages/ipython-0.10-py2.7.egg/IPython/external/Itpl.py
|
7
|
# -*- coding: utf-8 -*-
"""String interpolation for Python (by Ka-Ping Yee, 14 Feb 2000).
This module lets you quickly and conveniently interpolate values into
strings (in the flavour of Perl or Tcl, but with less extraneous
punctuation). You get a bit more power than in the other languages,
because this module allows subscripting, slicing, function calls,
attribute lookup, or arbitrary expressions. Variables and expressions
are evaluated in the namespace of the caller.
The itpl() function returns the result of interpolating a string, and
printpl() prints out an interpolated string. Here are some examples:
from Itpl import printpl
printpl("Here is a $string.")
printpl("Here is a $module.member.")
printpl("Here is an $object.member.")
printpl("Here is a $functioncall(with, arguments).")
printpl("Here is an ${arbitrary + expression}.")
printpl("Here is an $array[3] member.")
printpl("Here is a $dictionary['member'].")
The filter() function filters a file object so that output through it
is interpolated. This lets you produce the illusion that Python knows
how to do interpolation:
import Itpl
sys.stdout = Itpl.filter()
f = "fancy"
print "Is this not $f?"
print "Standard output has been replaced with a $sys.stdout object."
sys.stdout = Itpl.unfilter()
print "Okay, back $to $normal."
Under the hood, the Itpl class represents a string that knows how to
interpolate values. An instance of the class parses the string once
upon initialization; the evaluation and substitution can then be done
each time the instance is evaluated with str(instance). For example:
from Itpl import Itpl
s = Itpl("Here is $foo.")
foo = 5
print str(s)
foo = "bar"
print str(s)
"""
#*****************************************************************************
#
# Copyright (c) 2001 Ka-Ping Yee <ping@lfw.org>
#
#
# Published under the terms of the MIT license, hereby reproduced:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
#*****************************************************************************
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__license__ = 'MIT'
import string
import sys
from tokenize import tokenprog
from types import StringType
class ItplError(ValueError):
def __init__(self, text, pos):
self.text = text
self.pos = pos
def __str__(self):
return "unfinished expression in %s at char %d" % (
repr(self.text), self.pos)
def matchorfail(text, pos):
match = tokenprog.match(text, pos)
if match is None:
raise ItplError(text, pos)
return match, match.end()
class Itpl:
"""Class representing a string with interpolation abilities.
Upon creation, an instance works out what parts of the format
string are literal and what parts need to be evaluated. The
evaluation and substitution happens in the namespace of the
caller when str(instance) is called."""
def __init__(self, format,codec='utf_8',encoding_errors='backslashreplace'):
"""The single mandatory argument to this constructor is a format
string.
The format string is parsed according to the following rules:
1. A dollar sign and a name, possibly followed by any of:
- an open-paren, and anything up to the matching paren
- an open-bracket, and anything up to the matching bracket
- a period and a name
any number of times, is evaluated as a Python expression.
2. A dollar sign immediately followed by an open-brace, and
anything up to the matching close-brace, is evaluated as
a Python expression.
3. Outside of the expressions described in the above two rules,
two dollar signs in a row give you one literal dollar sign.
Optional arguments:
- codec('utf_8'): a string containing the name of a valid Python
codec.
- encoding_errors('backslashreplace'): a string with a valid error handling
policy. See the codecs module documentation for details.
These are used to encode the format string if a call to str() fails on
the expanded result."""
if not isinstance(format,basestring):
raise TypeError, "needs string initializer"
self.format = format
self.codec = codec
self.encoding_errors = encoding_errors
namechars = "abcdefghijklmnopqrstuvwxyz" \
"ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_";
chunks = []
pos = 0
while 1:
dollar = string.find(format, "$", pos)
if dollar < 0: break
nextchar = format[dollar+1]
if nextchar == "{":
chunks.append((0, format[pos:dollar]))
pos, level = dollar+2, 1
while level:
match, pos = matchorfail(format, pos)
tstart, tend = match.regs[3]
token = format[tstart:tend]
if token == "{": level = level+1
elif token == "}": level = level-1
chunks.append((1, format[dollar+2:pos-1]))
elif nextchar in namechars:
chunks.append((0, format[pos:dollar]))
match, pos = matchorfail(format, dollar+1)
while pos < len(format):
if format[pos] == "." and \
pos+1 < len(format) and format[pos+1] in namechars:
match, pos = matchorfail(format, pos+1)
elif format[pos] in "([":
pos, level = pos+1, 1
while level:
match, pos = matchorfail(format, pos)
tstart, tend = match.regs[3]
token = format[tstart:tend]
if token[0] in "([": level = level+1
elif token[0] in ")]": level = level-1
else: break
chunks.append((1, format[dollar+1:pos]))
else:
chunks.append((0, format[pos:dollar+1]))
pos = dollar + 1 + (nextchar == "$")
if pos < len(format): chunks.append((0, format[pos:]))
self.chunks = chunks
def __repr__(self):
return "<Itpl %s >" % repr(self.format)
def _str(self,glob,loc):
"""Evaluate to a string in the given globals/locals.
The final output is built by calling str(), but if this fails, the
result is encoded with the instance's codec and error handling policy,
via a call to out.encode(self.codec,self.encoding_errors)"""
result = []
app = result.append
for live, chunk in self.chunks:
if live: app(str(eval(chunk,glob,loc)))
else: app(chunk)
out = ''.join(result)
try:
return str(out)
except UnicodeError:
return out.encode(self.codec,self.encoding_errors)
def __str__(self):
"""Evaluate and substitute the appropriate parts of the string."""
# We need to skip enough frames to get to the actual caller outside of
# Itpl.
frame = sys._getframe(1)
while frame.f_globals["__name__"] == __name__: frame = frame.f_back
loc, glob = frame.f_locals, frame.f_globals
return self._str(glob,loc)
class ItplNS(Itpl):
"""Class representing a string with interpolation abilities.
This inherits from Itpl, but at creation time a namespace is provided
where the evaluation will occur. The interpolation becomes a bit more
efficient, as no traceback needs to be extracted. It also allows the
caller to supply a different namespace for the interpolation to occur than
its own."""
def __init__(self, format,globals,locals=None,
codec='utf_8',encoding_errors='backslashreplace'):
"""ItplNS(format,globals[,locals]) -> interpolating string instance.
This constructor, besides a format string, takes a globals dictionary
and optionally a locals (which defaults to globals if not provided).
For further details, see the Itpl constructor."""
if locals is None:
locals = globals
self.globals = globals
self.locals = locals
Itpl.__init__(self,format,codec,encoding_errors)
def __str__(self):
"""Evaluate and substitute the appropriate parts of the string."""
return self._str(self.globals,self.locals)
def __repr__(self):
return "<ItplNS %s >" % repr(self.format)
# utilities for fast printing
def itpl(text): return str(Itpl(text))
def printpl(text): print itpl(text)
# versions with namespace
def itplns(text,globals,locals=None): return str(ItplNS(text,globals,locals))
def printplns(text,globals,locals=None): print itplns(text,globals,locals)
class ItplFile:
"""A file object that filters each write() through an interpolator."""
def __init__(self, file): self.file = file
def __repr__(self): return "<interpolated " + repr(self.file) + ">"
def __getattr__(self, attr): return getattr(self.file, attr)
def write(self, text): self.file.write(str(Itpl(text)))
def filter(file=sys.stdout):
"""Return an ItplFile that filters writes to the given file object.
'file = filter(file)' replaces 'file' with a filtered object that
has a write() method. When called with no argument, this creates
a filter to sys.stdout."""
return ItplFile(file)
def unfilter(ifile=None):
"""Return the original file that corresponds to the given ItplFile.
'file = unfilter(file)' undoes the effect of 'file = filter(file)'.
'sys.stdout = unfilter()' undoes the effect of 'sys.stdout = filter()'."""
return ifile and ifile.file or sys.stdout.file
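# --- Hedged usage sketch (not part of the original module) ---
# A minimal example (Python 2) of ItplNS, which skips the caller-frame
# lookup done in Itpl.__str__ by taking an explicit namespace:
if __name__ == '__main__':
    ns = {'user': 'ping', 'n': 3}
    print str(ItplNS("$user has $n messages", ns))  # ping has 3 messages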
|
barnsnake351/nova
|
refs/heads/master
|
nova/tests/unit/api/openstack/compute/admin_only_action_common.py
|
69
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import timeutils
from oslo_utils import uuidutils
import webob
from nova.compute import vm_states
from nova import exception
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_instance
class CommonMixin(object):
def setUp(self):
super(CommonMixin, self).setUp()
self.compute_api = None
self.req = fakes.HTTPRequest.blank('')
self.context = self.req.environ['nova.context']
def _stub_instance_get(self, uuid=None):
if uuid is None:
uuid = uuidutils.generate_uuid()
instance = fake_instance.fake_instance_obj(self.context,
id=1, uuid=uuid, vm_state=vm_states.ACTIVE,
task_state=None, launched_at=timeutils.utcnow())
self.compute_api.get(self.context, uuid, expected_attrs=None,
want_objects=True).AndReturn(instance)
return instance
def _stub_instance_get_failure(self, exc_info, uuid=None):
if uuid is None:
uuid = uuidutils.generate_uuid()
self.compute_api.get(self.context, uuid, expected_attrs=None,
want_objects=True).AndRaise(exc_info)
return uuid
def _test_non_existing_instance(self, action, body_map=None):
uuid = uuidutils.generate_uuid()
self._stub_instance_get_failure(
exception.InstanceNotFound(instance_id=uuid), uuid=uuid)
self.mox.ReplayAll()
controller_function = getattr(self.controller, action)
self.assertRaises(webob.exc.HTTPNotFound,
controller_function,
self.req, uuid, body=body_map)
# Do these here instead of tearDown because this method is called
# more than once for the same test case
self.mox.VerifyAll()
self.mox.UnsetStubs()
def _test_action(self, action, body=None, method=None,
compute_api_args_map=None):
if method is None:
method = action.replace('_', '')
compute_api_args_map = compute_api_args_map or {}
instance = self._stub_instance_get()
args, kwargs = compute_api_args_map.get(action, ((), {}))
getattr(self.compute_api, method)(self.context, instance, *args,
**kwargs)
self.mox.ReplayAll()
controller_function = getattr(self.controller, action)
res = controller_function(self.req, instance.uuid, body=body)
# NOTE: on v2.1, http status code is set as wsgi_code of API
# method instead of status_int in a response object.
if self._api_version == '2.1':
status_int = controller_function.wsgi_code
else:
status_int = res.status_int
self.assertEqual(202, status_int)
# Do these here instead of tearDown because this method is called
# more than once for the same test case
self.mox.VerifyAll()
self.mox.UnsetStubs()
def _test_not_implemented_state(self, action, method=None):
if method is None:
method = action.replace('_', '')
instance = self._stub_instance_get()
body = {}
compute_api_args_map = {}
args, kwargs = compute_api_args_map.get(action, ((), {}))
getattr(self.compute_api, method)(self.context, instance,
*args, **kwargs).AndRaise(
NotImplementedError())
self.mox.ReplayAll()
controller_function = getattr(self.controller, action)
self.assertRaises(webob.exc.HTTPNotImplemented,
controller_function,
self.req, instance.uuid, body=body)
# Do these here instead of tearDown because this method is called
# more than once for the same test case
self.mox.VerifyAll()
self.mox.UnsetStubs()
def _test_invalid_state(self, action, method=None, body_map=None,
compute_api_args_map=None,
exception_arg=None):
if method is None:
method = action.replace('_', '')
if body_map is None:
body_map = {}
if compute_api_args_map is None:
compute_api_args_map = {}
instance = self._stub_instance_get()
args, kwargs = compute_api_args_map.get(action, ((), {}))
getattr(self.compute_api, method)(self.context, instance,
*args, **kwargs).AndRaise(
exception.InstanceInvalidState(
attr='vm_state', instance_uuid=instance.uuid,
state='foo', method=method))
self.mox.ReplayAll()
controller_function = getattr(self.controller, action)
ex = self.assertRaises(webob.exc.HTTPConflict,
controller_function,
self.req, instance.uuid,
body=body_map)
self.assertIn("Cannot \'%(action)s\' instance %(id)s"
% {'action': exception_arg or method,
'id': instance.uuid}, ex.explanation)
# Do these here instead of tearDown because this method is called
# more than once for the same test case
self.mox.VerifyAll()
self.mox.UnsetStubs()
def _test_locked_instance(self, action, method=None, body=None,
compute_api_args_map=None):
if method is None:
method = action.replace('_', '')
compute_api_args_map = compute_api_args_map or {}
instance = self._stub_instance_get()
args, kwargs = compute_api_args_map.get(action, ((), {}))
getattr(self.compute_api, method)(self.context, instance, *args,
**kwargs).AndRaise(
exception.InstanceIsLocked(instance_uuid=instance.uuid))
self.mox.ReplayAll()
controller_function = getattr(self.controller, action)
self.assertRaises(webob.exc.HTTPConflict,
controller_function,
self.req, instance.uuid, body=body)
# Do these here instead of tearDown because this method is called
# more than once for the same test case
self.mox.VerifyAll()
self.mox.UnsetStubs()
def _test_instance_not_found_in_compute_api(self, action,
method=None, body=None, compute_api_args_map=None):
if method is None:
method = action.replace('_', '')
compute_api_args_map = compute_api_args_map or {}
instance = self._stub_instance_get()
args, kwargs = compute_api_args_map.get(action, ((), {}))
getattr(self.compute_api, method)(self.context, instance, *args,
**kwargs).AndRaise(
exception.InstanceNotFound(instance_id=instance.uuid))
self.mox.ReplayAll()
controller_function = getattr(self.controller, action)
self.assertRaises(webob.exc.HTTPNotFound,
controller_function,
self.req, instance.uuid, body=body)
# Do these here instead of tearDown because this method is called
# more than once for the same test case
self.mox.VerifyAll()
self.mox.UnsetStubs()
class CommonTests(CommonMixin, test.NoDBTestCase):
def _test_actions(self, actions, method_translations=None, body_map=None,
args_map=None):
method_translations = method_translations or {}
body_map = body_map or {}
args_map = args_map or {}
for action in actions:
method = method_translations.get(action)
body = body_map.get(action)
self.mox.StubOutWithMock(self.compute_api,
method or action.replace('_', ''))
self._test_action(action, method=method, body=body,
compute_api_args_map=args_map)
# Re-mock this.
self.mox.StubOutWithMock(self.compute_api, 'get')
def _test_actions_instance_not_found_in_compute_api(self,
actions, method_translations=None, body_map=None,
args_map=None):
method_translations = method_translations or {}
body_map = body_map or {}
args_map = args_map or {}
for action in actions:
method = method_translations.get(action)
body = body_map.get(action)
self.mox.StubOutWithMock(self.compute_api,
method or action.replace('_', ''))
self._test_instance_not_found_in_compute_api(
action, method=method, body=body,
compute_api_args_map=args_map)
# Re-mock this.
self.mox.StubOutWithMock(self.compute_api, 'get')
def _test_actions_with_non_existed_instance(self, actions, body_map=None):
body_map = body_map or {}
for action in actions:
self._test_non_existing_instance(action,
body_map=body_map)
# Re-mock this.
self.mox.StubOutWithMock(self.compute_api, 'get')
def _test_actions_raise_conflict_on_invalid_state(
self, actions, method_translations=None, body_map=None,
args_map=None, exception_args=None):
method_translations = method_translations or {}
body_map = body_map or {}
args_map = args_map or {}
exception_args = exception_args or {}
for action in actions:
method = method_translations.get(action)
exception_arg = exception_args.get(action)
self.mox.StubOutWithMock(self.compute_api,
method or action.replace('_', ''))
self._test_invalid_state(action, method=method,
body_map=body_map,
compute_api_args_map=args_map,
exception_arg=exception_arg)
# Re-mock this.
self.mox.StubOutWithMock(self.compute_api, 'get')
def _test_actions_with_locked_instance(self, actions,
method_translations=None,
body_map=None, args_map=None):
method_translations = method_translations or {}
body_map = body_map or {}
args_map = args_map or {}
for action in actions:
method = method_translations.get(action)
body = body_map.get(action)
self.mox.StubOutWithMock(self.compute_api,
method or action.replace('_', ''))
self._test_locked_instance(action, method=method, body=body,
compute_api_args_map=args_map)
# Re-mock this.
self.mox.StubOutWithMock(self.compute_api, 'get')
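# --- Hedged illustration (not part of the original file) ---
# A concrete admin-action test would mix these helpers in roughly like
# this; FakeServerController is a hypothetical stand-in:
#
#   class PauseActionTest(CommonTests):
#       def setUp(self):
#           super(PauseActionTest, self).setUp()
#           self.controller = FakeServerController()  # hypothetical
#           self.compute_api = self.controller.compute_api
#           self._api_version = '2.1'
#           self.mox.StubOutWithMock(self.compute_api, 'get')
#
#       def test_pause(self):
#           self._test_actions(['_pause'])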
|
akintoey/django
|
refs/heads/master
|
tests/admin_checks/__init__.py
|
12133432
| |
vivekanand1101/neutron
|
refs/heads/master
|
neutron/db/__init__.py
|
12133432
| |
asimshankar/tensorflow
|
refs/heads/master
|
tensorflow/examples/adding_an_op/__init__.py
|
12133432
| |
google-code/android-scripting
|
refs/heads/master
|
python/src/Lib/bsddb/test/__init__.py
|
12133432
| |
niwinz/tornado-webtools
|
refs/heads/master
|
tests/template/__init__.py
|
12133432
| |
simbs/edx-platform
|
refs/heads/master
|
openedx/core/djangoapps/content/course_structures/api/v0/__init__.py
|
12133432
| |
MoritzS/django
|
refs/heads/master
|
tests/view_tests/app4/__init__.py
|
12133432
| |
janocat/odoo
|
refs/heads/8.0
|
addons/marketing_campaign/__init__.py
|
380
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import marketing_campaign
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
crccheck/atx-bandc
|
refs/heads/master
|
bandc/apps/agenda/management/__init__.py
|
12133432
| |
mbauskar/sapphire-erpnext
|
refs/heads/master
|
erpnext/support/doctype/warranty_claim/__init__.py
|
12133432
|