code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
#EMAIL SERVER SETTINGS
CWDPAGEEMAIL = 'cronwatchdog@yourdomain.com' #"From" address used on CWD notification mail
CWDDOMAIN = 'yourdomain.com' #Domain passed during the SMTP SSL handshake
SMTPSERVER = 'yoursmtpserver.com' #SMTP server hostname
SMTPUSER = 'yoursmtpuser' #SMTP user, only needed when authentication/SSL is enabled
SMTPPORT = '123' #SMTP port (kept as a string by the consumers of this file)
SMTPPASSWORD = 'yourpassword' #SMTP password for authentication
SMTPSSL = True #True -> connect with SSL and authenticate with SMTPUSER/SMTPPASSWORD
#SQLALCHEMY DATABASE URI: MySQL and SQLITE have been tested. Others should work.
DATABASEURI = 'sqlite:///cwd.db'
#SUPER SECRET ACCESS KEY
#MUST MATCH THE KEY in cwd_status push.
API_KEY='YOURSECRETKEY'
#GUI USER/PASSWORD
ADMINUSER='admin'
ADMINPASS='changeme' | tmkdev/cwd | configuration_dist.py | Python | gpl-2.0 | 724 |
import os
import glob
import numpy
import pyfits
import warnings
import select
import logging
import subprocess
import time
import SPARTATools
import scipy
import matplotlib.pyplot as pyplot
class Derotator( object ):
    """Proxy for the field derotator stage.

    Tracks the commanded angle (degrees) and encoder position
    (milli-degrees, wrapped at one turn) and issues ciiControl SETUP
    commands through the parent connection's sendCommand().
    """
    def __init__(self, parent):
        # parent must expose sendCommand(command)
        self.parent = parent
        self.angle = 0.0
        self.encoder = 0

    def initialize(self):
        """Send an (empty) initialization command to the controller."""
        self.parent.sendCommand("")

    def moveToAngle(self, angle):
        """Drive the derotator to an absolute angle, given in degrees."""
        encoderTarget = angle * 1000  # controller works in milli-degrees
        self.parent.sendCommand(
            "msgSend -n wci1ao ciiControl SETUP \" -function INS.DROT.ENC "
            + str(encoderTarget) + " \"")
        self.angle = angle
        self.encoder = encoderTarget % 360000

    def moveAngleRel(self, deltaAngle):
        """Rotate by deltaAngle degrees relative to the current position."""
        self.parent.sendCommand(
            "msgSend -n wci1ao ciiControl SETUP \" -function INS.DROT.ENCREL "
            + str(deltaAngle * 1000) + " \"")
        # Keep the bookkeeping wrapped to one full turn.
        self.angle = (self.angle + deltaAngle) % 360.0
        self.encoder = (self.encoder + deltaAngle * 1000) % 360000
class ParabolicMirror( object ):
    """Proxy for the parabolic mirror tip/tilt stage."""
    def __init__(self, parent):
        # parent must expose sendCommand(command)
        self.parent = parent
        self.Tip = 0.0
        self.Tilt = 0.0

    def initialize(self, Tip, Tilt):
        """Record the current tip/tilt without commanding the hardware."""
        self.Tip = Tip
        self.Tilt = Tilt

    def moveToTip(self, Tip):
        """Command an absolute tip position (encoder units)."""
        self.parent.sendCommand(
            "msgSend -n wci1ao ciiControl SETUP \" -function INS.PMTIP.ENC "
            + str(Tip) + " \"")
        self.Tip = Tip

    def moveToTilt(self, Tilt):
        """Command an absolute tilt position (encoder units)."""
        self.parent.sendCommand(
            "msgSend -n wci1ao ciiControl SETUP \" -function INS.PMTIL.ENC "
            + str(Tilt) + " \"")
        self.Tilt = Tilt
class FieldLens( object ):
    """Proxy for the field lens X/Y stage.

    The controller only accepts relative moves (DX/DY), so absolute
    targets are translated into offsets from the last known position.
    """
    def __init__(self, parent):
        # parent must expose sendCommand(command)
        self.parent = parent
        self.x = 0.0
        self.y = 0.0

    def initialize(self, x, y):
        """Record the current (x, y) position without moving the stage."""
        self.x = x
        self.y = y

    def moveToX(self, x):
        """Move to absolute x by commanding the offset from here."""
        dx = x - self.x
        self.x = x
        self.parent.sendCommand(
            "msgSend -n wci1ao ciiControl SETUP \" -function INS.FLDL.DX "
            + str(dx) + " INS.FLDL.DY 0 \"")
        # A legacy implementation stepped toward the target in <=10-unit
        # increments; it was dead (string-quoted) code and has been dropped.

    def moveToY(self, y):
        """Move to absolute y by commanding the offset from here."""
        dy = y - self.y
        self.y = y
        self.parent.sendCommand(
            "msgSend -n wci1ao ciiControl SETUP \" -function INS.FLDL.DX 0 INS.FLDL.DY "
            + str(dy) + " \"")
        # Same note as moveToX: the stepped-move variant was removed.
class VLTConnection( object ):
    """
    VLTConnection: This object allows python to log into a computer
    running the VLT SPARTA Light software and do the following:
       - Send a new flat pattern to the DM
       - Retrieve data from the RTC (slopes, intensities, etc...)
       - what else?

    All hardware interaction funnels through sendCommand(); when
    ``simulate`` is True the commands are only printed, so the full API
    can be exercised without a live SPARTA connection.
    """
    def __init__(self, simulate=True, datapath=None):
        # Directory used for exchanging FITS files with the RTC.
        if datapath:
            self.datapath = datapath
        else:
            self.datapath = os.path.expanduser('~')+'/data/'
        self.CDMS = CDMS()
        self.sim = simulate
        self.modalBasis = None
        logging.basicConfig(level=logging.DEBUG)
        self.fieldLens = FieldLens(self)
        self.derotator = Derotator(self)
        self.PM = ParabolicMirror(self)
    def simulate(self):
        """Switch to simulation mode: commands are printed, not executed."""
        self.sim = True
    def goLive(self):
        """Switch to live mode: commands are executed on the SPARTA host."""
        self.sim = False
    def sendCommand(self, command, response=False):
        """Run a shell command, or print it in simulation mode.

        Returns the command's stdout when ``response`` is True
        ("SIMULATION" in simulation mode), otherwise None.
        """
        if not(self.sim):
            #logging.debug("Executing '%s'" % command)
            results = subprocess.check_output(command, shell=True)
            if response:
                return results
        else:
            print("In Simulation mode. I would have executed the following command")
            print("'%s'" % command)
            if response:
                return "SIMULATION"
    def parse(self, text, type):
        """Parse a SPARTA reply: values are comma-separated on line 2."""
        lines = text.split('\n')
        retval = numpy.array(lines[1].split(','), dtype=type)
        return retval
    def applyPAF(self, paf):
        """Push every parameter of a PAF_File to the CDMS via cdmsSetProp."""
        name = paf.name
        for key in paf.parameters.keys():
            par = paf.parameters[key]
            # Pick the cdmsSetProp type flag from the python type.
            # (numpy.int/numpy.float were aliases of the builtins and are
            # removed in modern numpy, so test the builtins directly.)
            if isinstance(par, int):
                flag = '-i'
            elif isinstance(par, float):
                flag = '-d'
            else:
                flag = '-s'
            command = "cdmsSetProp "+name+" "+key+" "+flag+" "+str(par)
            self.sendCommand(command)
    def saveMap(self, mapname, filename):
        """Dump a CDMS map from the RTC to a local FITS file."""
        localfile = self.datapath+filename
        command = "cdmsSave -f "+localfile+" "+mapname
        self.sendCommand(command)
    def updateMap(self, mapname):
        """Refresh the local mirror of a map from the RTC's current copy."""
        localfile = self.datapath+self.CDMS.maps[mapname].outfile
        command = "cdmsSave -f "+localfile+" "+mapname
        self.sendCommand(command)
        self.CDMS.maps[mapname].load(localfile)
    def transmitMap(self, mapname, update=None):
        """Send the local copy of a map to the RTC; optionally run
        <update>.update ALL afterwards so the subsystem picks it up."""
        localfile = self.datapath+self.CDMS.maps[mapname].outfile
        self.CDMS.maps[mapname].write(path=self.datapath)
        command = "cdmsLoad -f "+localfile+" "+mapname+" --rename"
        self.sendCommand(command)
        if update:
            command = "msgSend \"\" spaccsServer EXEC \" -command "+update+".update ALL\""
            self.sendCommand(command)
    def zeroMap(self, mapName):
        """Zero out a map locally and push it to the RTC."""
        self.CDMS.maps[mapName].scale(0.0)
        self.transmitMap(mapName)
    def mirrorCDMS(self):
        """Refresh every locally-mirrored map from the RTC."""
        for mapname in self.CDMS.maps.keys():
            self.updateMap(mapname)
    def applyZernike(self, coeffs):
        """Add Zernike-mode offsets (requires a computed modal basis) to
        the HO reference positions and push them."""
        self.updateMap("HOCtr.ACT_POS_REF_MAP")
        offsets = self.modalBasis.getZernikeOffsets(coeffs)
        self.CDMS.maps["HOCtr.ACT_POS_REF_MAP"].delta(offsets)
        self.transmitMap("HOCtr.ACT_POS_REF_MAP", update="HOCtr")
    def set_Tip(self, tip):
        """Set the TT mirror tip reference position."""
        self.CDMS.maps['TTCtr.ACT_POS_REF_MAP'].data[0][0] = tip
        self.transmitMap('TTCtr.ACT_POS_REF_MAP', update='TTCtr')
    def set_Tilt(self, tilt):
        """Set the TT mirror tilt reference position."""
        self.CDMS.maps['TTCtr.ACT_POS_REF_MAP'].data[0][1] = tilt
        self.transmitMap('TTCtr.ACT_POS_REF_MAP', update='TTCtr')
    def get_TipTilt(self):
        """Read both TT reference positions from the RTC."""
        tip = self.sendCommand("msgSend \"\" CDMSGateway GETMAP \"-object TTCtr.ACT_POS_REF_MAP -function 0,0 1,0\"", response=True)
        return self.parse(tip, numpy.float32)
    def get_Tip(self):
        """Read the tip reference position from the RTC."""
        tip = self.sendCommand("msgSend \"\" CDMSGateway GETMAP \"-object TTCtr.ACT_POS_REF_MAP -function 0,0\"", response=True)
        return self.parse(tip, numpy.float32)
    def get_Tilt(self):
        """Read the tilt reference position from the RTC."""
        tilt = self.sendCommand("msgSend \"\" CDMSGateway GETMAP \"-object TTCtr.ACT_POS_REF_MAP -function 1,0\"", response=True)
        return self.parse(tilt, numpy.float32)
    def set_TT_gain(self, gain):
        """Set the tip/tilt controller loop gain and apply it."""
        self.sendCommand("msgSend \"\" CDMSGateway SETMAP \"-object TTCtr.TERM_B -function 0,0="+str("%.2g" % gain)+"\"")
        self.sendCommand("msgSend \"\" spaccsServer EXEC \"-command TTCtr.update ALL\"")
    def set_HO_gain(self, gain):
        """Set the high-order controller loop gain and apply it."""
        self.sendCommand("msgSend \"\" CDMSGateway SETMAP \"-object HOCtr.TERM_B -function 0,0="+str("%.2g" % gain)+"\"")
        self.sendCommand("msgSend \"\" spaccsServer EXEC \"-command HOCtr.update ALL\"")
    def getTTFocus(self):
        """Measure averaged tip/tilt and focus from the loop monitor.

        Returns a 3-element array: [tip, tilt, focus].
        """
        TT = self.sendCommand("msgSend \"\" spaccsServer EXEC \" -command LoopMonitor.measureTipTilt 10 \"", response=True)
        TT = self.parse(TT, numpy.float32)
        focus = self.sendCommand("msgSend \"\" spaccsServer EXEC \" -command LoopMonitor.measureFocus 10 \"", response=True)
        focus = self.parse(focus, numpy.float32)
        return numpy.append(TT, focus)
    def getIntensities(self):
        """Ask the RTC to average WFS frames and return the reply.

        NOTE(review): the original returned an undefined name
        ``intensities`` (a NameError); the raw msgSend reply is returned
        instead -- confirm the expected parsing against AcqOptimiser.
        """
        return self.sendCommand("msgSend \"\" spaccsServer EXEC \" -command AcqOptimiser.measureFrames 10 \"", response=True)
    def calc_CommandMatrix(self, nFiltModes=20):
        """Compute the modal basis from the measured interaction matrices,
        derive the control matrix and projection maps, and push them all
        to the RTC."""
        self.modalBasis = SPARTATools.modalBasis(self.CDMS.maps['HORecnCalibrat.RESULT_IM'].data, self.CDMS.maps['TTRecnCalibrat.RESULT.IM'].data, nFiltModes)
        self.modalBasis.computeSystemControlMatrix()
        self.CDMS.maps['Recn.REC1.CM'].replace(self.modalBasis.CM)
        self.CDMS.maps['HOCtr.TT_TO_HO'].replace(self.modalBasis.TT2HO)
        self.CDMS.maps['HOCtr.HO_TO_TT'].replace(self.modalBasis.HO2TT)
        self.CDMS.maps['HOCtr.SMA_BASIS'].replace(self.modalBasis.SMAbasis)
        #self.CDMS.maps['HOCtr.AWF_IM_KERNEL'].replace(self.modalBasis.AWFbasis)
        self.CDMS.maps['HOCtr.PRA_PISTON_MODE'].replace(self.modalBasis.pistonMode)
        self.CDMS.maps['HOCtr.PRA_PISTON_PROJECTION'].replace(self.modalBasis.pistonProj)
        self.CDMS.maps['LoopMonitor.SLOPES2FOCUS'].replace(self.modalBasis.S2Z[2,:])
        self.CDMS.maps['LoopMonitor.SLOPES2TT'].replace(self.modalBasis.S2Z[0:2,:])
        self.CDMS.maps['LoopMonitor.SLOPES2FOCUS'].write()
        self.CDMS.maps['LoopMonitor.SLOPES2TT'].write()
        self.transmitMap('HOCtr.TT_TO_HO', update='HOCtr')
        self.transmitMap('HOCtr.HO_TO_TT', update='HOCtr')
        self.transmitMap('Recn.REC1.CM', update='Recn')
        self.transmitMap('HOCtr.SMA_BASIS', update='HOCtr')
        #self.transmitMap('HOCtr.AWF_IM_KERNEL', update='HOCtr')
        self.transmitMap('HOCtr.PRA_PISTON_MODE', update='HOCtr')
        self.transmitMap('HOCtr.PRA_PISTON_PROJECTION', update='HOCtr')
        self.transmitMap('LoopMonitor.SLOPES2FOCUS', update='LoopMonitor')
        self.transmitMap('LoopMonitor.SLOPES2TT', update='LoopMonitor')
        # (A redundant duplicate replace of Recn.REC1.CM was removed here.)
    def dumpCommandMatrix(self, nFiltModes=20):
        """Compute the modal basis and dump its matrices to FITS files in
        the current directory (for offline inspection)."""
        self.modalBasis = SPARTATools.modalBasis(self.CDMS.maps['HORecnCalibrat.RESULT_IM'].data, self.CDMS.maps['TTRecnCalibrat.RESULT.IM'].data, nFiltModes)
        self.modalBasis.computeSystemControlMatrix()
        hdu = pyfits.PrimaryHDU(numpy.array(self.modalBasis.M2V, dtype=numpy.float32))
        hdu.writeto("M2V.fits", clobber=True)
        hdu = pyfits.PrimaryHDU(numpy.array(self.modalBasis.TT2HO, dtype=numpy.float32))
        hdu.writeto("TT2HO.fits", clobber=True)
        hdu = pyfits.PrimaryHDU(numpy.array(self.modalBasis.S2M, dtype=numpy.float32))
        hdu.writeto("S2M.fits", clobber=True)
        hdu = pyfits.PrimaryHDU(numpy.array(self.modalBasis.S2Z, dtype=numpy.float32))
        hdu.writeto("S2Z.fits", clobber=True)
        zeroCM = numpy.zeros(self.modalBasis.CM.shape, dtype=numpy.float32)
        hdu = pyfits.PrimaryHDU(self.modalBasis.CM)
        hdu.writeto("CM.fits", clobber=True)
        zeroCM = numpy.zeros(62, dtype=int)
        hdu = pyfits.PrimaryHDU(zeroCM)
        hdu.writeto("RTC_HODM_UNUSED_ACT_MAP.fits", clobber=True)
        # NOTE(review): the next writeto reuses the zeroCM HDU above --
        # confirm HODM_SLUG_ACT_GAINS really should be all zeros.
        hdu.writeto("HODM_SLUG_ACT_GAINS.fits", clobber=True)
        CM = numpy.dot(self.modalBasis.M2V, self.modalBasis.S2M)
        tiltHO = numpy.dot(self.modalBasis.TT2HO, CM[60:62,:])
        aux_proj = numpy.zeros((62, 136), dtype=numpy.float32)
        aux_proj[0:60,:] += tiltHO
        hdu = pyfits.PrimaryHDU(aux_proj)
        hdu.writeto("CLMatrixOptimiser.AUX_PROJ.fits", clobber=True)
        self.CDMS.maps["RTC.MODAL_GAINS"].write("RTC.MODAL_GAINS.fits")
    def updateRefSlopes(self, x, y):
        """Set a uniform (x, y) offset on all 68 subaperture ref slopes."""
        slopes = numpy.zeros(136)
        slopes[0::2] += x
        slopes[1::2] += y
        self.CDMS.maps["Acq.DET1.REFSLP"].replace(slopes)
        self.transmitMap("Acq.DET1.REFSLP", update='Acq')
    def replaceSlopesWithCurrent(self, recordingName="BetaPic"):
        """Record a circular buffer and adopt its mean gradients as the
        new reference slopes.  (Fixed: parameter was misspelled
        ``rec5rdingName``, which made this method raise NameError.)"""
        self.measureCircularBuffer(recordingName=recordingName)
        outfile="gradients.fits"
        time.sleep(2.0)
        SPARTATools.computeGradients(outfile, recordingName)
        self.updateReferenceSlopes(outfile)
    def updateReferenceSlopes(self, filename):
        """Load reference slopes from a FITS file and push them."""
        self.CDMS.maps["Acq.DET1.REFSLP"].load(filename)
        self.transmitMap("Acq.DET1.REFSLP", update='Acq')
    def measureCircularBuffer(self, recordingName="Arcturus", nframes=100):
        """Record nframes of loop data to datapath via the LoopRecorder."""
        command = "msgSend \"\" spaccsServer SETUP \"-function LoopRecorder.FILE_BASENAME "+recordingName+"\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer SETUP \"-function LoopRecorder.FILE_DIRNAME "+self.datapath+"\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer SETUP \"-function LoopRecorder.REQUESTED_FRAMES "+str(nframes)+"\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer EXEC \" -command LoopRecorder.run\""
        self.sendCommand(command)
        print("Took Circular Buffer")
    def measurePixelFrames(self, recordingName="Arcturus", nframes=100):
        """Record nframes of raw WFS pixels via the PixelRecorder."""
        command = "msgSend \"\" spaccsServer SETUP \"-function PixelRecorder.FILE_BASENAME "+recordingName+"\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer SETUP \"-function PixelRecorder.FILE_DIRNAME "+self.datapath+"\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer SETUP \"-function PixelRecorder.REQUESTED_FRAMES "+str(nframes)+"\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer EXEC \" -command PixelRecorder.run\""
        self.sendCommand(command)
        print("Took Pixel Frame")
    def measureNewHORefPositions(self, recordingName='TWHydra', nframes=25):
        """Close the HO loop (TT open), record actuator positions, reopen
        the loop, and adopt the average as the new HO flat."""
        command = "msgSend \"\" spaccsServer EXEC \" -command TTCtr.openLoop\""
        self.sendCommand(command)
        time.sleep(2.0)
        command = "msgSend \"\" spaccsServer EXEC \" -command HOCtr.closeLoop\""
        self.sendCommand(command)
        time.sleep(5.0)
        # Alternative (currently unused) direct measurement:
        # command = "msgSend \"\" CommandGateway EXEC \" LoopMonitor.measureRefDMPos "+str(nframes)+"\""
        # self.sendCommand(command)
        print("Recording Frames")
        self.measureCircularBuffer(recordingName=recordingName)
        time.sleep(2.0)
        print("Opening Loop")
        command = "msgSend \"\" spaccsServer EXEC \" -command HOCtr.openLoop\""
        self.sendCommand(command)
        time.sleep(2.0)
        self.averageHOPositions(recordingName)
    def measureNewTTRefPositions(self, recordingName='TWHydra', nframes=25):
        """Close the TT loop (HO open), record TTM positions, reopen the
        loop, and adopt the average as the new TT flat."""
        command = "msgSend \"\" spaccsServer EXEC \" -command HOCtr.openLoop\""
        self.sendCommand(command)
        time.sleep(2.0)
        command = "msgSend \"\" spaccsServer EXEC \" -command TTCtr.closeLoop\""
        self.sendCommand(command)
        time.sleep(5.0)
        # Alternative (currently unused) direct measurement:
        # command = "msgSend \"\" CommandGateway EXEC \" LoopMonitor.measureRefTTMPos "+str(nframes)+"\""
        # self.sendCommand(command)
        print("Recording Frames")
        self.measureCircularBuffer(recordingName=recordingName)
        time.sleep(2.0)
        print("Opening Loop")
        command = "msgSend \"\" spaccsServer EXEC \" -command TTCtr.openLoop\""
        self.sendCommand(command)
        time.sleep(2.0)
        self.averageTTPositions(recordingName)
    def averageActuatorPositions(self, recordingName):
        """Compute a new DM flat from a recording and load it.

        NOTE(review): unlike averageHOPositions this does not issue
        HOCtr.update afterwards -- confirm that is intentional.
        """
        outfile= self.datapath+"new_flat.fits"
        SPARTATools.computeNewBestFlat(outfile, self.datapath, recordingName)
        command = "cdmsLoad -f "+outfile+" HOCtr.ACT_POS_REF_MAP --rename"
        self.sendCommand(command)
    def averageTTPositions(self, recordingName):
        """Compute and apply a new TT flat from a recording."""
        outfile=self.datapath+"new_TT_flat.fits"
        SPARTATools.computeNewTTFlat(outfile, self.datapath, recordingName)
        command = "cdmsLoad -f "+outfile+" TTCtr.ACT_POS_REF_MAP --rename"
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer EXEC \" -command TTCtr.update ALL\""
        self.sendCommand(command)
    def averageHOPositions(self, recordingName):
        """Compute and apply a new HO flat from a recording."""
        outfile=self.datapath+"new_HO_flat.fits"
        SPARTATools.computeNewHOFlat(outfile, self.datapath, recordingName)
        command = "cdmsLoad -f "+outfile+" HOCtr.ACT_POS_REF_MAP --rename"
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer EXEC \" -command HOCtr.update ALL\""
        self.sendCommand(command)
    def averageIntensities(self):
        """Compute average subaperture intensities into a FITS file."""
        outfile=self.datapath+"averageIntensities.fits"
        SPARTATools.computeIntensities(outfile)
    def set_CommandMatrix(self):
        """Push the locally-stored control matrix to the reconstructor."""
        self.transmitMap('Recn.REC1.CM', update='Recn')
    def save_CommandMatrixPlot(self):
        """Fetch the current control matrix and save an image of it."""
        self.updateMap("Recn.REC1.CM")
        fig = pyplot.figure(0)
        ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])
        ax.imshow(self.CDMS.maps['Recn.REC1.CM'].data)
        fig.savefig("CM.png")
    def disturb(self, disturbance="disturbance.fits", baseName = "TWHydra"):
        """Play a disturbance sequence on both DM and TTM while recording
        loop data and pixels; blocks until the sequence completes."""
        command = "msgSend \"\" spaccsServer SETUP \"-function LoopRecorder.FILE_DIRNAME "+self.datapath+"\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer SETUP \"-function PixelRecorder.FILE_DIRNAME "+self.datapath+"\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer SETUP \"-function PixelRecorder.FILE_BASENAME "+baseName+"_pixels\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer SETUP \"-function LoopRecorder.FILE_BASENAME "+baseName+"_frames\""
        self.sendCommand(command)
        # REQUESTED_FRAMES 0 means "record until told to idle".
        command = "msgSend \"\" spaccsServer SETUP \"-function LoopRecorder.REQUESTED_FRAMES 0\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer SETUP \"-function PixelRecorder.REQUESTED_FRAMES 0\""
        self.sendCommand(command)
        self.CDMS.paf["HOCtrDisturb.CFG.DYNAMIC"].update("FILENAME", self.datapath+"DM"+disturbance)
        self.CDMS.paf["TTCtrDisturb.CFG.DYNAMIC"].update("FILENAME", self.datapath+"TTM"+disturbance)
        # Start both disturbance players at the same wall-clock second.
        startTime = int(time.time()+20)
        self.CDMS.paf["HOCtrDisturb.CFG.DYNAMIC"].update("START_AT", startTime)
        self.CDMS.paf["TTCtrDisturb.CFG.DYNAMIC"].update("START_AT", startTime)
        self.applyPAF(self.CDMS.paf["HOCtrDisturb.CFG.DYNAMIC"])
        self.applyPAF(self.CDMS.paf["TTCtrDisturb.CFG.DYNAMIC"])
        command="msgSend \"\" spaccsServer EXEC \"-command LoopRecorder.run\""
        self.sendCommand(command)
        command="msgSend \"\" spaccsServer EXEC \"-command PixelRecorder.run\""
        self.sendCommand(command)
        command="msgSend \"\" spaccsServer EXEC \" -command HOCtrDisturb.run \""
        self.sendCommand(command)
        command="msgSend \"\" spaccsServer EXEC \" -command TTCtrDisturb.run\""
        self.sendCommand(command)
        # Poll until the HO disturbance sequence reports completion.
        command = "dbRead \"<alias>SPARTA:HOCtrDisturb.percent_complete\""
        complete = 0
        time.sleep(5.0)
        while complete < 100:
            wait = self.sendCommand(command, response=True)
            complete = float(wait.split()[-1])
            print(complete)
            time.sleep(5.0)
        command="msgSend \"\" spaccsServer EXEC \"-command LoopRecorder.idle\""
        self.sendCommand(command)
        command="msgSend \"\" spaccsServer EXEC \"-command PixelRecorder.idle\""
        self.sendCommand(command)
    def disturbHO(self, disturbType="SINE", rng=0.5, max=0.95, period=40.0, actNum=5):
        """Generate and publish a HO disturbance frame.

        ``max`` shadows the builtin but is kept for caller compatibility.
        """
        fname = self.datapath+"Disturbances/disturbanceFrame.fits"
        SPARTATools.computeHODisturbanceFrame(20000,fname, rng=rng, max=max, disturbType=disturbType, period=period, actNum=actNum)
        command = "spaciaortdfwDisturbPubl -d HODM -f "+fname
        self.sendCommand(command)
    def disturbTT(self, tip=0.05, tilt=0.05, waveshape='SQUARE'):
        """Generate and publish a tip/tilt disturbance frame."""
        fname = self.datapath+"disturbanceTTFrame.fits"
        SPARTATools.computeTTDisturbanceFrame(1000, fname, tip, tilt, waveshape=waveshape)
        command = "spaciaortdfwDisturbPubl -d ITTM -f "+fname+" -m 1000"
        self.sendCommand(command)
    def measure_HOIM(self, config=None):
        """Run the HO interaction-matrix calibration and wait for it."""
        if config:
            self.applyPAF(self.CDMS.paf["HORecnCalibrat.CFG.DYNAMIC"])
        command = "msgSend \"\" spaccsServer EXEC \" -command HORecnCalibrat.update ALL\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer EXEC \" -command HORecnCalibrat.run\""
        self.sendCommand(command)
        # Poll percent_complete instead of waitIdle (which blocks msgSend).
        command = "dbRead \"<alias>SPARTA:HORecnCalibrat.percent_complete\""
        complete = 0
        time.sleep(5.0)
        while complete < 100:
            wait = self.sendCommand(command, response=True)
            complete = float(wait.split()[-1])
            time.sleep(5.0)
    def moveFieldLens(self, x, y):
        """Move the field lens to absolute (x, y)."""
        self.fieldLens.moveToX(x)
        self.fieldLens.moveToY(y)
    def movePM(self, Tip, Tilt):
        """Move the parabolic mirror to absolute (Tip, Tilt)."""
        self.PM.moveToTip(Tip)
        self.PM.moveToTilt(Tilt)
    def moveDerotator(self, angle):
        """Move the derotator to an absolute angle (degrees)."""
        self.derotator.moveToAngle(angle)
    def measure_TTIM(self, config=None):
        """Run the TT interaction-matrix calibration and wait for it."""
        if config:
            self.applyPAF(self.CDMS.paf["TTRecnCalibrat.CFG.DYNAMIC"])
        command = "msgSend \"\" spaccsServer EXEC \" -command TTRecnCalibrat.update ALL\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer EXEC \" -command TTRecnCalibrat.run\""
        self.sendCommand(command)
        # Poll percent_complete instead of waitIdle (which blocks msgSend).
        command = "dbRead \"<alias>SPARTA:TTRecnCalibrat.percent_complete\""
        complete = 0
        time.sleep(5.0)
        while complete < 100:
            wait = self.sendCommand(command, response=True)
            complete = float(wait.split()[-1])
            time.sleep(5.0)
    def setup_HOIM(self, amplitude=1.0, noise=0.05, skip=0.05,
                   period=0.2, mode_cycles=1, cycles=3):
        """Stage the HO calibration parameters in the local PAF copy."""
        self.CDMS.paf["HORecnCalibrat.CFG.DYNAMIC"].update("ACTUATION_MATRIX", "HORecnCalibrat.USER_60")
        self.CDMS.paf["HORecnCalibrat.CFG.DYNAMIC"].update("ACTUATION_MATRIX_INV", "HORecnCalibrat.USER_INV_60")
        self.CDMS.paf["HORecnCalibrat.CFG.DYNAMIC"].update("TIME_UNIT",
                                                           "SECONDS")
        self.CDMS.paf["HORecnCalibrat.CFG.DYNAMIC"].update("WAVE_PERIOD",
                                                           period)
        self.CDMS.paf["HORecnCalibrat.CFG.DYNAMIC"].update("AMPLITUDE",
                                                           amplitude)
        self.CDMS.paf["HORecnCalibrat.CFG.DYNAMIC"].update("NOISE_THRESHOLD",
                                                           noise)
        self.CDMS.paf["HORecnCalibrat.CFG.DYNAMIC"].update("SKIP_TIME", skip)
        self.CDMS.paf["HORecnCalibrat.CFG.DYNAMIC"].update("CYCLES", cycles)
        self.CDMS.paf["HORecnCalibrat.CFG.DYNAMIC"].update("MODE_CYCLES", mode_cycles)
    def setup_TTIM(self, amplitude=0.05, skip=0.2,
                   period=2.0, mode_cycles=1, cycles=3):
        """Stage the TT calibration parameters in the local PAF copy."""
        self.CDMS.paf["TTRecnCalibrat.CFG.DYNAMIC"].update("TIME_UNIT",
                                                           "SECONDS")
        self.CDMS.paf["TTRecnCalibrat.CFG.DYNAMIC"].update("WAVE_PERIOD",
                                                           period)
        self.CDMS.paf["TTRecnCalibrat.CFG.DYNAMIC"].update("AMPLITUDE",
                                                           amplitude)
        self.CDMS.paf["TTRecnCalibrat.CFG.DYNAMIC"].update("SKIP_TIME", skip)
        self.CDMS.paf["TTRecnCalibrat.CFG.DYNAMIC"].update("CYCLES", cycles)
        self.CDMS.paf["TTRecnCalibrat.CFG.DYNAMIC"].update("MODE_CYCLES", mode_cycles)
        self.CDMS.paf["TTRecnCalibrat.CFG.DYNAMIC"].update("INTERACTION_MATRIX", 'TTRecnCalibrat.RESULT.IM')
    def get_HOIM(self):
        """Refresh the local copy of the HO interaction matrix."""
        self.updateMap('HORecnCalibrat.RESULT_IM')
    def get_InteractionMatrices(self):
        """Refresh the local copies of both interaction matrices."""
        self.updateMap('HORecnCalibrat.RESULT_IM')
        self.updateMap('TTRecnCalibrat.RESULT.IM')
    def set_InteractionMatrices(self, HOIM_file, TTIM_file):
        """Load both interaction matrices from local FITS files."""
        self.CDMS.maps["HORecnCalibrat.RESULT_IM"].load(HOIM_file)
        self.CDMS.maps["TTRecnCalibrat.RESULT.IM"].load(TTIM_file)
    def changePixelTapPoint(self, tp):
        """Select the WFS pixel tap point: RAW, CALIB or BACKGROUND.

        (Fixed: the original used an undefined name ``escape`` under a
        bare ``except``, so invalid input printed two errors and valid
        input could mask real failures.)
        """
        if tp not in ("RAW", "CALIB", "BACKGROUND"):
            print("Error! Unrecognized tap point!")
            return
        command="cdmsSetProp Acq.CFG.DYNAMIC DET1.PIXEL_TAP -s \""+tp+"\""
        self.sendCommand(command)
        command = "msgSend \"\" spaccsServer EXEC \" -command Acq.update ALL\""
        self.sendCommand(command)
    def measureBackground(self, nframes):
        """Measure a detector background on RAW pixels, fetch the map,
        then return the tap point to CALIB."""
        self.changePixelTapPoint("RAW")
        command="msgSend \"\" CommandGateway EXEC \"AcqOptimiser.measureBackground "+str(nframes)+"\""
        self.sendCommand(command)
        self.updateMap('Acq.DET1.BACKGROUND')
        self.changePixelTapPoint("CALIB")
class CDMS_Map( object ):
    """In-memory mirror of one CDMS map: a numpy array plus FITS I/O.

    Parameters
    ----------
    name : str
        CDMS map name; the FITS output file is ``<name>.fits``.
    ax1, ax2 : int
        Array dimensions.
    dtype : str
        One of 'float32', 'float16', 'int32', 'int16'.
    filltype : float
        0.0 -> zeros, >= 1.0 -> constant fill with that value,
        -1.0 -> 1-D index ramp (ax2 ignored).
    bscale : str or bool
        FITS scaling mode used by write() ('minmax', 'True', or other).
    """
    def __init__(self, name, ax1, ax2, dtype, filltype, bscale):
        if dtype == "float32":
            self.dtype = numpy.float32
        elif dtype == "float16":
            self.dtype = numpy.float16
        elif dtype == "int32":
            self.dtype = numpy.int32
        elif dtype == "int16":
            self.dtype = numpy.int16
        else:
            print("Error!")
        if filltype == 0.0:
            self.data = numpy.zeros((ax1, ax2), dtype=self.dtype)
        elif filltype >= 1.0:
            self.data = numpy.ones((ax1, ax2), dtype=self.dtype)*filltype
        elif filltype == -1.0:
            # Ramp map: a 1-D index vector (ax2 is ignored here).
            self.data = numpy.arange(ax1, dtype=self.dtype)
        else:
            print("Error! I can't understand the fill type!")
        # Pristine copy used by revert().
        self.data_template = self.data.copy()
        self.bscale = bscale
        self.outfile = name+'.fits'
    def replace(self, newmap):
        """Replace the map contents, casting to this map's dtype."""
        self.data = self.dtype(newmap).copy()
    def load(self, file):
        """Load the map contents from a FITS file."""
        self.data = pyfits.getdata(file)
    def revert(self):
        """Restore the map to its initial (template) contents."""
        self.data = self.data_template.copy()
    def delta(self, offsets):
        """Add offsets element-wise to the map."""
        self.data += offsets
    def scale(self, factor):
        """Multiply the map element-wise by a factor."""
        self.data *= factor
    def write(self, path=''):
        """Write the map to path + outfile, applying the bscale mode.

        NOTE(review): CDMS.populateMapDefs passes a bool for bscale, so
        the string comparisons below never match there -- confirm
        whether scaling is expected for those maps.
        """
        self.hdu = pyfits.PrimaryHDU(self.data)
        if self.bscale == 'minmax':
            self.hdu.scale(option='minmax')
        elif self.bscale == 'True':
            self.hdu.scale()
        # Suppress pyfits' clobber/truncation UserWarnings for this write
        # only, then restore the default warning behaviour.
        warnings.resetwarnings()
        warnings.filterwarnings('ignore', category=UserWarning, append=True)
        self.hdu.writeto(path+self.outfile, clobber=True)
        warnings.resetwarnings()
        warnings.filterwarnings('always', category=UserWarning, append=True)
class PAF_File( object ):
def __init__(self, filename, name):
self.name = name
self.file = filename
self.parameters = {}
file = open(filename, 'r')
for line in file:
l = line.split()
if len(l) > 0:
if (l[0].find(name) == 0) & (l[0][0] != '#'):
parameter = l[0][len(name)+1:]
if (l[1][:-1].find('.') != -1):
try:
val = numpy.float(l[1][:-1])
except:
val = l[1][:-1]
else:
try:
val = numpy.int(l[1][:-1])
except:
val = l[1][:-1]
self.parameters[parameter] = val
def update(self, parameter, value):
self.parameters[parameter] = value
class CDMS( object ):
    """Local mirror of the CDMS: map definitions plus PAF configuration."""
    def __init__(self):
        self.maps = {}
        self.populateMapDefs()
        self.paf = {}
        self.populatePAF()

    def populateMapDefs(self):
        """Build CDMS_Map objects from CDMS_Map_Definitions.dat (CSV)."""
        definitionFile = os.path.dirname(__file__)+'/CDMS_Map_Definitions.dat'
        df = open(definitionFile, 'r')
        for line in df:
            fields = line.split(',')
            mapName = fields[0]
            # Columns: name, ax1, ax2, dtype, filltype, bscale.
            # (bool(fields[5]) is True for any non-empty string.)
            self.maps[mapName] = CDMS_Map(mapName,
                                          int(fields[1]),
                                          int(fields[2]),
                                          fields[3].strip(),
                                          float(fields[4]),
                                          bool(fields[5]))

    def populatePAF(self):
        """Parse every .paf file found in the package's PAF/ directory."""
        pafdirectory = os.path.dirname(__file__)+'/PAF/'
        for paf in glob.glob(pafdirectory+'*.paf'):
            # Key each PAF_File by its basename without the .paf suffix.
            name = paf[len(pafdirectory):-4]
            self.paf[name] = PAF_File(paf, name)
| soylentdeen/CIAO-commissioning-tools | VLTTools.py | Python | mit | 30,270 |
import pytest
from py.error import ENOENT
import utils.browser
from cfme.fixtures.pytest_selenium import ensure_browser_open, take_screenshot
from fixtures.artifactor_plugin import fire_art_test_hook
from utils.datafile import template_env
from utils.path import log_path
from utils import browser as browser_module, safe_string
# Fixture names whose presence in a test triggers browser setup.
browser_fixtures = {'browser'}
# Accumulated per-test failure data, rendered into the HTML report in
# pytest_sessionfinish.
failed_test_tracking = {
    'tests': list(),
    'total_failed': 0,
    'total_errored': 0,
}
def pytest_namespace():
    """Expose cfme.fixtures.pytest_selenium as the 'sel' pytest namespace."""
    from cfme.fixtures import pytest_selenium as sel
    return {'sel': sel}
def pytest_runtest_setup(item):
    """Ensure a browser is open before any test that uses a browser fixture."""
    requested = set(getattr(item, 'fixturenames', []))
    if requested & browser_fixtures:
        utils.browser.ensure_browser_open()
def pytest_exception_interact(node, call, report):
    # Dump failure artifacts (tracebacks, screenshot) via the artifactor
    # plugin and record the failure for the end-of-session HTML report.
    from fixtures.pytest_store import store
    from httplib import BadStatusLine
    from socket import error
    from utils.browser import WharfFactory
    import urllib2
    val = safe_string(call.excinfo.value.message).decode('utf-8', 'ignore')
    # Network-level failures usually mean the browser (or its wharf
    # container) died; recycle the wharf checkout and restart the browser
    # so subsequent tests are not doomed.
    if isinstance(call.excinfo.value, (urllib2.URLError, BadStatusLine, error)):
        from utils.browser import manager
        if isinstance(manager.factory, WharfFactory):
            manager.factory.wharf.checkin()
            manager.factory.wharf.checkout()
        manager.start()
        manager.ensure_open()
    # One-line summary: exception type plus its (ascii-safe) message.
    short_tb = '{}\n{}'.format(
        call.excinfo.type.__name__, val.encode('ascii', 'xmlcharrefreplace'))
    fire_art_test_hook(
        node, 'filedump',
        description="Traceback", contents=str(report.longrepr), file_type="traceback",
        display_type="danger", display_glyph="align-justify", group_id="pytest-exception",
        slaveid=store.slaveid)
    fire_art_test_hook(
        node, 'filedump',
        description="Short traceback", contents=short_tb, file_type="short_tb",
        display_type="danger", display_glyph="align-justify", group_id="pytest-exception",
        slaveid=store.slaveid)
    # base64 encoded to go into a data uri, same for screenshots
    full_tb = str(report.longrepr).encode('base64').strip()
    # errors are when exceptions are thrown outside of the test call phase
    report.when = getattr(report, 'when', 'setup')
    is_error = report.when != 'call'
    # Data consumed by the failed_browser_tests.html template.
    template_data = {
        'name': node.name,
        'file': node.fspath,
        'is_error': is_error,
        'fail_stage': report.when,
        'short_tb': short_tb,
        'full_tb': full_tb,
    }
    # Before trying to take a screenshot, we used to check if one of the browser_fixtures was
    # in this node's fixturenames, but that was too limited and preventing the capture of
    # screenshots. If removing that conditional now makes this too broad, we should consider
    # an isinstance(val, WebDriverException) check in addition to the browser fixture check that
    # exists here in commit 825ef50fd84a060b58d7e4dc316303a8b61b35d2
    screenshot = take_screenshot()
    template_data['screenshot'] = screenshot.png
    template_data['screenshot_error'] = screenshot.error
    if screenshot.png:
        fire_art_test_hook(
            node, 'filedump',
            description="Exception screenshot", file_type="screenshot", mode="wb",
            contents_base64=True, contents=template_data['screenshot'], display_glyph="camera",
            group_id="pytest-exception", slaveid=store.slaveid)
    if screenshot.error:
        fire_art_test_hook(
            node, 'filedump',
            description="Screenshot error", mode="w", contents_base64=False,
            contents=template_data['screenshot_error'], display_type="danger",
            group_id="pytest-exception", slaveid=store.slaveid)
    # Record this failure for the session-end report and tally it.
    failed_test_tracking['tests'].append(template_data)
    if is_error:
        failed_test_tracking['total_errored'] += 1
    else:
        failed_test_tracking['total_failed'] += 1
def pytest_sessionfinish(session, exitstatus):
    """Render the failed-browser-tests HTML report at the end of the run."""
    report_template = template_env.get_template('failed_browser_tests.html')
    outfile = log_path.join('failed_browser_tests.html')
    # Remove any stale report left over from a previous session.
    try:
        outfile.remove(ignore_errors=True)
    except ENOENT:
        pass
    # Only write a report when at least one test failed or errored.
    if failed_test_tracking['tests']:
        outfile.write(report_template.render(**failed_test_tracking))
@pytest.fixture(scope='session')
def browser():
    """Session-scoped accessor for the shared browser object."""
    return browser_module.browser
@pytest.yield_fixture(scope="function")
def nuke_browser_after_test():
    """Some more disruptive tests have to take this measure."""
    yield
    # Kill the browser entirely after the test, then reopen a fresh one
    # so subsequent tests see a clean session.
    browser_module.quit()
    ensure_browser_open()
| jdemon519/cfme_tests | fixtures/browser.py | Python | gpl-2.0 | 4,714 |
from functools import partial
import pytest
import sklearn.cluster
import numpy as np
from numpy.testing import assert_array_equal
from dask_ml.datasets import make_blobs
from dask_ml.cluster import SpectralClustering
from dask_ml import metrics
# Shared fixture data: 200 blob samples as a dask array in 100-row chunks.
X, y = make_blobs(n_samples=200, chunks=100, random_state=0)
# Materialized (NumPy) copy of X for the non-dask parametrization.
X_ = X.compute()
@pytest.mark.parametrize('data', [X, X_])
@pytest.mark.parametrize('persist_embedding', [True, False])
def test_basic(data, persist_embedding):
    """Fitting assigns a label to every sample, for dask and numpy input."""
    model = SpectralClustering(n_components=25, random_state=0,
                               persist_embedding=persist_embedding)
    model.fit(data)
    assert len(model.labels_) == len(X)
@pytest.mark.parametrize('assign_labels', [
    sklearn.cluster.KMeans(n_init=2),
    'sklearn-kmeans'])
def test_sklearn_kmeans(assign_labels):
    """Both a KMeans instance and its string alias select sklearn KMeans."""
    model = SpectralClustering(n_components=25, random_state=0,
                               assign_labels=assign_labels,
                               kmeans_params={'n_clusters': 8})
    model.fit(X)
    assert isinstance(model.assign_labels_, sklearn.cluster.KMeans)
def test_callable_affinity():
    """A callable affinity is accepted in place of a metric name."""
    rbf_affinity = partial(metrics.pairwise.pairwise_kernels,
                           metric='rbf',
                           filter_params=True)
    model = SpectralClustering(affinity=rbf_affinity)
    model.fit(X)
def test_n_components_raises():
    """n_components equal to the sample count is rejected."""
    model = SpectralClustering(n_components=len(X))
    with pytest.raises(ValueError) as excinfo:
        model.fit(X)
    assert excinfo.match('n_components')
def test_assign_labels_raises():
    """Unknown names raise ValueError; non-string, non-estimator types TypeError."""
    with pytest.raises(ValueError) as excinfo:
        SpectralClustering(assign_labels='foo').fit(X)
    assert excinfo.match("Unknown 'assign_labels' 'foo'")

    with pytest.raises(TypeError) as excinfo:
        SpectralClustering(assign_labels=dict()).fit(X)
    assert excinfo.match("Invalid type ")
def test_affinity_raises():
    """Unknown metric names raise ValueError; arbitrary objects TypeError."""
    with pytest.raises(ValueError) as excinfo:
        SpectralClustering(affinity='foo').fit(X)
    assert excinfo.match("Unknown affinity metric name 'foo'")

    with pytest.raises(TypeError) as excinfo:
        SpectralClustering(affinity=np.array([])).fit(X)
    assert excinfo.match("Unexpected type for affinity 'ndarray'")
def test_spectral_clustering():
    """Two well-separated blocks in a precomputed affinity matrix yield two clusters."""
    affinity = np.array([
        [1.0, 1.0, 1.0, 0.2, 0.0, 0.0, 0.0],
        [1.0, 1.0, 1.0, 0.2, 0.0, 0.0, 0.0],
        [1.0, 1.0, 1.0, 0.2, 0.0, 0.0, 0.0],
        [0.2, 0.2, 0.2, 1.0, 1.0, 1.0, 1.0],
        [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0],
        [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0],
        [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0],
    ])
    model = SpectralClustering(random_state=0, n_clusters=2,
                               n_components=4).fit(affinity)
    labels = model.labels_.compute()
    # Cluster ids are arbitrary; normalise so the first sample gets label 1.
    if labels[0] == 0:
        labels = 1 - labels
    assert_array_equal(labels, [1, 1, 1, 0, 0, 0, 0])
| daniel-severo/dask-ml | tests/test_spectral_clustering.py | Python | bsd-3-clause | 2,854 |
# -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2016 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
import pytest
from inspirehep.modules.search import IQ
def test_empty():
    """An empty query becomes a zero-terms multi_match over the default fields."""
    default_fields = ['title^3', 'title.raw^10', 'abstract^2', 'abstract.raw^4',
                      'author^10', 'author.raw^15', 'reportnumber^10',
                      'eprint^10', 'doi^10']
    expected = {'multi_match': {'zero_terms_query': 'all',
                                'query': '',
                                'fields': default_fields}}
    assert IQ('').to_dict() == expected
def test_google_style():
    """A bare keyword query becomes a multi_match over the default fields."""
    default_fields = ['title^3', 'title.raw^10', 'abstract^2', 'abstract.raw^4',
                      'author^10', 'author.raw^15', 'reportnumber^10',
                      'eprint^10', 'doi^10']
    expected = {'multi_match': {'zero_terms_query': 'all',
                                'query': 'kudenko',
                                'fields': default_fields}}
    assert IQ('kudenko').to_dict() == expected
@pytest.mark.xfail(reason='query is malformed, but user intent is clear')
def test_google_style_or_google_style():
    # Expected output is intentionally empty until the parser handles this form.
    assert IQ('sungtae cho or 1301.7261').to_dict() == {}
@pytest.mark.xfail(reason='query is malformed, but user intent is clear')
def test_google_style_and_not_collaboration():
    # Expected output is intentionally empty until the parser handles this form.
    assert IQ("raffaele d'agnolo and not cn cms").to_dict() == {}
@pytest.mark.xfail(reason='query is malformed, but user intent is clear')
def test_author():
    # Expected output is intentionally empty until the parser handles this form.
    assert IQ('a kondrashuk').to_dict() == {}
@pytest.mark.xfail(reason='query is malformed, but user intent is clear')
def test_author_bai_malformed():
    # Expected output is intentionally empty until the parser handles this form.
    assert IQ('a r.j.hill.1').to_dict() == {}
def test_author_bai():
    """'find a <BAI>' matches name variations or the BAI term, boosted by full name."""
    expected = {'bool': {
        'must': [{'bool': {'should': [
            {'match': {'authors.name_variations': 'r.j.hill.1'}},
            {'term': {'authors.inspire_bai': 'r.j.hill.1'}},
        ]}}],
        'should': [{'match': {'authors.full_name': 'r.j.hill.1'}}],
    }}
    assert IQ('find a r.j.hill.1').to_dict() == expected
@pytest.mark.xfail(reason='query is malformed, but user intent is clear')
def test_author_or_author():
    # Expected output is intentionally empty until the parser handles this form.
    assert IQ('a fileviez perez,p or p. f. perez').to_dict() == {}
@pytest.mark.xfail(reason='query is malformed, but user intent is clear')
def test_author_and_not_author():
    # Expected output is intentionally empty until the parser handles this form.
    assert IQ('a espinosa,jose r and not a rodriguez espinosa').to_dict() == {}
@pytest.mark.xfail(reason='query is malformed, but user intent is clear')
def test_author_and_not_type_code():
    # Expected output is intentionally empty until the parser handles this form.
    assert IQ('a nilles,h and not tc I').to_dict() == {}
@pytest.mark.xfail(reason='query is malformed, but user intent is clear')
def test_author_or_author_and_not_collaborations_and_not_title_and_not_type_code():
    """Placeholder for a malformed author/collaboration query.

    Renamed with the ``test_`` prefix: without it pytest never collected
    the function, so the xfail marker was dead code.
    """
    query = IQ(
        'a rojo,j. or rojo-chacon,j. and not collaboration pierre auger '
        'and not collaboration auger and not t auger and tc p')
    expected = {}
    result = query.to_dict()
    assert expected == result
@pytest.mark.xfail(reason='query is malformed, but user intent is clear')
def test_exactauthor():
    # Expected output is intentionally empty until the parser handles this form.
    assert IQ('ea wu, xing gang').to_dict() == {}
def test_abstract_colon_with_star_wildcard():
    """A wildcard in 'abstract:' becomes a query_string on abstracts.value."""
    expected = {'query_string': {'query': 'part*',
                                 'default_field': 'abstracts.value',
                                 'analyze_wildcard': True}}
    assert IQ('abstract: part*').to_dict() == expected
def test_author_colon():
    """'author:' without quotes matches variations, full name and BAI."""
    expected = {'bool': {'should': [
        {'match': {'authors.name_variations': 'vagenas'}},
        {'match': {'authors.full_name': 'vagenas'}},
        {'match': {'authors.inspire_bai': 'vagenas'}},
    ]}}
    assert IQ('author: vagenas').to_dict() == expected
def test_author_colon_with_double_quotes():
    """A quoted 'author:' requires an exact variation or BAI term match."""
    expected = {'bool': {
        'must': [{'bool': {'should': [
            {'match': {'authors.name_variations': 'tachikawa, yuji'}},
            {'term': {'authors.inspire_bai': 'tachikawa, yuji'}},
        ]}}],
        'should': [{'match': {'authors.full_name': 'tachikawa, yuji'}}],
    }}
    assert IQ('author:"tachikawa, yuji"').to_dict() == expected
def test_author_colon_bai():
    """An unquoted BAI after 'author:' matches variations, full name and BAI."""
    expected = {'bool': {'should': [
        {'match': {'authors.name_variations': 'Y.Nomura.1'}},
        {'match': {'authors.full_name': 'Y.Nomura.1'}},
        {'match': {'authors.inspire_bai': 'Y.Nomura.1'}},
    ]}}
    assert IQ('author:Y.Nomura.1').to_dict() == expected
def test_author_colon_bai_and_collection_colon():
    """AND-ing a collection filter puts it in 'must' beside the author 'should'."""
    expected = {'bool': {
        'should': [
            {'match': {'authors.name_variations': 'E.Witten.1'}},
            {'match': {'authors.full_name': 'E.Witten.1'}},
            {'match': {'authors.inspire_bai': 'E.Witten.1'}},
        ],
        'must': [{'multi_match': {'query': 'citeable',
                                  'fields': ['collections.primary']}}],
    }}
    assert IQ('author:E.Witten.1 AND collection:citeable').to_dict() == expected
def test_author_colon_bai_with_double_quotes_and_collection_colon():
    """Quoted BAI plus collection: both clauses land in 'must'."""
    expected = {'bool': {
        'should': [{'match': {'authors.full_name': 'E.Witten.1'}}],
        'must': [
            {'bool': {'should': [
                {'match': {'authors.name_variations': 'E.Witten.1'}},
                {'term': {'authors.inspire_bai': 'E.Witten.1'}},
            ]}},
            {'multi_match': {'query': 'citeable',
                             'fields': ['collections.primary']}},
        ],
    }}
    assert IQ('author:"E.Witten.1" AND collection:citeable').to_dict() == expected
def test_author_colon_bai_and_collection_colon_and_cited_colon():
    """A 'cited:a->b' filter becomes a citation_count range in 'must'."""
    expected = {'bool': {
        'must': [
            {'multi_match': {'query': 'citeable',
                             'fields': ['collections.primary']}},
            {'range': {'citation_count': {'gte': '500', 'lte': '1000000'}}},
        ],
        'should': [
            {'match': {'authors.name_variations': 'E.Witten.1'}},
            {'match': {'authors.full_name': 'E.Witten.1'}},
            {'match': {'authors.inspire_bai': 'E.Witten.1'}},
        ],
    }}
    query = IQ('author:E.Witten.1 AND collection:citeable AND cited:500->1000000')
    assert query.to_dict() == expected
def test_author_colon_bai_with_double_quotes_and_collection_colon_and_cited_colon():
    """Quoted BAI, collection and citation range all become 'must' clauses."""
    expected = {'bool': {
        'must': [
            {'bool': {'should': [
                {'match': {'authors.name_variations': 'E.Witten.1'}},
                {'term': {'authors.inspire_bai': 'E.Witten.1'}},
            ]}},
            {'multi_match': {'query': 'citeable',
                             'fields': ['collections.primary']}},
            {'range': {'citation_count': {'gte': '500', 'lte': '1000000'}}},
        ],
        'should': [{'match': {'authors.full_name': 'E.Witten.1'}}],
    }}
    query = IQ('author:"E.Witten.1" AND collection:citeable AND cited:500->1000000')
    assert query.to_dict() == expected
@pytest.mark.xfail(reason='query is malformed, but user intent is clear')
def test_author_colon_or_eprint_without_keyword():
    # Expected output is intentionally empty until the parser handles this form.
    assert IQ('author:"Takayanagi, Tadashi" or hep-th/0010101').to_dict() == {}
def test_author_colon_or_author_colon_or_title_colon_or_title_colon():
    """A parenthesised OR of two author clauses and two title phrases."""
    author_fields = ['authors.full_name', 'authors.alternative_name']
    title_fields = ['titles.title', 'titles.title.raw^2',
                    'title_translation.title', 'title_variation',
                    'title_translation.subtitle', 'titles.subtitle']
    okada_clause = {'bool': {'should': [
        {'bool': {'must': [
            {'multi_match': {'query': 'H Okada', 'type': 'phrase',
                             'fields': author_fields}},
            {'multi_match': {'query': 'hep-ph',
                             'fields': ['global_fulltext']}},
        ]}},
        {'multi_match': {'query': 'Hiroshi Okada', 'type': 'phrase',
                         'fields': author_fields}},
    ]}}
    expected = {'bool': {'should': [
        {'bool': {'should': [
            okada_clause,
            {'multi_match': {
                'query': 'Dark matter in supersymmetric U(1(B-L) model',
                'type': 'phrase', 'fields': title_fields}},
        ]}},
        {'multi_match': {'query': 'Non-Abelian discrete symmetry for flavors',
                         'type': 'phrase', 'fields': title_fields}},
    ]}}
    query = IQ(
        "(author:'Hiroshi Okada' OR (author:'H Okada' hep-ph) OR "
        "title: 'Dark matter in supersymmetric U(1(B-L) model' OR "
        "title: 'Non-Abelian discrete symmetry for flavors')")
    assert query.to_dict() == expected
@pytest.mark.xfail(reason='tracked in issue #817')
def test_citedby_colon():
    # Expected output is intentionally empty until the issue is resolved.
    assert IQ('citedby:foobar').to_dict() == {}
@pytest.mark.xfail(reason='tracked in issue #817')
def test_citedby_colon_recid_colon():
    # Expected output is intentionally empty until the issue is resolved.
    assert IQ('citedby:recid:902780').to_dict() == {}
@pytest.mark.xfail(reason='tracked in issue #817')
def test_eprint_colon_with_arxiv():
    # Expected output is intentionally empty until the issue is resolved.
    assert IQ('eprint:arxiv:TODO').to_dict() == {}
@pytest.mark.xfail(reason='tracked in issue #817')
def test_eprint_colon_without_arxiv():
    # Expected output is intentionally empty until the issue is resolved.
    assert IQ('eprint:TODO').to_dict() == {}
@pytest.mark.xfail(reason='query is malformed, but user intent is clear')
def test_exactauthor_colon():
    # Expected output is intentionally empty until the parser handles this form.
    assert IQ('ea:matt visser').to_dict() == {}
@pytest.mark.xfail(reason='query is malformed, but user intent is clear')
def test_exactauthor_colon_and_collection_colon():
    # Expected output is intentionally empty until the parser handles this form.
    assert IQ('ea: matt visser AND collection:citeable').to_dict() == {}
def test_exactauthor_colon_bai():
    """'exactauthor:' with a BAI becomes a multi_match over the exact-author fields."""
    expected = {'multi_match': {
        'query': 'J.Serra.3',
        'fields': ['exactauthor.raw', 'authors.full_name',
                   'authors.alternative_name', 'authors.inspire_bai'],
    }}
    assert IQ('exactauthor:J.Serra.3').to_dict() == expected
def test_field_code_colon():
    """'fc:' maps straight to the field_code field."""
    assert IQ('fc: a').to_dict() == {'multi_match': {'query': 'a',
                                                     'fields': ['field_code']}}
@pytest.mark.xfail(reason='BAI is not part of the mappings')
def test_or_of_exactauthor_colon_queries():
    # NOTE(review): this expected value looks copy-pasted from the
    # J.Serra.3 exactauthor test; revisit once the BAI mapping lands.
    expected = {'multi_match': {
        'query': 'J.Serra.3',
        'fields': ['exactauthor.raw', 'authors.full_name',
                   'authors.alternative_name', 'authors.inspire_bai'],
    }}
    assert IQ('exactauthor:X.Yin.1 or exactauthor:"Yin, Xi"').to_dict() == expected
@pytest.mark.xfail(reason='tracked in issue #817')
def test_fulltext_colon():
    # Expected output is intentionally empty until the issue is resolved.
    assert IQ('fulltext:TODO').to_dict() == {}
@pytest.mark.xfail(reason='tracked in issue #817')
def test_journal_colon():
    # Expected output is intentionally empty until the issue is resolved.
    assert IQ('journal:TODO').to_dict() == {}
def test_refersto_colon_recid_colon():
    """'refersto:recid:' searches the cited record ids."""
    expected = {'multi_match': {'query': '1286113',
                                'fields': ['references.recid']}}
    assert IQ('refersto:recid:1286113').to_dict() == expected
@pytest.mark.xfail(reason='tracked in issue #817')
def test_topcite_colon():
    # Expected output is intentionally empty until the issue is resolved.
    assert IQ('topcite:200+').to_dict() == {}
def test_type_code_colon():
    """'tc:' maps straight to the collection field."""
    assert IQ('tc: l').to_dict() == {'multi_match': {'query': 'l',
                                                     'fields': ['collection']}}
def test_find_author_with_hash_wildcard():
    """A '#' wildcard is rewritten to '*' query_strings on both author fields."""
    expected = {'bool': {'should': [
        {'query_string': {'analyze_wildcard': True,
                          'default_field': 'authors.full_name',
                          'query': 'chkv*'}},
        {'query_string': {'analyze_wildcard': True,
                          'default_field': 'authors.alternative_name',
                          'query': 'chkv*'}},
    ]}}
    assert IQ('find a chkv#').to_dict() == expected
@pytest.mark.xfail(reason='tracked in issue #1235')
def test_find_journal():
    """'find j' should become a query_string on the pubnote field."""
    expected = {'query_string': {'query': '"Phys.Rev.Lett.,105*"',
                                 'default_field': 'publication_info.pubnote',
                                 'analyze_wildcard': True}}
    assert IQ('find j "Phys.Rev.Lett.,105*"').to_dict() == expected
def test_find_exactauthor():
    """'find ea' becomes a multi_match over the exact-author fields."""
    expected = {'multi_match': {
        'query': 'witten, edward',
        'fields': ['exactauthor.raw', 'authors.full_name',
                   'authors.alternative_name', 'authors.inspire_bai'],
    }}
    assert IQ('find ea witten, edward').to_dict() == expected
def test_find_exactauthor_not_affiliation_uppercase():
    """Uppercase FIND EA ... NOT AFF negates the affiliation clause."""
    expected = {'bool': {
        'must_not': [{'multi_match': {
            'query': 'SINCROTRONE TRIESTE',
            'fields': ['authors.affiliations.value', 'corporate_author'],
        }}],
        'must': [{'multi_match': {
            'query': 'RINALDI, MASSIMILIANO',
            'fields': ['exactauthor.raw', 'authors.full_name',
                       'authors.alternative_name', 'authors.inspire_bai'],
        }}],
    }}
    query = IQ('FIND EA RINALDI, MASSIMILIANO NOT AFF SINCROTRONE TRIESTE')
    assert query.to_dict() == expected
def test_find_author():
    """'find a' requires a variation/BAI match, boosted by full name."""
    expected = {'bool': {
        'must': [{'bool': {'should': [
            {'match': {'authors.name_variations': 'polchinski'}},
            {'term': {'authors.inspire_bai': 'polchinski'}},
        ]}}],
        'should': [{'match': {'authors.full_name': 'polchinski'}}],
    }}
    assert IQ('find a polchinski').to_dict() == expected
def test_find_author_uppercase():
    """Uppercase FIND A behaves like the lowercase form."""
    expected = {'bool': {
        'must': [{'bool': {'should': [
            {'match': {'authors.name_variations': 'W F CHANG'}},
            {'term': {'authors.inspire_bai': 'W F CHANG'}},
        ]}}],
        'should': [{'match': {'authors.full_name': 'W F CHANG'}}],
    }}
    assert IQ('FIND A W F CHANG').to_dict() == expected
def test_find_author_and_date():
    """'date after <year>' ORs range clauses over all date-bearing fields."""
    after_2000 = lambda field: {'range': {field: {'gt': '2000'}}}
    date_clause = {'bool': {'minimum_should_match': 1, 'should': [
        {'bool': {'should': [
            {'bool': {'should': [
                after_2000('imprints.date'),
                after_2000('preprint_date'),
            ]}},
            after_2000('thesis.date'),
        ]}},
        after_2000('publication_info.year'),
    ]}}
    expected = {'bool': {
        'minimum_should_match': 0,
        'should': [{'match': {'authors.full_name': 'hatta'}}],
        'must': [
            {'bool': {'should': [
                {'match': {'authors.name_variations': 'hatta'}},
                {'term': {'authors.inspire_bai': 'hatta'}},
            ]}},
            date_clause,
        ],
    }}
    assert IQ('find a hatta and date after 2000').to_dict() == expected
def test_find_author_or_author():
    """OR of two 'find a' clauses nests one author bool per name."""
    def author_clause(name):
        # One 'find a <name>' sub-query: exact variation/BAI in must,
        # full-name boost in should.
        return {'bool': {
            'must': [{'bool': {'should': [
                {'match': {'authors.name_variations': name}},
                {'term': {'authors.inspire_bai': name}},
            ]}}],
            'should': [{'match': {'authors.full_name': name}}],
        }}

    expected = {'bool': {'should': [author_clause('gersdorff, g'),
                                    author_clause('von gersdorff, g')]}}
    assert IQ('find a gersdorff, g or a von gersdorff, g').to_dict() == expected
def test_find_author_not_author_not_author():
    """Each negated author becomes a full author clause under must_not."""
    def author_clause(name):
        # One author sub-query: exact variation/BAI in must, full-name
        # boost in should.
        return {'bool': {
            'must': [{'bool': {'should': [
                {'match': {'authors.name_variations': name}},
                {'term': {'authors.inspire_bai': name}},
            ]}}],
            'should': [{'match': {'authors.full_name': name}}],
        }}

    expected = {'bool': {
        'minimum_should_match': 0,
        'must': [{'bool': {'should': [
            {'match': {'authors.name_variations': 'ostapchenko'}},
            {'term': {'authors.inspire_bai': 'ostapchenko'}},
        ]}}],
        'must_not': [author_clause('olinto'), author_clause('haungs')],
        'should': [{'match': {'authors.full_name': 'ostapchenko'}}],
    }}
    assert IQ('f a ostapchenko not olinto not haungs').to_dict() == expected
@pytest.mark.xfail(reason='tracked in issue #817')
def test_find_caption():
    # Expected output is intentionally empty until the issue is resolved.
    assert IQ('Diagram for the fermion flow violating process').to_dict() == {}
@pytest.mark.xfail(reason='tracked in issue #817')
def test_find_country_code():
    # Expected output is intentionally empty until the issue is resolved.
    assert IQ('find cc italy').to_dict() == {}
@pytest.mark.xfail(reason='today must be converted to an actual date')
def test_find_date():
    # Expected output is intentionally empty until 'today' resolution exists.
    assert IQ('fin date > today').to_dict() == {}
def test_find_field_code():
    """'find fc' maps straight to the field_code field."""
    assert IQ('find fc a').to_dict() == {'multi_match': {'query': 'a',
                                                         'fields': ['field_code']}}
@pytest.mark.xfail(reason='tracked in issue #817')
def test_find_report():
    # Expected output is intentionally empty until the issue is resolved.
    assert IQ('find r atlas-conf-*').to_dict() == {}
def test_find_type_code():
    """'find tc' maps straight to the collection field."""
    assert IQ('find tc book').to_dict() == {'multi_match': {'query': 'book',
                                                            'fields': ['collection']}}
| Panos512/inspire-next | tests/unit/search/test_search_query.py | Python | gpl-2.0 | 32,501 |
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Downgrades wrapper-generation failures to warnings so generation continues."""
    def handle_error(self, wrapper, exception, traceback_):
        # Report the failure, then tell pybindgen to skip this wrapper.
        message = "exception %r in wrapper %s" % (exception, wrapper)
        warnings.warn(message)
        return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Create and return the root pybindgen Module for the ns3 flow_monitor bindings."""
    return Module('ns.flow_monitor', cpp_namespace='::ns3')
def register_types(module):
    """Register with *module* every C++ type used by the ns-3 flow-monitor bindings.

    This function is auto-generated by pybindgen's API scanner.  It declares
    the classes, enumerations, container instantiations and type aliases the
    wrappers need; most entries are re-imported from the core/network/internet
    binding modules (``import_from_module=...``) rather than wrapped anew.
    Registration order matters: base classes and outer classes must exist
    before types that reference them.  Nested C++ namespaces (FatalImpl, Hash,
    TracedValueCallback) are registered through their own helpers at the end.
    """
    root_module = module.get_root()
    ## address.h (module 'network'): ns3::Address [class]
    module.add_class('Address', import_from_module='ns.network')
    ## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
    module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
    module.add_class('AttributeConstructionList', import_from_module='ns.core')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
    module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
    ## buffer.h (module 'network'): ns3::Buffer [class]
    module.add_class('Buffer', import_from_module='ns.network')
    ## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
    module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
    ## packet.h (module 'network'): ns3::ByteTagIterator [class]
    module.add_class('ByteTagIterator', import_from_module='ns.network')
    ## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
    module.add_class('ByteTagList', import_from_module='ns.network')
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
    module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
    ## callback.h (module 'core'): ns3::CallbackBase [class]
    module.add_class('CallbackBase', import_from_module='ns.core')
    ## event-id.h (module 'core'): ns3::EventId [class]
    module.add_class('EventId', import_from_module='ns.core')
    ## flow-monitor-helper.h (module 'flow-monitor'): ns3::FlowMonitorHelper [class]
    module.add_class('FlowMonitorHelper')
    ## hash.h (module 'core'): ns3::Hasher [class]
    module.add_class('Hasher', import_from_module='ns.core')
    ## histogram.h (module 'flow-monitor'): ns3::Histogram [class]
    module.add_class('Histogram')
    ## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
    module.add_class('Inet6SocketAddress', import_from_module='ns.network')
    ## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
    root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
    ## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
    module.add_class('InetSocketAddress', import_from_module='ns.network')
    ## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
    root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    module.add_class('Ipv4Address', import_from_module='ns.network')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress [class]
    module.add_class('Ipv4InterfaceAddress', import_from_module='ns.internet')
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e [enumeration]
    module.add_enum('InterfaceAddressScope_e', ['HOST', 'LINK', 'GLOBAL'], outer_class=root_module['ns3::Ipv4InterfaceAddress'], import_from_module='ns.internet')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
    module.add_class('Ipv4Mask', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    module.add_class('Ipv6Address', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress [class]
    module.add_class('Ipv6InterfaceAddress', import_from_module='ns.internet')
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::State_e [enumeration]
    module.add_enum('State_e', ['TENTATIVE', 'DEPRECATED', 'PREFERRED', 'PERMANENT', 'HOMEADDRESS', 'TENTATIVE_OPTIMISTIC', 'INVALID'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Scope_e [enumeration]
    module.add_enum('Scope_e', ['HOST', 'LINKLOCAL', 'GLOBAL'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
    module.add_class('Ipv6Prefix', import_from_module='ns.network')
    ## mac48-address.h (module 'network'): ns3::Mac48Address [class]
    module.add_class('Mac48Address', import_from_module='ns.network')
    ## mac48-address.h (module 'network'): ns3::Mac48Address [class]
    root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## node-container.h (module 'network'): ns3::NodeContainer [class]
    module.add_class('NodeContainer', import_from_module='ns.network')
    ## object-base.h (module 'core'): ns3::ObjectBase [class]
    module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
    ## object.h (module 'core'): ns3::ObjectDeleter [struct]
    module.add_class('ObjectDeleter', import_from_module='ns.core')
    ## object-factory.h (module 'core'): ns3::ObjectFactory [class]
    module.add_class('ObjectFactory', import_from_module='ns.core')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
    module.add_class('PacketMetadata', import_from_module='ns.network')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
    module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
    module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
    ## packet.h (module 'network'): ns3::PacketTagIterator [class]
    module.add_class('PacketTagIterator', import_from_module='ns.network')
    ## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
    module.add_class('PacketTagList', import_from_module='ns.network')
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
    module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData_e [enumeration]
    module.add_enum('TagData_e', ['MAX_SIZE'], outer_class=root_module['ns3::PacketTagList::TagData'], import_from_module='ns.network')
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simulator.h (module 'core'): ns3::Simulator [class]
    module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
    ## tag.h (module 'network'): ns3::Tag [class]
    module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
    ## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
    module.add_class('TagBuffer', import_from_module='ns.network')
    ## nstime.h (module 'core'): ns3::TimeWithUnit [class]
    module.add_class('TimeWithUnit', import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId [class]
    module.add_class('TypeId', import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
    module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
    module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
    module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## empty.h (module 'core'): ns3::empty [class]
    module.add_class('empty', import_from_module='ns.core')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
    module.add_class('int64x64_t', import_from_module='ns.core')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::impl_type [enumeration]
    module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
    ## chunk.h (module 'network'): ns3::Chunk [class]
    module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
    ## header.h (module 'network'): ns3::Header [class]
    module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header [class]
    module.add_class('Ipv4Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::DscpType [enumeration]
    module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::EcnType [enumeration]
    module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Header [class]
    module.add_class('Ipv6Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Header::DscpType [enumeration]
    module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Header::NextHeader_e [enumeration]
    module.add_enum('NextHeader_e', ['IPV6_EXT_HOP_BY_HOP', 'IPV6_IPV4', 'IPV6_TCP', 'IPV6_UDP', 'IPV6_IPV6', 'IPV6_EXT_ROUTING', 'IPV6_EXT_FRAGMENTATION', 'IPV6_EXT_CONFIDENTIALITY', 'IPV6_EXT_AUTHENTIFICATION', 'IPV6_ICMPV6', 'IPV6_EXT_END', 'IPV6_EXT_DESTINATION', 'IPV6_SCTP', 'IPV6_EXT_MOBILITY', 'IPV6_UDP_LITE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
    ## object.h (module 'core'): ns3::Object [class]
    module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    ## object.h (module 'core'): ns3::Object::AggregateIterator [class]
    module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::FlowClassifier', 'ns3::empty', 'ns3::DefaultDeleter<ns3::FlowClassifier>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4MulticastRoute', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4MulticastRoute>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4Route', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4Route>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NetDeviceQueue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NetDeviceQueue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::QueueItem', 'ns3::empty', 'ns3::DefaultDeleter<ns3::QueueItem>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## socket.h (module 'network'): ns3::Socket [class]
    module.add_class('Socket', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## socket.h (module 'network'): ns3::Socket::SocketErrno [enumeration]
    module.add_enum('SocketErrno', ['ERROR_NOTERROR', 'ERROR_ISCONN', 'ERROR_NOTCONN', 'ERROR_MSGSIZE', 'ERROR_AGAIN', 'ERROR_SHUTDOWN', 'ERROR_OPNOTSUPP', 'ERROR_AFNOSUPPORT', 'ERROR_INVAL', 'ERROR_BADF', 'ERROR_NOROUTETOHOST', 'ERROR_NODEV', 'ERROR_ADDRNOTAVAIL', 'ERROR_ADDRINUSE', 'SOCKET_ERRNO_LAST'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
    ## socket.h (module 'network'): ns3::Socket::SocketType [enumeration]
    module.add_enum('SocketType', ['NS3_SOCK_STREAM', 'NS3_SOCK_SEQPACKET', 'NS3_SOCK_DGRAM', 'NS3_SOCK_RAW'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
    ## socket.h (module 'network'): ns3::Socket::Ipv6MulticastFilterMode [enumeration]
    module.add_enum('Ipv6MulticastFilterMode', ['INCLUDE', 'EXCLUDE'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
    ## socket.h (module 'network'): ns3::SocketIpTosTag [class]
    module.add_class('SocketIpTosTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketIpTtlTag [class]
    module.add_class('SocketIpTtlTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag [class]
    module.add_class('SocketIpv6HopLimitTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketIpv6TclassTag [class]
    module.add_class('SocketIpv6TclassTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketSetDontFragmentTag [class]
    module.add_class('SocketSetDontFragmentTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## nstime.h (module 'core'): ns3::Time [class]
    module.add_class('Time', import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
    module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time [class]
    root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
    module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    ## trailer.h (module 'network'): ns3::Trailer [class]
    module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
    ## attribute.h (module 'core'): ns3::AttributeAccessor [class]
    module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeChecker [class]
    module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    ## attribute.h (module 'core'): ns3::AttributeValue [class]
    module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    ## callback.h (module 'core'): ns3::CallbackChecker [class]
    module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## callback.h (module 'core'): ns3::CallbackImplBase [class]
    module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    ## callback.h (module 'core'): ns3::CallbackValue [class]
    module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
    module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## event-impl.h (module 'core'): ns3::EventImpl [class]
    module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    ## flow-classifier.h (module 'flow-monitor'): ns3::FlowClassifier [class]
    module.add_class('FlowClassifier', parent=root_module['ns3::SimpleRefCount< ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> >'])
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor [class]
    module.add_class('FlowMonitor', parent=root_module['ns3::Object'])
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats [struct]
    module.add_class('FlowStats', outer_class=root_module['ns3::FlowMonitor'])
    ## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe [class]
    module.add_class('FlowProbe', parent=root_module['ns3::Object'])
    ## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe::FlowStats [struct]
    module.add_class('FlowStats', outer_class=root_module['ns3::FlowProbe'])
    ## ipv4.h (module 'internet'): ns3::Ipv4 [class]
    module.add_class('Ipv4', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
    module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
    module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv4-flow-classifier.h (module 'flow-monitor'): ns3::Ipv4FlowClassifier [class]
    module.add_class('Ipv4FlowClassifier', parent=root_module['ns3::FlowClassifier'])
    ## ipv4-flow-classifier.h (module 'flow-monitor'): ns3::Ipv4FlowClassifier::FiveTuple [struct]
    module.add_class('FiveTuple', outer_class=root_module['ns3::Ipv4FlowClassifier'])
    ## ipv4-flow-probe.h (module 'flow-monitor'): ns3::Ipv4FlowProbe [class]
    module.add_class('Ipv4FlowProbe', parent=root_module['ns3::FlowProbe'])
    ## ipv4-flow-probe.h (module 'flow-monitor'): ns3::Ipv4FlowProbe::DropReason [enumeration]
    module.add_enum('DropReason', ['DROP_NO_ROUTE', 'DROP_TTL_EXPIRE', 'DROP_BAD_CHECKSUM', 'DROP_QUEUE', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT', 'DROP_INVALID_REASON'], outer_class=root_module['ns3::Ipv4FlowProbe'])
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol [class]
    module.add_class('Ipv4L3Protocol', import_from_module='ns.internet', parent=root_module['ns3::Ipv4'])
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::DropReason [enumeration]
    module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_BAD_CHECKSUM', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv4L3Protocol'], import_from_module='ns.internet')
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
    module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
    module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute [class]
    module.add_class('Ipv4MulticastRoute', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
    ## ipv4-route.h (module 'internet'): ns3::Ipv4Route [class]
    module.add_class('Ipv4Route', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
    ## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol [class]
    module.add_class('Ipv4RoutingProtocol', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ipv6.h (module 'internet'): ns3::Ipv6 [class]
    module.add_class('Ipv6', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
    module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
    module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier [class]
    module.add_class('Ipv6FlowClassifier', parent=root_module['ns3::FlowClassifier'])
    ## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::FiveTuple [struct]
    module.add_class('FiveTuple', outer_class=root_module['ns3::Ipv6FlowClassifier'])
    ## ipv6-flow-probe.h (module 'flow-monitor'): ns3::Ipv6FlowProbe [class]
    module.add_class('Ipv6FlowProbe', parent=root_module['ns3::FlowProbe'])
    ## ipv6-flow-probe.h (module 'flow-monitor'): ns3::Ipv6FlowProbe::DropReason [enumeration]
    module.add_enum('DropReason', ['DROP_NO_ROUTE', 'DROP_TTL_EXPIRE', 'DROP_BAD_CHECKSUM', 'DROP_QUEUE', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_UNKNOWN_PROTOCOL', 'DROP_UNKNOWN_OPTION', 'DROP_MALFORMED_HEADER', 'DROP_FRAGMENT_TIMEOUT', 'DROP_INVALID_REASON'], outer_class=root_module['ns3::Ipv6FlowProbe'])
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6L3Protocol [class]
    module.add_class('Ipv6L3Protocol', import_from_module='ns.internet', parent=root_module['ns3::Ipv6'])
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6L3Protocol::DropReason [enumeration]
    module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_UNKNOWN_PROTOCOL', 'DROP_UNKNOWN_OPTION', 'DROP_MALFORMED_HEADER', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv6L3Protocol'], import_from_module='ns.internet')
    ## ipv6-pmtu-cache.h (module 'internet'): ns3::Ipv6PmtuCache [class]
    module.add_class('Ipv6PmtuCache', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
    module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
    module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
    module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
    module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## net-device.h (module 'network'): ns3::NetDevice [class]
    module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
    module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
    ## net-device.h (module 'network'): ns3::NetDeviceQueue [class]
    module.add_class('NetDeviceQueue', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> >'])
    ## net-device.h (module 'network'): ns3::NetDeviceQueueInterface [class]
    module.add_class('NetDeviceQueueInterface', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## nix-vector.h (module 'network'): ns3::NixVector [class]
    module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    ## node.h (module 'network'): ns3::Node [class]
    module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
    module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
    module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper [class]
    module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
    ## packet.h (module 'network'): ns3::Packet [class]
    module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    ## net-device.h (module 'network'): ns3::QueueItem [class]
    module.add_class('QueueItem', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
    ## nstime.h (module 'core'): ns3::TimeValue [class]
    module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## type-id.h (module 'core'): ns3::TypeIdChecker [class]
    module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## type-id.h (module 'core'): ns3::TypeIdValue [class]
    module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## address.h (module 'network'): ns3::AddressChecker [class]
    module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## address.h (module 'network'): ns3::AddressValue [class]
    module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    # STL container instantiations used by flow-monitor APIs.
    module.add_container('std::vector< ns3::Ipv6Address >', 'ns3::Ipv6Address', container_type=u'vector')
    module.add_container('std::vector< unsigned int >', 'unsigned int', container_type=u'vector')
    module.add_container('std::vector< unsigned long >', 'long unsigned int', container_type=u'vector')
    module.add_container('std::map< unsigned int, ns3::FlowMonitor::FlowStats >', ('unsigned int', 'ns3::FlowMonitor::FlowStats'), container_type=u'map')
    module.add_container('std::vector< ns3::Ptr< ns3::FlowProbe > >', 'ns3::Ptr< ns3::FlowProbe >', container_type=u'vector')
    module.add_container('std::map< unsigned int, ns3::FlowProbe::FlowStats >', ('unsigned int', 'ns3::FlowProbe::FlowStats'), container_type=u'map')
    module.add_container('std::map< unsigned int, unsigned int >', ('unsigned int', 'unsigned int'), container_type=u'map')
    # Typedef aliases: FlowId/FlowPacketId are plain uint32_t in C++.
    typehandlers.add_type_alias(u'uint32_t', u'ns3::FlowPacketId')
    typehandlers.add_type_alias(u'uint32_t*', u'ns3::FlowPacketId*')
    typehandlers.add_type_alias(u'uint32_t&', u'ns3::FlowPacketId&')
    typehandlers.add_type_alias(u'uint32_t', u'ns3::FlowId')
    typehandlers.add_type_alias(u'uint32_t*', u'ns3::FlowId*')
    typehandlers.add_type_alias(u'uint32_t&', u'ns3::FlowId&')
    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)
    ## Register a nested module for the namespace Hash
    nested_module = module.add_cpp_namespace('Hash')
    register_types_ns3_Hash(nested_module)
    ## Register a nested module for the namespace TracedValueCallback
    nested_module = module.add_cpp_namespace('TracedValueCallback')
    register_types_ns3_TracedValueCallback(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register types of the ns3::FatalImpl namespace (no types are exposed)."""
    # Resolve the root module for call parity with the other registrars;
    # FatalImpl itself contributes nothing.
    _ = module.get_root()
def register_types_ns3_Hash(module):
    """Register types of the ns3::Hash namespace.

    Adds the ref-counted Hash::Implementation base class, the 32/64-bit
    hash-function-pointer type aliases, and the nested Function namespace.
    """
    root_module = module.get_root()
    ## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    # Function-pointer aliases in value, pointer and reference form.
    sig32 = u'uint32_t ( * ) ( char const *, size_t ) *'
    sig64 = u'uint64_t ( * ) ( char const *, size_t ) *'
    for deco in (u'', u'*', u'&'):
        typehandlers.add_type_alias(sig32 + deco, u'ns3::Hash::Hash32Function_ptr' + deco)
    for deco in (u'', u'*', u'&'):
        typehandlers.add_type_alias(sig64 + deco, u'ns3::Hash::Hash64Function_ptr' + deco)
    ## Register a nested module for the namespace Function
    nested_module = module.add_cpp_namespace('Function')
    register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
    """Register the concrete hash implementations in ns3::Hash::Function.

    Fnv1a (hash-fnv.h), Hash32/Hash64 (hash-function.h) and Murmur3
    (hash-murmur3.h) all derive from ns3::Hash::Implementation.
    """
    root_module = module.get_root()
    for class_name in ('Fnv1a', 'Hash32', 'Hash64', 'Murmur3'):
        module.add_class(class_name, import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
def register_types_ns3_TracedValueCallback(module):
    """Register callback typedefs of the ns3::TracedValueCallback namespace."""
    root_module = module.get_root()
    # Alias void (*)(ns3::Time, ns3::Time) as TracedValueCallback::Time,
    # in value, pointer and reference form.
    signature = u'void ( * ) ( ns3::Time, ns3::Time ) *'
    for deco in (u'', u'*', u'&'):
        typehandlers.add_type_alias(signature + deco, u'ns3::TracedValueCallback::Time' + deco)
def register_methods(root_module):
    """Dispatch per-class method registration for every wrapped type.

    Called once, after all types have been registered, this attaches
    constructors, member functions and attributes to each class wrapper
    by delegating to the matching register_*_methods helper.  The order
    mirrors the type-registration order emitted by PyBindGen; keep it.
    """
    # Plain value/helper classes.
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
    register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
    register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
    register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
    register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
    register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
    register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
    register_Ns3FlowMonitorHelper_methods(root_module, root_module['ns3::FlowMonitorHelper'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3Histogram_methods(root_module, root_module['ns3::Histogram'])
    register_Ns3Inet6SocketAddress_methods(root_module, root_module['ns3::Inet6SocketAddress'])
    register_Ns3InetSocketAddress_methods(root_module, root_module['ns3::InetSocketAddress'])
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4InterfaceAddress_methods(root_module, root_module['ns3::Ipv4InterfaceAddress'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6InterfaceAddress_methods(root_module, root_module['ns3::Ipv6InterfaceAddress'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
    register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
    register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
    register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
    register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
    register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
    register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
    register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
    register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
    register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
    register_Ns3Header_methods(root_module, root_module['ns3::Header'])
    register_Ns3Ipv4Header_methods(root_module, root_module['ns3::Ipv4Header'])
    register_Ns3Ipv6Header_methods(root_module, root_module['ns3::Ipv6Header'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    # SimpleRefCount<T, ...> instantiations.
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    register_Ns3SimpleRefCount__Ns3FlowClassifier_Ns3Empty_Ns3DefaultDeleter__lt__ns3FlowClassifier__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
    register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
    register_Ns3SimpleRefCount__Ns3NetDeviceQueue_Ns3Empty_Ns3DefaultDeleter__lt__ns3NetDeviceQueue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> >'])
    register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
    register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    register_Ns3SimpleRefCount__Ns3QueueItem_Ns3Empty_Ns3DefaultDeleter__lt__ns3QueueItem__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3Socket_methods(root_module, root_module['ns3::Socket'])
    register_Ns3SocketIpTosTag_methods(root_module, root_module['ns3::SocketIpTosTag'])
    register_Ns3SocketIpTtlTag_methods(root_module, root_module['ns3::SocketIpTtlTag'])
    register_Ns3SocketIpv6HopLimitTag_methods(root_module, root_module['ns3::SocketIpv6HopLimitTag'])
    register_Ns3SocketIpv6TclassTag_methods(root_module, root_module['ns3::SocketIpv6TclassTag'])
    register_Ns3SocketSetDontFragmentTag_methods(root_module, root_module['ns3::SocketSetDontFragmentTag'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
    register_Ns3FlowClassifier_methods(root_module, root_module['ns3::FlowClassifier'])
    register_Ns3FlowMonitor_methods(root_module, root_module['ns3::FlowMonitor'])
    register_Ns3FlowMonitorFlowStats_methods(root_module, root_module['ns3::FlowMonitor::FlowStats'])
    register_Ns3FlowProbe_methods(root_module, root_module['ns3::FlowProbe'])
    register_Ns3FlowProbeFlowStats_methods(root_module, root_module['ns3::FlowProbe::FlowStats'])
    register_Ns3Ipv4_methods(root_module, root_module['ns3::Ipv4'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4FlowClassifier_methods(root_module, root_module['ns3::Ipv4FlowClassifier'])
    register_Ns3Ipv4FlowClassifierFiveTuple_methods(root_module, root_module['ns3::Ipv4FlowClassifier::FiveTuple'])
    register_Ns3Ipv4FlowProbe_methods(root_module, root_module['ns3::Ipv4FlowProbe'])
    register_Ns3Ipv4L3Protocol_methods(root_module, root_module['ns3::Ipv4L3Protocol'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv4MulticastRoute_methods(root_module, root_module['ns3::Ipv4MulticastRoute'])
    register_Ns3Ipv4Route_methods(root_module, root_module['ns3::Ipv4Route'])
    register_Ns3Ipv4RoutingProtocol_methods(root_module, root_module['ns3::Ipv4RoutingProtocol'])
    register_Ns3Ipv6_methods(root_module, root_module['ns3::Ipv6'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6FlowClassifier_methods(root_module, root_module['ns3::Ipv6FlowClassifier'])
    register_Ns3Ipv6FlowClassifierFiveTuple_methods(root_module, root_module['ns3::Ipv6FlowClassifier::FiveTuple'])
    register_Ns3Ipv6FlowProbe_methods(root_module, root_module['ns3::Ipv6FlowProbe'])
    register_Ns3Ipv6L3Protocol_methods(root_module, root_module['ns3::Ipv6L3Protocol'])
    register_Ns3Ipv6PmtuCache_methods(root_module, root_module['ns3::Ipv6PmtuCache'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
    register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3NetDeviceQueue_methods(root_module, root_module['ns3::NetDeviceQueue'])
    register_Ns3NetDeviceQueueInterface_methods(root_module, root_module['ns3::NetDeviceQueueInterface'])
    register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
    register_Ns3Node_methods(root_module, root_module['ns3::Node'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
    register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
    register_Ns3QueueItem_methods(root_module, root_module['ns3::QueueItem'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    # ns3::Hash namespace classes.
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return
def register_Ns3Address_methods(root_module, cls):
    """Attach operators, constructors and member functions to ns3::Address."""
    # Comparison and stream-output operators.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: default, (type, buffer, len), copy.
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    cls.add_constructor([param('ns3::Address const &', 'address')])
    # Member functions (see address.h, module 'network').
    cls.add_method('CheckCompatible', 'bool', [param('uint8_t', 'type'), param('uint8_t', 'len')], is_const=True)
    cls.add_method('CopyAllFrom', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    cls.add_method('CopyAllTo', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint8_t', 'len')], is_const=True)
    cls.add_method('CopyFrom', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    cls.add_method('CopyTo', 'uint32_t', [param('uint8_t *', 'buffer')], is_const=True)
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'buffer')])
    cls.add_method('GetLength', 'uint8_t', [], is_const=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('IsInvalid', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool', [param('uint8_t', 'type')], is_const=True)
    cls.add_method('Register', 'uint8_t', [], is_static=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'buffer')], is_const=True)
    return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Attach constructors and member functions to ns3::AttributeConstructionList."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Add', 'void',
                   [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    # Begin()/End() expose const list iteration over the stored items.
    item_iterator = 'std::_List_const_iterator< ns3::AttributeConstructionList::Item >'
    for name in ('Begin', 'End'):
        cls.add_method(name, item_iterator, [], is_const=True)
    cls.add_method('Find', 'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True)
    return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Attach constructors and public attributes to AttributeConstructionList::Item."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    # Public data members exposed as mutable instance attributes.
    for attr_name, cpp_type in (
            ('checker', 'ns3::Ptr< ns3::AttributeChecker const >'),
            ('name', 'std::string'),
            ('value', 'ns3::Ptr< ns3::AttributeValue >')):
        cls.add_instance_attribute(attr_name, cpp_type, is_const=False)
    return
def register_Ns3Buffer_methods(root_module, cls):
    """Attach constructors and member functions to ns3::Buffer."""
    # Constructors: default, sized, sized + initialize flag, copy.
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'dataSize')])
    cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
    cls.add_constructor([param('ns3::Buffer const &', 'o')])
    # Grow the buffer at either end.
    cls.add_method('AddAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('AddAtEnd', 'void', [param('ns3::Buffer const &', 'o')])
    cls.add_method('AddAtStart', 'void', [param('uint32_t', 'start')])
    cls.add_method('Begin', 'ns3::Buffer::Iterator', [], is_const=True)
    # Data access, fragmentation and (de)serialization.
    cls.add_method('CopyData', 'void', [param('std::ostream *', 'os'), param('uint32_t', 'size')], is_const=True)
    cls.add_method('CopyData', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')], is_const=True)
    cls.add_method('CreateFragment', 'ns3::Buffer', [param('uint32_t', 'start'), param('uint32_t', 'length')], is_const=True)
    cls.add_method('Deserialize', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('End', 'ns3::Buffer::Iterator', [], is_const=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetSize', 'uint32_t', [], is_const=True)
    cls.add_method('PeekData', 'uint8_t const *', [], is_const=True)
    # Shrink the buffer at either end.
    cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')])
    cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True)
    return
def register_Ns3BufferIterator_methods(root_module, cls):
    """Attach constructors and member functions to ns3::Buffer::Iterator."""
    cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
    cls.add_constructor([])
    # Checksum helpers.
    cls.add_method('CalculateIpChecksum', 'uint16_t', [param('uint16_t', 'size')])
    cls.add_method('CalculateIpChecksum', 'uint16_t', [param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
    # Position queries and cursor movement.
    cls.add_method('GetDistanceFrom', 'uint32_t', [param('ns3::Buffer::Iterator const &', 'o')], is_const=True)
    cls.add_method('GetSize', 'uint32_t', [], is_const=True)
    cls.add_method('IsEnd', 'bool', [], is_const=True)
    cls.add_method('IsStart', 'bool', [], is_const=True)
    cls.add_method('Next', 'void', [])
    cls.add_method('Next', 'void', [param('uint32_t', 'delta')])
    cls.add_method('PeekU8', 'uint8_t', [])
    cls.add_method('Prev', 'void', [])
    cls.add_method('Prev', 'void', [param('uint32_t', 'delta')])
    # Bulk reads into a raw buffer or another iterator's range.
    cls.add_method('Read', 'void', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('Read', 'void', [param('ns3::Buffer::Iterator', 'start'), param('uint32_t', 'size')])
    # Scalar readers: little-endian-to-host, network-to-host and host order.
    for reader, result_type in (
            ('ReadLsbtohU16', 'uint16_t'), ('ReadLsbtohU32', 'uint32_t'), ('ReadLsbtohU64', 'uint64_t'),
            ('ReadNtohU16', 'uint16_t'), ('ReadNtohU32', 'uint32_t'), ('ReadNtohU64', 'uint64_t'),
            ('ReadU16', 'uint16_t'), ('ReadU32', 'uint32_t'), ('ReadU64', 'uint64_t'),
            ('ReadU8', 'uint8_t')):
        cls.add_method(reader, result_type, [])
    # Bulk writes from a raw buffer or an iterator range.
    cls.add_method('Write', 'void', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('Write', 'void', [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
    # Scalar writers: host-to-little-endian, host-to-network and host order.
    for writer, data_type in (
            ('WriteHtolsbU16', 'uint16_t'), ('WriteHtolsbU32', 'uint32_t'), ('WriteHtolsbU64', 'uint64_t'),
            ('WriteHtonU16', 'uint16_t'), ('WriteHtonU32', 'uint32_t'), ('WriteHtonU64', 'uint64_t'),
            ('WriteU16', 'uint16_t'), ('WriteU32', 'uint32_t'), ('WriteU64', 'uint64_t')):
        cls.add_method(writer, 'void', [param(data_type, 'data')])
    # WriteU8: single byte, or the same byte repeated len times.
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'data')])
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'data'), param('uint32_t', 'len')])
    return
def register_Ns3ByteTagIterator_methods(root_module, cls):
    """Register bindings for ns3::ByteTagIterator (packet.h, module 'network').

    Wraps the copy constructor plus the two iteration members,
    HasNext() const and Next().
    """
    # ByteTagIterator(ByteTagIterator const &) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
    # bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # ByteTagIterator::Item Next()
    cls.add_method('Next', 'ns3::ByteTagIterator::Item', [])
    return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
    """Register bindings for ns3::ByteTagIterator::Item (packet.h, module 'network').

    Exposes the copy constructor and the const accessors for the tag's
    byte range, tag payload, and type id.
    """
    # Item(Item const &) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
    # uint32_t GetEnd() const
    cls.add_method('GetEnd', 'uint32_t', [], is_const=True)
    # uint32_t GetStart() const
    cls.add_method('GetStart', 'uint32_t', [], is_const=True)
    # void GetTag(ns3::Tag & tag) const
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    # ns3::TypeId GetTypeId() const
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3ByteTagList_methods(root_module, cls):
    """Register bindings for ns3::ByteTagList (byte-tag-list.h, module 'network').

    Covers both constructors, the two Add overloads, the offset-adjustment
    helpers, the Begin iterator factory, and RemoveAll.
    """
    # ByteTagList() and copy constructor
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
    # TagBuffer Add(TypeId tid, uint32_t bufferSize, int32_t start, int32_t end)
    cls.add_method('Add', 'ns3::TagBuffer',
                   [param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
    # void Add(ByteTagList const & o)
    cls.add_method('Add', 'void', [param('ns3::ByteTagList const &', 'o')])
    # void AddAtEnd(int32_t appendOffset)
    cls.add_method('AddAtEnd', 'void', [param('int32_t', 'appendOffset')])
    # void AddAtStart(int32_t prependOffset)
    cls.add_method('AddAtStart', 'void', [param('int32_t', 'prependOffset')])
    # void Adjust(int32_t adjustment)
    cls.add_method('Adjust', 'void', [param('int32_t', 'adjustment')])
    # Iterator Begin(int32_t offsetStart, int32_t offsetEnd) const
    cls.add_method('Begin', 'ns3::ByteTagList::Iterator',
                   [param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
                   is_const=True)
    # void RemoveAll()
    cls.add_method('RemoveAll', 'void', [])
    return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
    """Register bindings for ns3::ByteTagList::Iterator (byte-tag-list.h, module 'network')."""
    # Iterator(Iterator const &) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
    # uint32_t GetOffsetStart() const
    cls.add_method('GetOffsetStart', 'uint32_t', [], is_const=True)
    # bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # Iterator::Item Next()
    cls.add_method('Next', 'ns3::ByteTagList::Iterator::Item', [])
    return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
    """Register bindings for ns3::ByteTagList::Iterator::Item (byte-tag-list.h, module 'network').

    Exposes the two constructors and the five public data members
    (buf, end, size, start, tid) as writable instance attributes.
    """
    # Item(Item const &) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
    # Item(TagBuffer buf)
    cls.add_constructor([param('ns3::TagBuffer', 'buf')])
    # Public data members, all mutable from Python.
    cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
    cls.add_instance_attribute('end', 'int32_t', is_const=False)
    cls.add_instance_attribute('size', 'uint32_t', is_const=False)
    cls.add_instance_attribute('start', 'int32_t', is_const=False)
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return
def register_Ns3CallbackBase_methods(root_module, cls):
    """Register bindings for ns3::CallbackBase (callback.h, module 'core')."""
    # CallbackBase(CallbackBase const &) [copy constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    # CallbackBase()
    cls.add_constructor([])
    # Ptr<CallbackImplBase> GetImpl() const
    cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [], is_const=True)
    # CallbackBase(Ptr<CallbackImplBase> impl) -- protected in C++
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
    return
def register_Ns3EventId_methods(root_module, cls):
    """Register bindings for ns3::EventId (event-id.h, module 'core').

    Adds equality operators, all three constructors, and the scheduler
    bookkeeping accessors (Cancel, GetContext, GetTs, GetUid, IsExpired,
    IsRunning, PeekEventImpl).
    """
    # Comparison operators != and ==
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('==')
    # EventId(EventId const &) [copy constructor]
    cls.add_constructor([param('ns3::EventId const &', 'arg0')])
    # EventId()
    cls.add_constructor([])
    # EventId(Ptr<EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid)
    cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
    # void Cancel()
    cls.add_method('Cancel', 'void', [])
    # uint32_t GetContext() const
    cls.add_method('GetContext', 'uint32_t', [], is_const=True)
    # uint64_t GetTs() const
    cls.add_method('GetTs', 'uint64_t', [], is_const=True)
    # uint32_t GetUid() const
    cls.add_method('GetUid', 'uint32_t', [], is_const=True)
    # bool IsExpired() const
    cls.add_method('IsExpired', 'bool', [], is_const=True)
    # bool IsRunning() const
    cls.add_method('IsRunning', 'bool', [], is_const=True)
    # EventImpl * PeekEventImpl() const
    cls.add_method('PeekEventImpl', 'ns3::EventImpl *', [], is_const=True)
    return
def register_Ns3FlowMonitorHelper_methods(root_module, cls):
    """Register bindings for ns3::FlowMonitorHelper (flow-monitor-helper.h, module 'flow-monitor').

    Covers installation entry points (Install on a container, on a single
    node, or on all nodes), monitor/classifier accessors, and the three
    XML serialization variants.
    """
    # FlowMonitorHelper()
    cls.add_constructor([])
    # void SetMonitorAttribute(std::string n1, AttributeValue const & v1)
    cls.add_method('SetMonitorAttribute', 'void',
                   [param('std::string', 'n1'), param('ns3::AttributeValue const &', 'v1')])
    # Ptr<FlowMonitor> Install(NodeContainer nodes)
    cls.add_method('Install', 'ns3::Ptr< ns3::FlowMonitor >',
                   [param('ns3::NodeContainer', 'nodes')])
    # Ptr<FlowMonitor> Install(Ptr<Node> node)
    cls.add_method('Install', 'ns3::Ptr< ns3::FlowMonitor >',
                   [param('ns3::Ptr< ns3::Node >', 'node')])
    # Ptr<FlowMonitor> InstallAll()
    cls.add_method('InstallAll', 'ns3::Ptr< ns3::FlowMonitor >', [])
    # Ptr<FlowMonitor> GetMonitor()
    cls.add_method('GetMonitor', 'ns3::Ptr< ns3::FlowMonitor >', [])
    # Ptr<FlowClassifier> GetClassifier()  (IPv4)
    cls.add_method('GetClassifier', 'ns3::Ptr< ns3::FlowClassifier >', [])
    # Ptr<FlowClassifier> GetClassifier6()  (IPv6)
    cls.add_method('GetClassifier6', 'ns3::Ptr< ns3::FlowClassifier >', [])
    # void SerializeToXmlStream(std::ostream & os, int indent, bool enableHistograms, bool enableProbes)
    cls.add_method('SerializeToXmlStream', 'void',
                   [param('std::ostream &', 'os'), param('int', 'indent'), param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    # std::string SerializeToXmlString(int indent, bool enableHistograms, bool enableProbes)
    cls.add_method('SerializeToXmlString', 'std::string',
                   [param('int', 'indent'), param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    # void SerializeToXmlFile(std::string fileName, bool enableHistograms, bool enableProbes)
    cls.add_method('SerializeToXmlFile', 'void',
                   [param('std::string', 'fileName'), param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    return
def register_Ns3Hasher_methods(root_module, cls):
    """Register bindings for ns3::Hasher (hash.h, module 'core').

    Adds the three constructors, the 32/64-bit hash overloads taking either
    a raw buffer or a std::string, and the chainable clear().
    """
    # Hasher(Hasher const &) [copy constructor]
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    # Hasher()
    cls.add_constructor([])
    # Hasher(Ptr<Hash::Implementation> hp)
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    # uint32_t GetHash32(char const * buffer, size_t const size)
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    # uint32_t GetHash32(std::string const s)
    cls.add_method('GetHash32', 'uint32_t', [param('std::string const', 's')])
    # uint64_t GetHash64(char const * buffer, size_t const size)
    cls.add_method('GetHash64', 'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    # uint64_t GetHash64(std::string const s)
    cls.add_method('GetHash64', 'uint64_t', [param('std::string const', 's')])
    # Hasher & clear()
    cls.add_method('clear', 'ns3::Hasher &', [])
    return
def register_Ns3Histogram_methods(root_module, cls):
    """Register bindings for ns3::Histogram (histogram.h, module 'flow-monitor').

    Covers the constructors, value accumulation (AddValue), per-bin
    accessors, XML serialization, and the default-bin-width setter.
    """
    # Histogram(Histogram const &) [copy constructor]
    cls.add_constructor([param('ns3::Histogram const &', 'arg0')])
    # Histogram(double binWidth)
    cls.add_constructor([param('double', 'binWidth')])
    # Histogram()
    cls.add_constructor([])
    # void AddValue(double value)
    cls.add_method('AddValue', 'void', [param('double', 'value')])
    # uint32_t GetBinCount(uint32_t index)
    cls.add_method('GetBinCount', 'uint32_t', [param('uint32_t', 'index')])
    # double GetBinEnd(uint32_t index)
    cls.add_method('GetBinEnd', 'double', [param('uint32_t', 'index')])
    # double GetBinStart(uint32_t index)
    cls.add_method('GetBinStart', 'double', [param('uint32_t', 'index')])
    # double GetBinWidth(uint32_t index) const
    cls.add_method('GetBinWidth', 'double', [param('uint32_t', 'index')], is_const=True)
    # uint32_t GetNBins() const
    cls.add_method('GetNBins', 'uint32_t', [], is_const=True)
    # void SerializeToXmlStream(std::ostream & os, int indent, std::string elementName) const
    cls.add_method('SerializeToXmlStream', 'void',
                   [param('std::ostream &', 'os'), param('int', 'indent'), param('std::string', 'elementName')],
                   is_const=True)
    # void SetDefaultBinWidth(double binWidth)
    cls.add_method('SetDefaultBinWidth', 'void', [param('double', 'binWidth')])
    return
def register_Ns3Inet6SocketAddress_methods(root_module, cls):
    """Register bindings for ns3::Inet6SocketAddress (inet6-socket-address.h, module 'network').

    Adds the six constructors (address and/or port, with the address given
    as an Ipv6Address or a C string), the static conversion helpers, and
    the address/port accessors and mutators.
    """
    # Inet6SocketAddress(Inet6SocketAddress const &) [copy constructor]
    cls.add_constructor([param('ns3::Inet6SocketAddress const &', 'arg0')])
    # Inet6SocketAddress(Ipv6Address ipv6, uint16_t port)
    cls.add_constructor([param('ns3::Ipv6Address', 'ipv6'), param('uint16_t', 'port')])
    # Inet6SocketAddress(Ipv6Address ipv6)
    cls.add_constructor([param('ns3::Ipv6Address', 'ipv6')])
    # Inet6SocketAddress(uint16_t port)
    cls.add_constructor([param('uint16_t', 'port')])
    # Inet6SocketAddress(char const * ipv6, uint16_t port)
    cls.add_constructor([param('char const *', 'ipv6'), param('uint16_t', 'port')])
    # Inet6SocketAddress(char const * ipv6)
    cls.add_constructor([param('char const *', 'ipv6')])
    # static Inet6SocketAddress ConvertFrom(Address const & addr)
    cls.add_method('ConvertFrom', 'ns3::Inet6SocketAddress',
                   [param('ns3::Address const &', 'addr')], is_static=True)
    # Ipv6Address GetIpv6() const
    cls.add_method('GetIpv6', 'ns3::Ipv6Address', [], is_const=True)
    # uint16_t GetPort() const
    cls.add_method('GetPort', 'uint16_t', [], is_const=True)
    # static bool IsMatchingType(Address const & addr)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'addr')], is_static=True)
    # void SetIpv6(Ipv6Address ipv6)
    cls.add_method('SetIpv6', 'void', [param('ns3::Ipv6Address', 'ipv6')])
    # void SetPort(uint16_t port)
    cls.add_method('SetPort', 'void', [param('uint16_t', 'port')])
    return
def register_Ns3InetSocketAddress_methods(root_module, cls):
    """Register bindings for ns3::InetSocketAddress (inet-socket-address.h, module 'network').

    IPv4 counterpart of Inet6SocketAddress: six constructors, static
    conversion helpers, and address/port accessors and mutators.
    """
    # InetSocketAddress(InetSocketAddress const &) [copy constructor]
    cls.add_constructor([param('ns3::InetSocketAddress const &', 'arg0')])
    # InetSocketAddress(Ipv4Address ipv4, uint16_t port)
    cls.add_constructor([param('ns3::Ipv4Address', 'ipv4'), param('uint16_t', 'port')])
    # InetSocketAddress(Ipv4Address ipv4)
    cls.add_constructor([param('ns3::Ipv4Address', 'ipv4')])
    # InetSocketAddress(uint16_t port)
    cls.add_constructor([param('uint16_t', 'port')])
    # InetSocketAddress(char const * ipv4, uint16_t port)
    cls.add_constructor([param('char const *', 'ipv4'), param('uint16_t', 'port')])
    # InetSocketAddress(char const * ipv4)
    cls.add_constructor([param('char const *', 'ipv4')])
    # static InetSocketAddress ConvertFrom(Address const & address)
    cls.add_method('ConvertFrom', 'ns3::InetSocketAddress',
                   [param('ns3::Address const &', 'address')], is_static=True)
    # Ipv4Address GetIpv4() const
    cls.add_method('GetIpv4', 'ns3::Ipv4Address', [], is_const=True)
    # uint16_t GetPort() const
    cls.add_method('GetPort', 'uint16_t', [], is_const=True)
    # static bool IsMatchingType(Address const & address)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')], is_static=True)
    # void SetIpv4(Ipv4Address address)
    cls.add_method('SetIpv4', 'void', [param('ns3::Ipv4Address', 'address')])
    # void SetPort(uint16_t port)
    cls.add_method('SetPort', 'void', [param('uint16_t', 'port')])
    return
def register_Ns3Ipv4Address_methods(root_module, cls):
    """Register bindings for ns3::Ipv4Address (ipv4-address.h, module 'network').

    Adds comparison/stream operators, all constructors, mask combination,
    (de)serialization, the static well-known-address factories, and the
    classification predicates (IsAny, IsBroadcast, IsMulticast, ...).
    """
    # Operators: <, !=, ==, and operator<< for printing.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Ipv4Address(Ipv4Address const &) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
    # Ipv4Address()
    cls.add_constructor([])
    # Ipv4Address(uint32_t address)
    cls.add_constructor([param('uint32_t', 'address')])
    # Ipv4Address(char const * address)
    cls.add_constructor([param('char const *', 'address')])
    # Ipv4Address CombineMask(Ipv4Mask const & mask) const
    cls.add_method('CombineMask', 'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    # static Ipv4Address ConvertFrom(Address const & address)
    cls.add_method('ConvertFrom', 'ns3::Ipv4Address',
                   [param('ns3::Address const &', 'address')], is_static=True)
    # static Ipv4Address Deserialize(uint8_t const * buf)
    cls.add_method('Deserialize', 'ns3::Ipv4Address',
                   [param('uint8_t const *', 'buf')], is_static=True)
    # uint32_t Get() const
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    # Static well-known-address factories.
    cls.add_method('GetAny', 'ns3::Ipv4Address', [], is_static=True)
    cls.add_method('GetBroadcast', 'ns3::Ipv4Address', [], is_static=True)
    cls.add_method('GetLoopback', 'ns3::Ipv4Address', [], is_static=True)
    # Ipv4Address GetSubnetDirectedBroadcast(Ipv4Mask const & mask) const
    cls.add_method('GetSubnetDirectedBroadcast', 'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    # static Ipv4Address GetZero()
    cls.add_method('GetZero', 'ns3::Ipv4Address', [], is_static=True)
    # Classification predicates.
    cls.add_method('IsAny', 'bool', [], is_const=True)
    cls.add_method('IsBroadcast', 'bool', [], is_const=True)
    # bool IsEqual(Ipv4Address const & other) const
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ipv4Address const &', 'other')], is_const=True)
    cls.add_method('IsLocalMulticast', 'bool', [], is_const=True)
    cls.add_method('IsLocalhost', 'bool', [], is_const=True)
    # static bool IsMatchingType(Address const & address)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('IsMulticast', 'bool', [], is_const=True)
    # bool IsSubnetDirectedBroadcast(Ipv4Mask const & mask) const
    cls.add_method('IsSubnetDirectedBroadcast', 'bool',
                   [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    # void Print(std::ostream & os) const
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    # void Serialize(uint8_t * buf) const
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    # void Set(uint32_t address) / void Set(char const * address)
    cls.add_method('Set', 'void', [param('uint32_t', 'address')])
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    return
def register_Ns3Ipv4InterfaceAddress_methods(root_module, cls):
    """Register bindings for ns3::Ipv4InterfaceAddress (ipv4-interface-address.h, module 'internet').

    Adds equality/stream operators, the constructors, and paired
    getters/setters for local address, mask, broadcast, scope, and the
    primary/secondary flag.
    """
    # Operators: !=, ==, and operator<< for printing.
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Ipv4InterfaceAddress()
    cls.add_constructor([])
    # Ipv4InterfaceAddress(Ipv4Address local, Ipv4Mask mask)
    cls.add_constructor([param('ns3::Ipv4Address', 'local'), param('ns3::Ipv4Mask', 'mask')])
    # Ipv4InterfaceAddress(Ipv4InterfaceAddress const & o) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4InterfaceAddress const &', 'o')])
    # Const accessors.
    cls.add_method('GetBroadcast', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetLocal', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetMask', 'ns3::Ipv4Mask', [], is_const=True)
    cls.add_method('GetScope', 'ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e',
                   [], is_const=True)
    cls.add_method('IsSecondary', 'bool', [], is_const=True)
    # Mutators.
    cls.add_method('SetBroadcast', 'void', [param('ns3::Ipv4Address', 'broadcast')])
    cls.add_method('SetLocal', 'void', [param('ns3::Ipv4Address', 'local')])
    cls.add_method('SetMask', 'void', [param('ns3::Ipv4Mask', 'mask')])
    cls.add_method('SetPrimary', 'void', [])
    cls.add_method('SetScope', 'void',
                   [param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')])
    cls.add_method('SetSecondary', 'void', [])
    return
def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Register bindings for ns3::Ipv4Mask (ipv4-address.h, module 'network').

    Adds equality/stream operators, the constructors, static well-known
    mask factories, and the query/mutation members (Get, GetInverse,
    GetPrefixLength, IsEqual, IsMatch, Print, Set).
    """
    # Operators: !=, ==, and operator<< for printing.
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Ipv4Mask(Ipv4Mask const &) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
    # Ipv4Mask()
    cls.add_constructor([])
    # Ipv4Mask(uint32_t mask)
    cls.add_constructor([param('uint32_t', 'mask')])
    # Ipv4Mask(char const * mask)
    cls.add_constructor([param('char const *', 'mask')])
    # uint32_t Get() const
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    # uint32_t GetInverse() const
    cls.add_method('GetInverse', 'uint32_t', [], is_const=True)
    # Static well-known-mask factories.
    cls.add_method('GetLoopback', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv4Mask', [], is_static=True)
    # uint16_t GetPrefixLength() const
    cls.add_method('GetPrefixLength', 'uint16_t', [], is_const=True)
    # static Ipv4Mask GetZero()
    cls.add_method('GetZero', 'ns3::Ipv4Mask', [], is_static=True)
    # bool IsEqual(Ipv4Mask other) const
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv4Mask', 'other')], is_const=True)
    # bool IsMatch(Ipv4Address a, Ipv4Address b) const
    cls.add_method('IsMatch', 'bool',
                   [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
                   is_const=True)
    # void Print(std::ostream & os) const
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    # void Set(uint32_t mask)
    cls.add_method('Set', 'void', [param('uint32_t', 'mask')])
    return
def register_Ns3Ipv6Address_methods(root_module, cls):
    ## Bindings for ns3::Ipv6Address; every declaration below comes from
    ## ipv6-address.h (module 'network').
    ## Comparison and stream-output operators exposed to Python.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## Constructors: default, string form, raw byte buffer, copy, pointer copy.
    constructor_param_lists = [
        [],
        [param('char const *', 'address')],
        [param('uint8_t *', 'address')],
        [param('ns3::Ipv6Address const &', 'addr')],
        [param('ns3::Ipv6Address const *', 'addr')],
    ]
    for ctor_params in constructor_param_lists:
        cls.add_constructor(ctor_params)
    ## Member functions as (name, return type, parameter list, extra flags).
    member_functions = [
        ('CombinePrefix', 'ns3::Ipv6Address', [param('ns3::Ipv6Prefix const &', 'prefix')], {}),
        ('ConvertFrom', 'ns3::Ipv6Address', [param('ns3::Address const &', 'address')], {'is_static': True}),
        ('Deserialize', 'ns3::Ipv6Address', [param('uint8_t const *', 'buf')], {'is_static': True}),
        ('GetAllHostsMulticast', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('GetAllNodesMulticast', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('GetAllRoutersMulticast', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('GetAny', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('GetBytes', 'void', [param('uint8_t *', 'buf')], {'is_const': True}),
        ('GetIpv4MappedAddress', 'ns3::Ipv4Address', [], {'is_const': True}),
        ('GetLoopback', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('GetOnes', 'ns3::Ipv6Address', [], {'is_static': True}),
        ('GetZero', 'ns3::Ipv6Address', [], {'is_static': True}),
        ## IsAllHostsMulticast is marked deprecated upstream; keep the flag.
        ('IsAllHostsMulticast', 'bool', [], {'deprecated': True, 'is_const': True}),
        ('IsAllNodesMulticast', 'bool', [], {'is_const': True}),
        ('IsAllRoutersMulticast', 'bool', [], {'is_const': True}),
        ('IsAny', 'bool', [], {'is_const': True}),
        ('IsDocumentation', 'bool', [], {'is_const': True}),
        ('IsEqual', 'bool', [param('ns3::Ipv6Address const &', 'other')], {'is_const': True}),
        ('IsIpv4MappedAddress', 'bool', [], {'is_const': True}),
        ('IsLinkLocal', 'bool', [], {'is_const': True}),
        ('IsLinkLocalMulticast', 'bool', [], {'is_const': True}),
        ('IsLocalhost', 'bool', [], {'is_const': True}),
        ('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], {'is_static': True}),
        ('IsMulticast', 'bool', [], {'is_const': True}),
        ('IsSolicitedMulticast', 'bool', [], {'is_const': True}),
        ## Autoconfiguration helpers are overloaded on the MAC address width.
        ('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], {'is_static': True}),
        ('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], {'is_static': True}),
        ('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], {'is_static': True}),
        ('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac16Address', 'mac')], {'is_static': True}),
        ('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac48Address', 'mac')], {'is_static': True}),
        ('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac64Address', 'mac')], {'is_static': True}),
        ('MakeIpv4MappedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv4Address', 'addr')], {'is_static': True}),
        ('MakeSolicitedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv6Address', 'addr')], {'is_static': True}),
        ('Print', 'void', [param('std::ostream &', 'os')], {'is_const': True}),
        ('Serialize', 'void', [param('uint8_t *', 'buf')], {'is_const': True}),
        ('Set', 'void', [param('char const *', 'address')], {}),
        ('Set', 'void', [param('uint8_t *', 'address')], {}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3Ipv6InterfaceAddress_methods(root_module, cls):
    ## Bindings for ns3::Ipv6InterfaceAddress; declarations come from
    ## ipv6-interface-address.h (module 'internet').
    ## Operators exposed to Python.
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## Constructors: default, address only, address + prefix, copy.
    constructor_param_lists = [
        [],
        [param('ns3::Ipv6Address', 'address')],
        [param('ns3::Ipv6Address', 'address'), param('ns3::Ipv6Prefix', 'prefix')],
        [param('ns3::Ipv6InterfaceAddress const &', 'o')],
    ]
    for ctor_params in constructor_param_lists:
        cls.add_constructor(ctor_params)
    ## Accessors and mutators as (name, return type, parameters, extra flags).
    member_functions = [
        ('GetAddress', 'ns3::Ipv6Address', [], {'is_const': True}),
        ('GetNsDadUid', 'uint32_t', [], {'is_const': True}),
        ('GetPrefix', 'ns3::Ipv6Prefix', [], {'is_const': True}),
        ('GetScope', 'ns3::Ipv6InterfaceAddress::Scope_e', [], {'is_const': True}),
        ('GetState', 'ns3::Ipv6InterfaceAddress::State_e', [], {'is_const': True}),
        ('IsInSameSubnet', 'bool', [param('ns3::Ipv6Address', 'b')], {'is_const': True}),
        ('SetAddress', 'void', [param('ns3::Ipv6Address', 'address')], {}),
        ('SetNsDadUid', 'void', [param('uint32_t', 'uid')], {}),
        ('SetScope', 'void', [param('ns3::Ipv6InterfaceAddress::Scope_e', 'scope')], {}),
        ('SetState', 'void', [param('ns3::Ipv6InterfaceAddress::State_e', 'state')], {}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
    ## Bindings for ns3::Ipv6Prefix; declarations come from
    ## ipv6-address.h (module 'network').
    ## Operators exposed to Python.
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## Constructors: default, raw bytes, string form, prefix length,
    ## copy, and pointer copy.
    constructor_param_lists = [
        [],
        [param('uint8_t *', 'prefix')],
        [param('char const *', 'prefix')],
        [param('uint8_t', 'prefix')],
        [param('ns3::Ipv6Prefix const &', 'prefix')],
        [param('ns3::Ipv6Prefix const *', 'prefix')],
    ]
    for ctor_params in constructor_param_lists:
        cls.add_constructor(ctor_params)
    ## Member functions as (name, return type, parameters, extra flags).
    member_functions = [
        ('GetBytes', 'void', [param('uint8_t *', 'buf')], {'is_const': True}),
        ('GetLoopback', 'ns3::Ipv6Prefix', [], {'is_static': True}),
        ('GetOnes', 'ns3::Ipv6Prefix', [], {'is_static': True}),
        ('GetPrefixLength', 'uint8_t', [], {'is_const': True}),
        ('GetZero', 'ns3::Ipv6Prefix', [], {'is_static': True}),
        ('IsEqual', 'bool', [param('ns3::Ipv6Prefix const &', 'other')], {'is_const': True}),
        ('IsMatch', 'bool', [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')], {'is_const': True}),
        ('Print', 'void', [param('std::ostream &', 'os')], {'is_const': True}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3Mac48Address_methods(root_module, cls):
    ## Bindings for ns3::Mac48Address; declarations come from
    ## mac48-address.h (module 'network').
    ## Operators exposed to Python.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## Constructors: copy, default, string form ("xx:xx:xx:xx:xx:xx").
    constructor_param_lists = [
        [param('ns3::Mac48Address const &', 'arg0')],
        [],
        [param('char const *', 'str')],
    ]
    for ctor_params in constructor_param_lists:
        cls.add_constructor(ctor_params)
    ## Member functions as (name, return type, parameters, extra flags).
    ## GetMulticast is overloaded for IPv4 and IPv6 group addresses.
    member_functions = [
        ('Allocate', 'ns3::Mac48Address', [], {'is_static': True}),
        ('ConvertFrom', 'ns3::Mac48Address', [param('ns3::Address const &', 'address')], {'is_static': True}),
        ('CopyFrom', 'void', [param('uint8_t const *', 'buffer')], {}),
        ('CopyTo', 'void', [param('uint8_t *', 'buffer')], {'is_const': True}),
        ('GetBroadcast', 'ns3::Mac48Address', [], {'is_static': True}),
        ('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv4Address', 'address')], {'is_static': True}),
        ('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv6Address', 'address')], {'is_static': True}),
        ('GetMulticast6Prefix', 'ns3::Mac48Address', [], {'is_static': True}),
        ('GetMulticastPrefix', 'ns3::Mac48Address', [], {'is_static': True}),
        ('IsBroadcast', 'bool', [], {'is_const': True}),
        ('IsGroup', 'bool', [], {'is_const': True}),
        ('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], {'is_static': True}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3NodeContainer_methods(root_module, cls):
    ## Bindings for ns3::NodeContainer; declarations come from
    ## node-container.h (module 'network').
    ## Iterator type returned by Begin()/End(); spelled out once to avoid
    ## repeating the long libstdc++ template name.
    node_iterator = '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >'
    ## Constructors: copy, default, single node, node by name, and the
    ## 2- to 5-way concatenation overloads.
    constructor_param_lists = [
        [param('ns3::NodeContainer const &', 'arg0')],
        [],
        [param('ns3::Ptr< ns3::Node >', 'node')],
        [param('std::string', 'nodeName')],
        [param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b')],
        [param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c')],
        [param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd')],
        [param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd'), param('ns3::NodeContainer const &', 'e')],
    ]
    for ctor_params in constructor_param_lists:
        cls.add_constructor(ctor_params)
    ## Member functions as (name, return type, parameters, extra flags).
    member_functions = [
        ('Add', 'void', [param('ns3::NodeContainer', 'other')], {}),
        ('Add', 'void', [param('ns3::Ptr< ns3::Node >', 'node')], {}),
        ('Add', 'void', [param('std::string', 'nodeName')], {}),
        ('Begin', node_iterator, [], {'is_const': True}),
        ('Create', 'void', [param('uint32_t', 'n')], {}),
        ('Create', 'void', [param('uint32_t', 'n'), param('uint32_t', 'systemId')], {}),
        ('End', node_iterator, [], {'is_const': True}),
        ('Get', 'ns3::Ptr< ns3::Node >', [param('uint32_t', 'i')], {'is_const': True}),
        ('GetGlobal', 'ns3::NodeContainer', [], {'is_static': True}),
        ('GetN', 'uint32_t', [], {'is_const': True}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    ## Bindings for ns3::ObjectBase; declarations come from
    ## object-base.h (module 'core').
    ## Constructors: default and copy.
    constructor_param_lists = [
        [],
        [param('ns3::ObjectBase const &', 'arg0')],
    ]
    for ctor_params in constructor_param_lists:
        cls.add_constructor(ctor_params)
    ## Member functions as (name, return type, parameters, extra flags).
    ## Note GetInstanceTypeId is pure virtual, and the Construct/Notify
    ## hooks are protected (visibility flag preserved for subclassing).
    member_functions = [
        ('GetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], {'is_const': True}),
        ('GetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], {'is_const': True}),
        ('GetInstanceTypeId', 'ns3::TypeId', [], {'is_pure_virtual': True, 'is_const': True, 'is_virtual': True}),
        ('GetTypeId', 'ns3::TypeId', [], {'is_static': True}),
        ('SetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')], {}),
        ('SetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')], {}),
        ('TraceConnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], {}),
        ('TraceConnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')], {}),
        ('TraceDisconnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], {}),
        ('TraceDisconnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')], {}),
        ('ConstructSelf', 'void', [param('ns3::AttributeConstructionList const &', 'attributes')], {'visibility': 'protected'}),
        ('NotifyConstructionCompleted', 'void', [], {'visibility': 'protected', 'is_virtual': True}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3ObjectDeleter_methods(root_module, cls):
    ## Bindings for ns3::ObjectDeleter; declarations come from
    ## object.h (module 'core').
    ## Constructors: default and copy.
    for ctor_params in ([], [param('ns3::ObjectDeleter const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    ## Single static member: Delete(ns3::Object *).
    cls.add_method('Delete',
                   'void',
                   [param('ns3::Object *', 'object')],
                   is_static=True)
    return
def register_Ns3ObjectFactory_methods(root_module, cls):
    ## Bindings for ns3::ObjectFactory; declarations come from
    ## object-factory.h (module 'core').
    cls.add_output_stream_operator()
    ## Constructors: copy, default, and from a TypeId name string.
    constructor_param_lists = [
        [param('ns3::ObjectFactory const &', 'arg0')],
        [],
        [param('std::string', 'typeId')],
    ]
    for ctor_params in constructor_param_lists:
        cls.add_constructor(ctor_params)
    ## Member functions as (name, return type, parameters, extra flags).
    ## SetTypeId is overloaded on TypeId, C string, and std::string.
    member_functions = [
        ('Create', 'ns3::Ptr< ns3::Object >', [], {'is_const': True}),
        ('GetTypeId', 'ns3::TypeId', [], {'is_const': True}),
        ('Set', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')], {}),
        ('SetTypeId', 'void', [param('ns3::TypeId', 'tid')], {}),
        ('SetTypeId', 'void', [param('char const *', 'tid')], {}),
        ('SetTypeId', 'void', [param('std::string', 'tid')], {}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3PacketMetadata_methods(root_module, cls):
    ## Bindings for ns3::PacketMetadata; declarations come from
    ## packet-metadata.h (module 'network').
    ## Constructors: (uid, size) and copy.
    constructor_param_lists = [
        [param('uint64_t', 'uid'), param('uint32_t', 'size')],
        [param('ns3::PacketMetadata const &', 'o')],
    ]
    for ctor_params in constructor_param_lists:
        cls.add_constructor(ctor_params)
    ## Member functions as (name, return type, parameters, extra flags).
    member_functions = [
        ('AddAtEnd', 'void', [param('ns3::PacketMetadata const &', 'o')], {}),
        ('AddHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')], {}),
        ('AddPaddingAtEnd', 'void', [param('uint32_t', 'end')], {}),
        ('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')], {}),
        ('BeginItem', 'ns3::PacketMetadata::ItemIterator', [param('ns3::Buffer', 'buffer')], {'is_const': True}),
        ('CreateFragment', 'ns3::PacketMetadata', [param('uint32_t', 'start'), param('uint32_t', 'end')], {'is_const': True}),
        ('Deserialize', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')], {}),
        ('Enable', 'void', [], {'is_static': True}),
        ('EnableChecking', 'void', [], {'is_static': True}),
        ('GetSerializedSize', 'uint32_t', [], {'is_const': True}),
        ('GetUid', 'uint64_t', [], {'is_const': True}),
        ('RemoveAtEnd', 'void', [param('uint32_t', 'end')], {}),
        ('RemoveAtStart', 'void', [param('uint32_t', 'start')], {}),
        ('RemoveHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')], {}),
        ('RemoveTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')], {}),
        ('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], {'is_const': True}),
    ]
    for fn_name, fn_retval, fn_params, fn_flags in member_functions:
        cls.add_method(fn_name, fn_retval, fn_params, **fn_flags)
    return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
    """Register ns3::PacketMetadata::Item (packet-metadata.h, module 'network')
    on the PyBindGen class wrapper *cls*: its constructors plus the public
    record fields, which are exposed to Python as instance attributes.
    """
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item() [constructor]
    cls.add_constructor([])
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item(ns3::PacketMetadata::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::current [variable]
    cls.add_instance_attribute('current', 'ns3::Buffer::Iterator', is_const=False)
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentSize [variable]
    cls.add_instance_attribute('currentSize', 'uint32_t', is_const=False)
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromEnd [variable]
    cls.add_instance_attribute('currentTrimedFromEnd', 'uint32_t', is_const=False)
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromStart [variable]
    cls.add_instance_attribute('currentTrimedFromStart', 'uint32_t', is_const=False)
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::isFragment [variable]
    cls.add_instance_attribute('isFragment', 'bool', is_const=False)
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::tid [variable]
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
    """Register ns3::PacketMetadata::ItemIterator (packet-metadata.h, module
    'network') on the PyBindGen class wrapper *cls*: its two constructors and
    the HasNext/Next iteration methods.
    """
    # ItemIterator(ns3::PacketMetadata::ItemIterator const & arg0)  [copy constructor]
    cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
    # ItemIterator(ns3::PacketMetadata const * metadata, ns3::Buffer buffer)  [constructor]
    cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')])
    # bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # ns3::PacketMetadata::Item Next()
    cls.add_method('Next', 'ns3::PacketMetadata::Item', [])
    return
def register_Ns3PacketTagIterator_methods(root_module, cls):
    """Register ns3::PacketTagIterator (packet.h, module 'network') on the
    PyBindGen class wrapper *cls*: copy constructor plus HasNext/Next.
    """
    # PacketTagIterator(ns3::PacketTagIterator const & arg0)  [copy constructor]
    cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
    # bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # ns3::PacketTagIterator::Item Next()
    cls.add_method('Next', 'ns3::PacketTagIterator::Item', [])
    return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
    """Register ns3::PacketTagIterator::Item (packet.h, module 'network') on
    the PyBindGen class wrapper *cls*: copy constructor and tag accessors.
    """
    # Item(ns3::PacketTagIterator::Item const & arg0)  [copy constructor]
    cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
    # void GetTag(ns3::Tag & tag) const
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    # ns3::TypeId GetTypeId() const
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3PacketTagList_methods(root_module, cls):
    """Register ns3::PacketTagList (packet-tag-list.h, module 'network') on
    the PyBindGen class wrapper *cls*: constructors plus the tag-list
    manipulation API (Add/Head/Peek/Remove/RemoveAll/Replace).
    """
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList() [constructor]
    cls.add_constructor([])
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList(ns3::PacketTagList const & o) [copy constructor]
    cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
    ## packet-tag-list.h (module 'network'): void ns3::PacketTagList::Add(ns3::Tag const & tag) const [member function]
    cls.add_method('Add',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData const * ns3::PacketTagList::Head() const [member function]
    cls.add_method('Head',
                   'ns3::PacketTagList::TagData const *',
                   [],
                   is_const=True)
    ## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Peek(ns3::Tag & tag) const [member function]
    cls.add_method('Peek',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Remove(ns3::Tag & tag) [member function]
    cls.add_method('Remove',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet-tag-list.h (module 'network'): void ns3::PacketTagList::RemoveAll() [member function]
    cls.add_method('RemoveAll',
                   'void',
                   [])
    ## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Replace(ns3::Tag & tag) [member function]
    cls.add_method('Replace',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
    """Register ns3::PacketTagList::TagData (packet-tag-list.h, module
    'network') on the PyBindGen class wrapper *cls*: constructors plus the
    linked-list node fields exposed as instance attributes.
    """
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData() [constructor]
    cls.add_constructor([])
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData(ns3::PacketTagList::TagData const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::count [variable]
    cls.add_instance_attribute('count', 'uint32_t', is_const=False)
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::data [variable]
    cls.add_instance_attribute('data', 'uint8_t [ 21 ]', is_const=False)
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::next [variable]
    cls.add_instance_attribute('next', 'ns3::PacketTagList::TagData *', is_const=False)
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::tid [variable]
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    """Register the ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase,
    ns3::ObjectDeleter> template instantiation (simple-ref-count.h, module
    'core') on the PyBindGen class wrapper *cls*.
    """
    # SimpleRefCount()  [constructor]
    cls.add_constructor([])
    # SimpleRefCount(SimpleRefCount<...> const & o)  [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3Simulator_methods(root_module, cls):
    """Register ns3::Simulator (simulator.h, module 'core') on the PyBindGen
    class wrapper *cls*. Nearly all members are static, mirroring the C++
    singleton-style simulator API (event scheduling, time queries, lifecycle).
    """
    ## simulator.h (module 'core'): ns3::Simulator::Simulator(ns3::Simulator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
    ## simulator.h (module 'core'): static void ns3::Simulator::Cancel(ns3::EventId const & id) [member function]
    cls.add_method('Cancel',
                   'void',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Destroy() [member function]
    cls.add_method('Destroy',
                   'void',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetContext() [member function]
    cls.add_method('GetContext',
                   'uint32_t',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetDelayLeft(ns3::EventId const & id) [member function]
    cls.add_method('GetDelayLeft',
                   'ns3::Time',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Ptr<ns3::SimulatorImpl> ns3::Simulator::GetImplementation() [member function]
    cls.add_method('GetImplementation',
                   'ns3::Ptr< ns3::SimulatorImpl >',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetMaximumSimulationTime() [member function]
    cls.add_method('GetMaximumSimulationTime',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetSystemId() [member function]
    cls.add_method('GetSystemId',
                   'uint32_t',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static bool ns3::Simulator::IsExpired(ns3::EventId const & id) [member function]
    cls.add_method('IsExpired',
                   'bool',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static bool ns3::Simulator::IsFinished() [member function]
    cls.add_method('IsFinished',
                   'bool',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::Now() [member function]
    cls.add_method('Now',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Remove(ns3::EventId const & id) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::SetImplementation(ns3::Ptr<ns3::SimulatorImpl> impl) [member function]
    cls.add_method('SetImplementation',
                   'void',
                   [param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler',
                   'void',
                   [param('ns3::ObjectFactory', 'schedulerFactory')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Stop() [member function]
    cls.add_method('Stop',
                   'void',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop',
                   'void',
                   [param('ns3::Time const &', 'delay')],
                   is_static=True)
    return
def register_Ns3Tag_methods(root_module, cls):
    """Register ns3::Tag (tag.h, module 'network') on the PyBindGen class
    wrapper *cls*. Serialize/Deserialize/GetSerializedSize/Print are pure
    virtual, so the wrapper allows Python subclasses to override them.
    """
    ## tag.h (module 'network'): ns3::Tag::Tag() [constructor]
    cls.add_constructor([])
    ## tag.h (module 'network'): ns3::Tag::Tag(ns3::Tag const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Tag const &', 'arg0')])
    ## tag.h (module 'network'): void ns3::Tag::Deserialize(ns3::TagBuffer i) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_pure_virtual=True, is_virtual=True)
    ## tag.h (module 'network'): uint32_t ns3::Tag::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## tag.h (module 'network'): static ns3::TypeId ns3::Tag::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## tag.h (module 'network'): void ns3::Tag::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## tag.h (module 'network'): void ns3::Tag::Serialize(ns3::TagBuffer i) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3TagBuffer_methods(root_module, cls):
    """Register ns3::TagBuffer (tag-buffer.h, module 'network') on the
    PyBindGen class wrapper *cls*: constructors plus the fixed-width
    Read*/Write* accessors and buffer helpers (CopyFrom, TrimAtEnd).
    """
    ## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
    ## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
    cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
    cls.add_method('CopyFrom',
                   'void',
                   [param('ns3::TagBuffer', 'o')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
    cls.add_method('Read',
                   'void',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    ## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
    cls.add_method('ReadDouble',
                   'double',
                   [])
    ## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
    cls.add_method('ReadU16',
                   'uint16_t',
                   [])
    ## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
    cls.add_method('ReadU32',
                   'uint32_t',
                   [])
    ## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
    cls.add_method('ReadU64',
                   'uint64_t',
                   [])
    ## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
    cls.add_method('ReadU8',
                   'uint8_t',
                   [])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
    cls.add_method('TrimAtEnd',
                   'void',
                   [param('uint32_t', 'trim')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Write',
                   'void',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
    cls.add_method('WriteDouble',
                   'void',
                   [param('double', 'v')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function]
    cls.add_method('WriteU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function]
    cls.add_method('WriteU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
    cls.add_method('WriteU64',
                   'void',
                   [param('uint64_t', 'v')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
    cls.add_method('WriteU8',
                   'void',
                   [param('uint8_t', 'v')])
    return
def register_Ns3TimeWithUnit_methods(root_module, cls):
    """Register ns3::TimeWithUnit (nstime.h, module 'core') on the PyBindGen
    class wrapper *cls*: C++ operator<< support plus its constructors.
    """
    # register the C++ output-stream operator (operator<<)
    cls.add_output_stream_operator()
    # TimeWithUnit(ns3::TimeWithUnit const & arg0)  [copy constructor]
    cls.add_constructor([param('ns3::TimeWithUnit const &', 'arg0')])
    # TimeWithUnit(ns3::Time const time, ns3::Time::Unit const unit)  [constructor]
    cls.add_constructor([param('ns3::Time const', 'time'), param('ns3::Time::Unit const', 'unit')])
    return
def register_Ns3TypeId_methods(root_module, cls):
    """Register ns3::TypeId (type-id.h, module 'core') on the PyBindGen class
    wrapper *cls*: comparison/stream operators, constructors, attribute and
    trace-source registration/introspection, and the static Lookup* helpers.
    """
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
    cls.add_constructor([param('char const *', 'name')])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('AddAttribute',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('AddAttribute',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
    cls.add_method('AddTraceSource',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')],
                   deprecated=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor, std::string callback) [member function]
    cls.add_method('AddTraceSource',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
    cls.add_method('GetAttribute',
                   'ns3::TypeId::AttributeInformation',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
    cls.add_method('GetAttributeFullName',
                   'std::string',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
    cls.add_method('GetAttributeN',
                   'uint32_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
    cls.add_method('GetConstructor',
                   'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
    cls.add_method('GetGroupName',
                   'std::string',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
    cls.add_method('GetHash',
                   'uint32_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
    cls.add_method('GetName',
                   'std::string',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
    cls.add_method('GetParent',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
    cls.add_method('GetRegistered',
                   'ns3::TypeId',
                   [param('uint32_t', 'i')],
                   is_static=True)
    ## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
    cls.add_method('GetRegisteredN',
                   'uint32_t',
                   [],
                   is_static=True)
    ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function]
    cls.add_method('GetSize',
                   'std::size_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
    cls.add_method('GetTraceSource',
                   'ns3::TypeId::TraceSourceInformation',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
    cls.add_method('GetTraceSourceN',
                   'uint32_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint16_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
    cls.add_method('HasConstructor',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
    cls.add_method('HasParent',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
    cls.add_method('HideFromDocumentation',
                   'ns3::TypeId',
                   [])
    ## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
    cls.add_method('IsChildOf',
                   'bool',
                   [param('ns3::TypeId', 'other')],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
    cls.add_method('LookupAttributeByName',
                   'bool',
                   [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
                   is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
    cls.add_method('LookupByHash',
                   'ns3::TypeId',
                   [param('uint32_t', 'hash')],
                   is_static=True)
    ## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
    cls.add_method('LookupByHashFailSafe',
                   'bool',
                   [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')],
                   is_static=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
    cls.add_method('LookupByName',
                   'ns3::TypeId',
                   [param('std::string', 'name')],
                   is_static=True)
    ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
    cls.add_method('LookupTraceSourceByName',
                   'ns3::Ptr< ns3::TraceSourceAccessor const >',
                   [param('std::string', 'name')],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
    cls.add_method('MustHideFromDocumentation',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
    cls.add_method('SetAttributeInitialValue',
                   'bool',
                   [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
    cls.add_method('SetGroupName',
                   'ns3::TypeId',
                   [param('std::string', 'groupName')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
    cls.add_method('SetParent',
                   'ns3::TypeId',
                   [param('ns3::TypeId', 'tid')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function]
    cls.add_method('SetSize',
                   'ns3::TypeId',
                   [param('std::size_t', 'size')])
    ## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
    cls.add_method('SetUid',
                   'void',
                   [param('uint16_t', 'tid')])
    return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Register ns3::TypeId::AttributeInformation (type-id.h, module 'core')
    on the PyBindGen class wrapper *cls*: constructors plus the attribute
    metadata fields exposed as instance attributes.
    """
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
    cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
    cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
    cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Register ns3::TypeId::TraceSourceInformation (type-id.h, module 'core')
    on the PyBindGen class wrapper *cls*: constructors plus the trace-source
    metadata fields exposed as instance attributes.
    """
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::callback [variable]
    cls.add_instance_attribute('callback', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    return
def register_Ns3Empty_methods(root_module, cls):
    """Register ns3::empty (empty.h, module 'core') on the PyBindGen class
    wrapper *cls*: just its default and copy constructors.
    """
    # empty()  [constructor]
    cls.add_constructor([])
    # empty(ns3::empty const & arg0)  [copy constructor]
    cls.add_constructor([param('ns3::empty const &', 'arg0')])
    return
def register_Ns3Int64x64_t_methods(root_module, cls):
    """Register ns3::int64x64_t (int64x64-double.h, module 'core') on the
    PyBindGen class wrapper *cls*: full arithmetic/comparison operator set,
    the many numeric-conversion constructors, and accessor methods.
    """
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_unary_numeric_operator('-')
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', u'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
    cls.add_constructor([])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long double v) [constructor]
    cls.add_constructor([param('long double', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
    cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    ## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
    cls.add_method('GetDouble',
                   'double',
                   [],
                   is_const=True)
    ## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
    cls.add_method('GetHigh',
                   'int64_t',
                   [],
                   is_const=True)
    ## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
    cls.add_method('GetLow',
                   'uint64_t',
                   [],
                   is_const=True)
    ## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
    cls.add_method('Invert',
                   'ns3::int64x64_t',
                   [param('uint64_t', 'v')],
                   is_static=True)
    ## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
    cls.add_method('MulByInvert',
                   'void',
                   [param('ns3::int64x64_t const &', 'o')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::implementation [variable]
    cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const', is_const=True)
    return
def register_Ns3Chunk_methods(root_module, cls):
    """Register ns3::Chunk (chunk.h, module 'network') on the PyBindGen class
    wrapper *cls*. Deserialize and Print are pure virtual, so the wrapper
    allows Python subclasses to override them.
    """
    ## chunk.h (module 'network'): ns3::Chunk::Chunk() [constructor]
    cls.add_constructor([])
    ## chunk.h (module 'network'): ns3::Chunk::Chunk(ns3::Chunk const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
    ## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    ## chunk.h (module 'network'): static ns3::TypeId ns3::Chunk::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## chunk.h (module 'network'): void ns3::Chunk::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Header_methods(root_module, cls):
    """Register ns3::Header (header.h, module 'network') on the PyBindGen
    class wrapper *cls*: stream output operator, constructors, and the
    pure-virtual serialization interface overridable from Python.
    """
    cls.add_output_stream_operator()
    ## header.h (module 'network'): ns3::Header::Header() [constructor]
    cls.add_constructor([])
    ## header.h (module 'network'): ns3::Header::Header(ns3::Header const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Header const &', 'arg0')])
    ## header.h (module 'network'): uint32_t ns3::Header::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    ## header.h (module 'network'): uint32_t ns3::Header::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## header.h (module 'network'): static ns3::TypeId ns3::Header::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## header.h (module 'network'): void ns3::Header::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## header.h (module 'network'): void ns3::Header::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Ipv4Header_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4Header (ipv4-header.h, module 'internet')."""
    # Copy constructor first, then the default constructor (original registration order).
    cls.add_constructor([param('ns3::Ipv4Header const &', 'arg0')])
    cls.add_constructor([])
    # (method name, return type, parameter list, flag kwargs) — registered in order.
    member_functions = [
        ('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], dict(is_virtual=True)),
        ('DscpTypeToString', 'std::string', [param('ns3::Ipv4Header::DscpType', 'dscp')], dict(is_const=True)),
        ('EcnTypeToString', 'std::string', [param('ns3::Ipv4Header::EcnType', 'ecn')], dict(is_const=True)),
        ('EnableChecksum', 'void', [], {}),
        ('GetDestination', 'ns3::Ipv4Address', [], dict(is_const=True)),
        ('GetDscp', 'ns3::Ipv4Header::DscpType', [], dict(is_const=True)),
        ('GetEcn', 'ns3::Ipv4Header::EcnType', [], dict(is_const=True)),
        ('GetFragmentOffset', 'uint16_t', [], dict(is_const=True)),
        ('GetIdentification', 'uint16_t', [], dict(is_const=True)),
        ('GetInstanceTypeId', 'ns3::TypeId', [], dict(is_const=True, is_virtual=True)),
        ('GetPayloadSize', 'uint16_t', [], dict(is_const=True)),
        ('GetProtocol', 'uint8_t', [], dict(is_const=True)),
        ('GetSerializedSize', 'uint32_t', [], dict(is_const=True, is_virtual=True)),
        ('GetSource', 'ns3::Ipv4Address', [], dict(is_const=True)),
        ('GetTos', 'uint8_t', [], dict(is_const=True)),
        ('GetTtl', 'uint8_t', [], dict(is_const=True)),
        ('GetTypeId', 'ns3::TypeId', [], dict(is_static=True)),
        ('IsChecksumOk', 'bool', [], dict(is_const=True)),
        ('IsDontFragment', 'bool', [], dict(is_const=True)),
        ('IsLastFragment', 'bool', [], dict(is_const=True)),
        ('Print', 'void', [param('std::ostream &', 'os')], dict(is_const=True, is_virtual=True)),
        ('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], dict(is_const=True, is_virtual=True)),
        ('SetDestination', 'void', [param('ns3::Ipv4Address', 'destination')], {}),
        ('SetDontFragment', 'void', [], {}),
        ('SetDscp', 'void', [param('ns3::Ipv4Header::DscpType', 'dscp')], {}),
        ('SetEcn', 'void', [param('ns3::Ipv4Header::EcnType', 'ecn')], {}),
        ('SetFragmentOffset', 'void', [param('uint16_t', 'offsetBytes')], {}),
        ('SetIdentification', 'void', [param('uint16_t', 'identification')], {}),
        ('SetLastFragment', 'void', [], {}),
        ('SetMayFragment', 'void', [], {}),
        ('SetMoreFragments', 'void', [], {}),
        ('SetPayloadSize', 'void', [param('uint16_t', 'size')], {}),
        ('SetProtocol', 'void', [param('uint8_t', 'num')], {}),
        ('SetSource', 'void', [param('ns3::Ipv4Address', 'source')], {}),
        ('SetTos', 'void', [param('uint8_t', 'tos')], {}),
        ('SetTtl', 'void', [param('uint8_t', 'ttl')], {}),
    ]
    for method_name, return_type, parameters, flags in member_functions:
        cls.add_method(method_name, return_type, parameters, **flags)
    return
def register_Ns3Ipv6Header_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6Header (ipv6-header.h, module 'internet')."""
    # Copy constructor first, then the default constructor (original registration order).
    cls.add_constructor([param('ns3::Ipv6Header const &', 'arg0')])
    cls.add_constructor([])
    # (method name, return type, parameter list, flag kwargs) — registered in order.
    member_functions = [
        ('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], dict(is_virtual=True)),
        ('DscpTypeToString', 'std::string', [param('ns3::Ipv6Header::DscpType', 'dscp')], dict(is_const=True)),
        ('GetDestinationAddress', 'ns3::Ipv6Address', [], dict(is_const=True)),
        ('GetDscp', 'ns3::Ipv6Header::DscpType', [], dict(is_const=True)),
        ('GetFlowLabel', 'uint32_t', [], dict(is_const=True)),
        ('GetHopLimit', 'uint8_t', [], dict(is_const=True)),
        ('GetInstanceTypeId', 'ns3::TypeId', [], dict(is_const=True, is_virtual=True)),
        ('GetNextHeader', 'uint8_t', [], dict(is_const=True)),
        ('GetPayloadLength', 'uint16_t', [], dict(is_const=True)),
        ('GetSerializedSize', 'uint32_t', [], dict(is_const=True, is_virtual=True)),
        ('GetSourceAddress', 'ns3::Ipv6Address', [], dict(is_const=True)),
        ('GetTrafficClass', 'uint8_t', [], dict(is_const=True)),
        ('GetTypeId', 'ns3::TypeId', [], dict(is_static=True)),
        ('Print', 'void', [param('std::ostream &', 'os')], dict(is_const=True, is_virtual=True)),
        ('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], dict(is_const=True, is_virtual=True)),
        ('SetDestinationAddress', 'void', [param('ns3::Ipv6Address', 'dst')], {}),
        ('SetDscp', 'void', [param('ns3::Ipv6Header::DscpType', 'dscp')], {}),
        ('SetFlowLabel', 'void', [param('uint32_t', 'flow')], {}),
        ('SetHopLimit', 'void', [param('uint8_t', 'limit')], {}),
        ('SetNextHeader', 'void', [param('uint8_t', 'next')], {}),
        ('SetPayloadLength', 'void', [param('uint16_t', 'len')], {}),
        ('SetSourceAddress', 'void', [param('ns3::Ipv6Address', 'src')], {}),
        ('SetTrafficClass', 'void', [param('uint8_t', 'traffic')], {}),
    ]
    for method_name, return_type, parameters, flags in member_functions:
        cls.add_method(method_name, return_type, parameters, **flags)
    return
def register_Ns3Object_methods(root_module, cls):
    """Register Python bindings for ns3::Object (object.h, module 'core')."""
    # Public default constructor.
    cls.add_constructor([])
    # Public aggregation / lifecycle API.
    cls.add_method('AggregateObject', 'void', [param('ns3::Ptr< ns3::Object >', 'other')])
    cls.add_method('Dispose', 'void', [])
    cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Initialize', 'void', [])
    # Protected copy constructor and protected virtual hooks for subclasses.
    cls.add_constructor([param('ns3::Object const &', 'o')], visibility='protected')
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('NotifyNewAggregate', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register Python bindings for ns3::Object::AggregateIterator (object.h, module 'core')."""
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])  # copy constructor
    cls.add_constructor([])                                                         # default constructor
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::AttributeAccessor> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::AttributeChecker> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::AttributeValue> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::CallbackImplBase> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::EventImpl> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3FlowClassifier_Ns3Empty_Ns3DefaultDeleter__lt__ns3FlowClassifier__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::FlowClassifier> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter< ns3::FlowClassifier > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::Hash::Implementation> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::Ipv4MulticastRoute> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4MulticastRoute > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::Ipv4Route> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4Route > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3NetDeviceQueue_Ns3Empty_Ns3DefaultDeleter__lt__ns3NetDeviceQueue__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::NetDeviceQueue> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter< ns3::NetDeviceQueue > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::NixVector> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::OutputStreamWrapper> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter< ns3::OutputStreamWrapper > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::Packet> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3QueueItem_Ns3Empty_Ns3DefaultDeleter__lt__ns3QueueItem__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::QueueItem> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter< ns3::QueueItem > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::TraceSourceAccessor> specialization."""
    tmpl = 'ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > >'
    cls.add_constructor([])                               # default constructor
    cls.add_constructor([param(tmpl + ' const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True) # static Cleanup() hook
    return
def register_Ns3Socket_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::Socket class (socket.h,
    module 'network').

    Covers constructors, the pure-virtual POSIX-like socket API
    (Bind/Connect/Send/Recv/Close/Listen/Shutdown*), socket-option accessors
    (TOS/TTL/Tclass/HopLimit), callback setters, and the protected
    Notify*/IsManual* hooks used by concrete subclasses.
    """
    # Constructors: copy constructor and default constructor.
    cls.add_constructor([param('ns3::Socket const &', 'arg0')])
    cls.add_constructor([])
    # Core socket lifecycle (pure virtual — implemented by subclasses).
    cls.add_method('Bind', 'int', [param('ns3::Address const &', 'address')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('Bind', 'int', [], is_pure_virtual=True, is_virtual=True)
    cls.add_method('Bind6', 'int', [], is_pure_virtual=True, is_virtual=True)
    cls.add_method('BindToNetDevice', 'void', [param('ns3::Ptr< ns3::NetDevice >', 'netdevice')], is_virtual=True)
    cls.add_method('Close', 'int', [], is_pure_virtual=True, is_virtual=True)
    cls.add_method('Connect', 'int', [param('ns3::Address const &', 'address')], is_pure_virtual=True, is_virtual=True)
    # Static factory.
    cls.add_method('CreateSocket', 'ns3::Ptr< ns3::Socket >', [param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::TypeId', 'tid')], is_static=True)
    # Getters.
    cls.add_method('GetAllowBroadcast', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetBoundNetDevice', 'ns3::Ptr< ns3::NetDevice >', [])
    cls.add_method('GetErrno', 'ns3::Socket::SocketErrno', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetIpTos', 'uint8_t', [], is_const=True)
    cls.add_method('GetIpTtl', 'uint8_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetIpv6HopLimit', 'uint8_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetIpv6Tclass', 'uint8_t', [], is_const=True)
    cls.add_method('GetNode', 'ns3::Ptr< ns3::Node >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetPeerName', 'int', [param('ns3::Address &', 'address')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetRxAvailable', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetSockName', 'int', [param('ns3::Address &', 'address')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetSocketType', 'ns3::Socket::SocketType', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTxAvailable', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # IPv6 multicast group management.
    cls.add_method('Ipv6JoinGroup', 'void', [param('ns3::Ipv6Address', 'address'), param('ns3::Socket::Ipv6MulticastFilterMode', 'filterMode'), param('std::vector< ns3::Ipv6Address >', 'sourceAddresses')], is_virtual=True)
    cls.add_method('Ipv6JoinGroup', 'void', [param('ns3::Ipv6Address', 'address')], is_virtual=True)
    cls.add_method('Ipv6LeaveGroup', 'void', [], is_virtual=True)
    # Option-flag queries.
    cls.add_method('IsIpRecvTos', 'bool', [], is_const=True)
    cls.add_method('IsIpRecvTtl', 'bool', [], is_const=True)
    cls.add_method('IsIpv6RecvHopLimit', 'bool', [], is_const=True)
    cls.add_method('IsIpv6RecvTclass', 'bool', [], is_const=True)
    cls.add_method('IsRecvPktInfo', 'bool', [], is_const=True)
    cls.add_method('Listen', 'int', [], is_pure_virtual=True, is_virtual=True)
    # Receive overloads (packet-returning and raw-buffer variants).
    cls.add_method('Recv', 'ns3::Ptr< ns3::Packet >', [param('uint32_t', 'maxSize'), param('uint32_t', 'flags')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('Recv', 'ns3::Ptr< ns3::Packet >', [])
    cls.add_method('Recv', 'int', [param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
    cls.add_method('RecvFrom', 'ns3::Ptr< ns3::Packet >', [param('uint32_t', 'maxSize'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('RecvFrom', 'ns3::Ptr< ns3::Packet >', [param('ns3::Address &', 'fromAddress')])
    cls.add_method('RecvFrom', 'int', [param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')])
    # Send overloads (packet-based and raw-buffer variants).
    cls.add_method('Send', 'int', [param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('Send', 'int', [param('ns3::Ptr< ns3::Packet >', 'p')])
    cls.add_method('Send', 'int', [param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
    cls.add_method('SendTo', 'int', [param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags'), param('ns3::Address const &', 'toAddress')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('SendTo', 'int', [param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address const &', 'address')])
    # Callback registration (ns3::Callback template spellings must match the
    # generated C++ wrapper exactly).
    cls.add_method('SetAcceptCallback', 'void', [param('ns3::Callback< bool, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionRequest'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'newConnectionCreated')])
    cls.add_method('SetAllowBroadcast', 'bool', [param('bool', 'allowBroadcast')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetCloseCallbacks', 'void', [param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'normalClose'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'errorClose')])
    cls.add_method('SetConnectCallback', 'void', [param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionSucceeded'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionFailed')])
    cls.add_method('SetDataSentCallback', 'void', [param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'dataSent')])
    # Socket-option setters.
    cls.add_method('SetIpRecvTos', 'void', [param('bool', 'ipv4RecvTos')])
    cls.add_method('SetIpRecvTtl', 'void', [param('bool', 'ipv4RecvTtl')])
    cls.add_method('SetIpTos', 'void', [param('uint8_t', 'ipTos')])
    cls.add_method('SetIpTtl', 'void', [param('uint8_t', 'ipTtl')], is_virtual=True)
    cls.add_method('SetIpv6HopLimit', 'void', [param('uint8_t', 'ipHopLimit')], is_virtual=True)
    cls.add_method('SetIpv6RecvHopLimit', 'void', [param('bool', 'ipv6RecvHopLimit')])
    cls.add_method('SetIpv6RecvTclass', 'void', [param('bool', 'ipv6RecvTclass')])
    cls.add_method('SetIpv6Tclass', 'void', [param('int', 'ipTclass')])
    cls.add_method('SetRecvCallback', 'void', [param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'arg0')])
    cls.add_method('SetRecvPktInfo', 'void', [param('bool', 'flag')])
    cls.add_method('SetSendCallback', 'void', [param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'sendCb')])
    cls.add_method('ShutdownRecv', 'int', [], is_pure_virtual=True, is_virtual=True)
    cls.add_method('ShutdownSend', 'int', [], is_pure_virtual=True, is_virtual=True)
    # Protected API exposed to subclasses.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('IsManualIpTos', 'bool', [], is_const=True, visibility='protected')
    cls.add_method('IsManualIpTtl', 'bool', [], is_const=True, visibility='protected')
    cls.add_method('IsManualIpv6HopLimit', 'bool', [], is_const=True, visibility='protected')
    cls.add_method('IsManualIpv6Tclass', 'bool', [], is_const=True, visibility='protected')
    cls.add_method('NotifyConnectionFailed', 'void', [], visibility='protected')
    cls.add_method('NotifyConnectionRequest', 'bool', [param('ns3::Address const &', 'from')], visibility='protected')
    cls.add_method('NotifyConnectionSucceeded', 'void', [], visibility='protected')
    cls.add_method('NotifyDataRecv', 'void', [], visibility='protected')
    cls.add_method('NotifyDataSent', 'void', [param('uint32_t', 'size')], visibility='protected')
    cls.add_method('NotifyErrorClose', 'void', [], visibility='protected')
    cls.add_method('NotifyNewConnectionCreated', 'void', [param('ns3::Ptr< ns3::Socket >', 'socket'), param('ns3::Address const &', 'from')], visibility='protected')
    cls.add_method('NotifyNormalClose', 'void', [], visibility='protected')
    cls.add_method('NotifySend', 'void', [param('uint32_t', 'spaceAvailable')], visibility='protected')
    return
def register_Ns3SocketIpTosTag_methods(root_module, cls):
    """Register Python bindings for ns3::SocketIpTosTag (socket.h, module
    'network'): constructors, the Tag serialization interface, and the
    TOS value accessors."""
    cls.add_constructor([param('ns3::SocketIpTosTag const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_virtual=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetTos', 'uint8_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_const=True, is_virtual=True)
    cls.add_method('SetTos', 'void', [param('uint8_t', 'tos')])
    return
def register_Ns3SocketIpTtlTag_methods(root_module, cls):
    """Register Python bindings for ns3::SocketIpTtlTag (socket.h, module
    'network'): constructors, the Tag serialization interface, and the
    TTL value accessors."""
    cls.add_constructor([param('ns3::SocketIpTtlTag const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_virtual=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetTtl', 'uint8_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_const=True, is_virtual=True)
    cls.add_method('SetTtl', 'void', [param('uint8_t', 'ttl')])
    return
def register_Ns3SocketIpv6HopLimitTag_methods(root_module, cls):
    """Register Python bindings for ns3::SocketIpv6HopLimitTag (socket.h,
    module 'network'): constructors, the Tag serialization interface, and
    the hop-limit accessors."""
    cls.add_constructor([param('ns3::SocketIpv6HopLimitTag const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_virtual=True)
    cls.add_method('GetHopLimit', 'uint8_t', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_const=True, is_virtual=True)
    cls.add_method('SetHopLimit', 'void', [param('uint8_t', 'hopLimit')])
    return
def register_Ns3SocketIpv6TclassTag_methods(root_module, cls):
    """Register Python bindings for ns3::SocketIpv6TclassTag (socket.h,
    module 'network'): constructors, the Tag serialization interface, and
    the traffic-class accessors."""
    cls.add_constructor([param('ns3::SocketIpv6TclassTag const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_virtual=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetTclass', 'uint8_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_const=True, is_virtual=True)
    cls.add_method('SetTclass', 'void', [param('uint8_t', 'tclass')])
    return
def register_Ns3SocketSetDontFragmentTag_methods(root_module, cls):
    """Register Python bindings for ns3::SocketSetDontFragmentTag (socket.h,
    module 'network'): constructors, the Tag serialization interface, and
    the Enable/Disable/IsEnabled flag API."""
    cls.add_constructor([param('ns3::SocketSetDontFragmentTag const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_virtual=True)
    cls.add_method('Disable', 'void', [])
    cls.add_method('Enable', 'void', [])
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('IsEnabled', 'bool', [], is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_const=True, is_virtual=True)
    return
def register_Ns3Time_methods(root_module, cls):
    """Register Python bindings for ns3::Time (nstime.h, module 'core').

    Wires up arithmetic and comparison operators, the full set of numeric
    and string constructors, unit conversions (From*/To*/Get*), sign
    predicates, and the static resolution/limit helpers.
    """
    # Arithmetic operators: scaling by int64 and Time +/- Time.
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    # Comparison, in-place, and stream operators.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    # Constructors: default, copy, every numeric width, int64x64, and string.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Time const &', 'o')])
    cls.add_constructor([param('double', 'v')])
    cls.add_constructor([param('int', 'v')])
    cls.add_constructor([param('long int', 'v')])
    cls.add_constructor([param('long long int', 'v')])
    cls.add_constructor([param('unsigned int', 'v')])
    cls.add_constructor([param('long unsigned int', 'v')])
    cls.add_constructor([param('long long unsigned int', 'v')])
    cls.add_constructor([param('ns3::int64x64_t const &', 'v')])
    cls.add_constructor([param('std::string const &', 's')])
    # Conversion and comparison helpers.
    cls.add_method('As', 'ns3::TimeWithUnit', [param('ns3::Time::Unit const', 'unit')], is_const=True)
    cls.add_method('Compare', 'int', [param('ns3::Time const &', 'o')], is_const=True)
    # Static factories from raw values.
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value')], is_static=True)
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    cls.add_method('FromDouble', 'ns3::Time', [param('double', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    cls.add_method('FromInteger', 'ns3::Time', [param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    # Unit getters.
    cls.add_method('GetDays', 'double', [], is_const=True)
    cls.add_method('GetDouble', 'double', [], is_const=True)
    cls.add_method('GetFemtoSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetHours', 'double', [], is_const=True)
    cls.add_method('GetInteger', 'int64_t', [], is_const=True)
    cls.add_method('GetMicroSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetMilliSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetMinutes', 'double', [], is_const=True)
    cls.add_method('GetNanoSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetPicoSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetResolution', 'ns3::Time::Unit', [], is_static=True)
    cls.add_method('GetSeconds', 'double', [], is_const=True)
    cls.add_method('GetTimeStep', 'int64_t', [], is_const=True)
    cls.add_method('GetYears', 'double', [], is_const=True)
    # Sign predicates.
    cls.add_method('IsNegative', 'bool', [], is_const=True)
    cls.add_method('IsPositive', 'bool', [], is_const=True)
    cls.add_method('IsStrictlyNegative', 'bool', [], is_const=True)
    cls.add_method('IsStrictlyPositive', 'bool', [], is_const=True)
    cls.add_method('IsZero', 'bool', [], is_const=True)
    # Static limits, resolution control, and explicit conversions.
    cls.add_method('Max', 'ns3::Time', [], is_static=True)
    cls.add_method('Min', 'ns3::Time', [], is_static=True)
    cls.add_method('SetResolution', 'void', [param('ns3::Time::Unit', 'resolution')], is_static=True)
    cls.add_method('StaticInit', 'bool', [], is_static=True)
    cls.add_method('To', 'ns3::int64x64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    cls.add_method('ToDouble', 'double', [param('ns3::Time::Unit', 'unit')], is_const=True)
    cls.add_method('ToInteger', 'int64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register ns3::TraceSourceAccessor bindings on *cls*: constructors plus the
    pure-virtual Connect/ConnectWithoutContext/Disconnect/DisconnectWithoutContext
    trace-hookup API (pybindgen-generated; do not edit by hand)."""
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
    cls.add_constructor([])
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Connect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('ConnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Disconnect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('DisconnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Trailer_methods(root_module, cls):
    """Register ns3::Trailer bindings on *cls*: output-stream operator, constructors,
    and the pure-virtual packet-trailer interface (Deserialize/GetSerializedSize/
    Print/Serialize) plus static GetTypeId (pybindgen-generated; do not edit by hand)."""
    cls.add_output_stream_operator()
    ## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor]
    cls.add_constructor([])
    ## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
    ## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'end')],
                   is_pure_virtual=True, is_virtual=True)
    ## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trailer.h (module 'network'): void ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register ns3::AttributeAccessor bindings on *cls*: constructors plus the
    pure-virtual Get/Set/HasGetter/HasSetter attribute-access interface
    (pybindgen-generated; do not edit by hand)."""
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get',
                   'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set',
                   'bool',
                   [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register ns3::AttributeChecker bindings on *cls*: constructors plus the
    attribute-validation interface (Check/Copy/Create/CreateValidValue and the
    type-information queries) (pybindgen-generated; do not edit by hand)."""
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check',
                   'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy',
                   'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
    cls.add_method('Create',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
    cls.add_method('CreateValidValue',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Register ns3::AttributeValue bindings on *cls*: constructors plus the
    pure-virtual Copy/DeserializeFromString/SerializeToString value interface
    (pybindgen-generated; do not edit by hand)."""
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register ns3::CallbackChecker bindings on *cls*: default and copy
    constructors only (pybindgen-generated; do not edit by hand)."""
    ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
    return
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register ns3::CallbackImplBase bindings on *cls*: constructors, the
    pure-virtual GetTypeid/IsEqual interface, and the protected static Demangle
    helper (pybindgen-generated; do not edit by hand)."""
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    ## callback.h (module 'core'): std::string ns3::CallbackImplBase::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::Demangle(std::string const & mangled) [member function]
    cls.add_method('Demangle',
                   'std::string',
                   [param('std::string const &', 'mangled')],
                   is_static=True, visibility='protected')
    return
def register_Ns3CallbackValue_methods(root_module, cls):
    """Register ns3::CallbackValue bindings on *cls*: constructors (including one
    taking a CallbackBase) and the concrete Copy/DeserializeFromString/
    SerializeToString/Set overrides (pybindgen-generated; do not edit by hand)."""
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    ## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::CallbackBase', 'base')])
    return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Register ns3::EmptyAttributeValue bindings on *cls*: constructors and the
    private virtual Copy/DeserializeFromString/SerializeToString overrides
    (pybindgen-generated; do not edit by hand)."""
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3EventImpl_methods(root_module, cls):
    """Register ns3::EventImpl bindings on *cls*: constructors, the public
    Cancel/Invoke/IsCancelled API, and the protected pure-virtual Notify hook
    (pybindgen-generated; do not edit by hand)."""
    ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
    ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor]
    cls.add_constructor([])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function]
    cls.add_method('Cancel',
                   'void',
                   [])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function]
    cls.add_method('Invoke',
                   'void',
                   [])
    ## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function]
    cls.add_method('IsCancelled',
                   'bool',
                   [])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function]
    cls.add_method('Notify',
                   'void',
                   [],
                   is_pure_virtual=True, visibility='protected', is_virtual=True)
    return
def register_Ns3FlowClassifier_methods(root_module, cls):
    """Register ns3::FlowClassifier bindings on *cls*: constructor, the
    pure-virtual SerializeToXmlStream, and the protected GetNewFlowId helper
    (pybindgen-generated; do not edit by hand)."""
    ## flow-classifier.h (module 'flow-monitor'): ns3::FlowClassifier::FlowClassifier() [constructor]
    cls.add_constructor([])
    ## flow-classifier.h (module 'flow-monitor'): void ns3::FlowClassifier::SerializeToXmlStream(std::ostream & os, int indent) const [member function]
    cls.add_method('SerializeToXmlStream',
                   'void',
                   [param('std::ostream &', 'os'), param('int', 'indent')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## flow-classifier.h (module 'flow-monitor'): ns3::FlowId ns3::FlowClassifier::GetNewFlowId() [member function]
    cls.add_method('GetNewFlowId',
                   'ns3::FlowId',
                   [],
                   visibility='protected')
    return
def register_Ns3FlowMonitor_methods(root_module, cls):
    """Register ns3::FlowMonitor bindings on *cls*: constructors, probe/classifier
    registration, lost-packet checks, stats accessors, the Report* event sinks,
    XML serialization, Start/Stop control, and the protected DoDispose/
    NotifyConstructionCompleted overrides (pybindgen-generated; do not edit by hand)."""
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowMonitor(ns3::FlowMonitor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::FlowMonitor const &', 'arg0')])
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowMonitor() [constructor]
    cls.add_constructor([])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::AddFlowClassifier(ns3::Ptr<ns3::FlowClassifier> classifier) [member function]
    cls.add_method('AddFlowClassifier',
                   'void',
                   [param('ns3::Ptr< ns3::FlowClassifier >', 'classifier')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::AddProbe(ns3::Ptr<ns3::FlowProbe> probe) [member function]
    cls.add_method('AddProbe',
                   'void',
                   [param('ns3::Ptr< ns3::FlowProbe >', 'probe')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::CheckForLostPackets() [member function]
    cls.add_method('CheckForLostPackets',
                   'void',
                   [])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::CheckForLostPackets(ns3::Time maxDelay) [member function]
    cls.add_method('CheckForLostPackets',
                   'void',
                   [param('ns3::Time', 'maxDelay')])
    ## flow-monitor.h (module 'flow-monitor'): std::vector<ns3::Ptr<ns3::FlowProbe>, std::allocator<ns3::Ptr<ns3::FlowProbe> > > const & ns3::FlowMonitor::GetAllProbes() const [member function]
    cls.add_method('GetAllProbes',
                   'std::vector< ns3::Ptr< ns3::FlowProbe > > const &',
                   [],
                   is_const=True)
    ## flow-monitor.h (module 'flow-monitor'): std::map<unsigned int, ns3::FlowMonitor::FlowStats, std::less<unsigned int>, std::allocator<std::pair<unsigned int const, ns3::FlowMonitor::FlowStats> > > const & ns3::FlowMonitor::GetFlowStats() const [member function]
    cls.add_method('GetFlowStats',
                   'std::map< unsigned int, ns3::FlowMonitor::FlowStats > const &',
                   [],
                   is_const=True)
    ## flow-monitor.h (module 'flow-monitor'): ns3::TypeId ns3::FlowMonitor::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## flow-monitor.h (module 'flow-monitor'): static ns3::TypeId ns3::FlowMonitor::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::ReportDrop(ns3::Ptr<ns3::FlowProbe> probe, ns3::FlowId flowId, ns3::FlowPacketId packetId, uint32_t packetSize, uint32_t reasonCode) [member function]
    cls.add_method('ReportDrop',
                   'void',
                   [param('ns3::Ptr< ns3::FlowProbe >', 'probe'), param('ns3::FlowId', 'flowId'), param('ns3::FlowPacketId', 'packetId'), param('uint32_t', 'packetSize'), param('uint32_t', 'reasonCode')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::ReportFirstTx(ns3::Ptr<ns3::FlowProbe> probe, ns3::FlowId flowId, ns3::FlowPacketId packetId, uint32_t packetSize) [member function]
    cls.add_method('ReportFirstTx',
                   'void',
                   [param('ns3::Ptr< ns3::FlowProbe >', 'probe'), param('ns3::FlowId', 'flowId'), param('ns3::FlowPacketId', 'packetId'), param('uint32_t', 'packetSize')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::ReportForwarding(ns3::Ptr<ns3::FlowProbe> probe, ns3::FlowId flowId, ns3::FlowPacketId packetId, uint32_t packetSize) [member function]
    cls.add_method('ReportForwarding',
                   'void',
                   [param('ns3::Ptr< ns3::FlowProbe >', 'probe'), param('ns3::FlowId', 'flowId'), param('ns3::FlowPacketId', 'packetId'), param('uint32_t', 'packetSize')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::ReportLastRx(ns3::Ptr<ns3::FlowProbe> probe, ns3::FlowId flowId, ns3::FlowPacketId packetId, uint32_t packetSize) [member function]
    cls.add_method('ReportLastRx',
                   'void',
                   [param('ns3::Ptr< ns3::FlowProbe >', 'probe'), param('ns3::FlowId', 'flowId'), param('ns3::FlowPacketId', 'packetId'), param('uint32_t', 'packetSize')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::SerializeToXmlFile(std::string fileName, bool enableHistograms, bool enableProbes) [member function]
    cls.add_method('SerializeToXmlFile',
                   'void',
                   [param('std::string', 'fileName'), param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::SerializeToXmlStream(std::ostream & os, int indent, bool enableHistograms, bool enableProbes) [member function]
    cls.add_method('SerializeToXmlStream',
                   'void',
                   [param('std::ostream &', 'os'), param('int', 'indent'), param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    ## flow-monitor.h (module 'flow-monitor'): std::string ns3::FlowMonitor::SerializeToXmlString(int indent, bool enableHistograms, bool enableProbes) [member function]
    cls.add_method('SerializeToXmlString',
                   'std::string',
                   [param('int', 'indent'), param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::Start(ns3::Time const & time) [member function]
    cls.add_method('Start',
                   'void',
                   [param('ns3::Time const &', 'time')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::StartRightNow() [member function]
    cls.add_method('StartRightNow',
                   'void',
                   [])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::Stop(ns3::Time const & time) [member function]
    cls.add_method('Stop',
                   'void',
                   [param('ns3::Time const &', 'time')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::StopRightNow() [member function]
    cls.add_method('StopRightNow',
                   'void',
                   [])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::NotifyConstructionCompleted() [member function]
    cls.add_method('NotifyConstructionCompleted',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3FlowMonitorFlowStats_methods(root_module, cls):
    """Register ns3::FlowMonitor::FlowStats bindings on *cls*: constructors and
    the per-flow statistics instance attributes (byte/packet counters, delay and
    jitter sums, histograms, first/last packet timestamps)
    (pybindgen-generated; do not edit by hand)."""
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::FlowStats() [constructor]
    cls.add_constructor([])
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::FlowStats(ns3::FlowMonitor::FlowStats const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::FlowMonitor::FlowStats const &', 'arg0')])
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::bytesDropped [variable]
    cls.add_instance_attribute('bytesDropped', 'std::vector< unsigned long >', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::delayHistogram [variable]
    cls.add_instance_attribute('delayHistogram', 'ns3::Histogram', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::delaySum [variable]
    cls.add_instance_attribute('delaySum', 'ns3::Time', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::flowInterruptionsHistogram [variable]
    cls.add_instance_attribute('flowInterruptionsHistogram', 'ns3::Histogram', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::jitterHistogram [variable]
    cls.add_instance_attribute('jitterHistogram', 'ns3::Histogram', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::jitterSum [variable]
    cls.add_instance_attribute('jitterSum', 'ns3::Time', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::lastDelay [variable]
    cls.add_instance_attribute('lastDelay', 'ns3::Time', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::lostPackets [variable]
    cls.add_instance_attribute('lostPackets', 'uint32_t', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::packetSizeHistogram [variable]
    cls.add_instance_attribute('packetSizeHistogram', 'ns3::Histogram', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::packetsDropped [variable]
    cls.add_instance_attribute('packetsDropped', 'std::vector< unsigned int >', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::rxBytes [variable]
    cls.add_instance_attribute('rxBytes', 'uint64_t', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::rxPackets [variable]
    cls.add_instance_attribute('rxPackets', 'uint32_t', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::timeFirstRxPacket [variable]
    cls.add_instance_attribute('timeFirstRxPacket', 'ns3::Time', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::timeFirstTxPacket [variable]
    cls.add_instance_attribute('timeFirstTxPacket', 'ns3::Time', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::timeLastRxPacket [variable]
    cls.add_instance_attribute('timeLastRxPacket', 'ns3::Time', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::timeLastTxPacket [variable]
    cls.add_instance_attribute('timeLastTxPacket', 'ns3::Time', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::timesForwarded [variable]
    cls.add_instance_attribute('timesForwarded', 'uint32_t', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::txBytes [variable]
    cls.add_instance_attribute('txBytes', 'uint64_t', is_const=False)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats::txPackets [variable]
    cls.add_instance_attribute('txPackets', 'uint32_t', is_const=False)
    return
def register_Ns3FlowProbe_methods(root_module, cls):
    """Register ns3::FlowProbe bindings on *cls*: per-flow stats accumulation
    (AddPacketStats/AddPacketDropStats), GetStats, XML serialization, static
    GetTypeId, and the protected constructor/DoDispose
    (pybindgen-generated; do not edit by hand)."""
    ## flow-probe.h (module 'flow-monitor'): void ns3::FlowProbe::AddPacketDropStats(ns3::FlowId flowId, uint32_t packetSize, uint32_t reasonCode) [member function]
    cls.add_method('AddPacketDropStats',
                   'void',
                   [param('ns3::FlowId', 'flowId'), param('uint32_t', 'packetSize'), param('uint32_t', 'reasonCode')])
    ## flow-probe.h (module 'flow-monitor'): void ns3::FlowProbe::AddPacketStats(ns3::FlowId flowId, uint32_t packetSize, ns3::Time delayFromFirstProbe) [member function]
    cls.add_method('AddPacketStats',
                   'void',
                   [param('ns3::FlowId', 'flowId'), param('uint32_t', 'packetSize'), param('ns3::Time', 'delayFromFirstProbe')])
    ## flow-probe.h (module 'flow-monitor'): std::map<unsigned int, ns3::FlowProbe::FlowStats, std::less<unsigned int>, std::allocator<std::pair<unsigned int const, ns3::FlowProbe::FlowStats> > > ns3::FlowProbe::GetStats() const [member function]
    cls.add_method('GetStats',
                   'std::map< unsigned int, ns3::FlowProbe::FlowStats >',
                   [],
                   is_const=True)
    ## flow-probe.h (module 'flow-monitor'): static ns3::TypeId ns3::FlowProbe::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## flow-probe.h (module 'flow-monitor'): void ns3::FlowProbe::SerializeToXmlStream(std::ostream & os, int indent, uint32_t index) const [member function]
    cls.add_method('SerializeToXmlStream',
                   'void',
                   [param('std::ostream &', 'os'), param('int', 'indent'), param('uint32_t', 'index')],
                   is_const=True)
    ## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe::FlowProbe(ns3::Ptr<ns3::FlowMonitor> flowMonitor) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::FlowMonitor >', 'flowMonitor')],
                        visibility='protected')
    ## flow-probe.h (module 'flow-monitor'): void ns3::FlowProbe::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3FlowProbeFlowStats_methods(root_module, cls):
    """Register ns3::FlowProbe::FlowStats bindings on *cls*: constructors and the
    per-probe statistics instance attributes (bytes/packets seen, drop vectors,
    delay-from-first-probe sum) (pybindgen-generated; do not edit by hand)."""
    ## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe::FlowStats::FlowStats(ns3::FlowProbe::FlowStats const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::FlowProbe::FlowStats const &', 'arg0')])
    ## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe::FlowStats::FlowStats() [constructor]
    cls.add_constructor([])
    ## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe::FlowStats::bytes [variable]
    cls.add_instance_attribute('bytes', 'uint64_t', is_const=False)
    ## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe::FlowStats::bytesDropped [variable]
    cls.add_instance_attribute('bytesDropped', 'std::vector< unsigned long >', is_const=False)
    ## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe::FlowStats::delayFromFirstProbeSum [variable]
    cls.add_instance_attribute('delayFromFirstProbeSum', 'ns3::Time', is_const=False)
    ## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe::FlowStats::packets [variable]
    cls.add_instance_attribute('packets', 'uint32_t', is_const=False)
    ## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe::FlowStats::packetsDropped [variable]
    cls.add_instance_attribute('packetsDropped', 'std::vector< unsigned int >', is_const=False)
    return
def register_Ns3Ipv4_methods(root_module, cls):
    """Register the ns3::Ipv4 abstract base class (ipv4.h, module 'internet').

    Auto-generated, PyBindGen-style binding registration: declares the
    constructors, the (mostly pure-virtual) interface/address management
    and send/routing API, the IF_ANY static attribute, and the private
    virtual IpForward / WeakEsModel accessors.
    """
    ## ipv4.h (module 'internet'): ns3::Ipv4::Ipv4(ns3::Ipv4 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4 const &', 'arg0')])
    ## ipv4.h (module 'internet'): ns3::Ipv4::Ipv4() [constructor]
    cls.add_constructor([])
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::AddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv4::CreateRawSocket() [member function]
    cls.add_method('CreateRawSocket',
                   'ns3::Ptr< ns3::Socket >',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
    cls.add_method('DeleteRawSocket',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'socket')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4::GetAddress(uint32_t interface, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv4InterfaceAddress',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForAddress(ns3::Ipv4Address address) const [member function]
    cls.add_method('GetInterfaceForAddress',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'address')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice',
                   'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForPrefix(ns3::Ipv4Address address, ns3::Ipv4Mask mask) const [member function]
    cls.add_method('GetInterfaceForPrefix',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'address'), param('ns3::Ipv4Mask', 'mask')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint16_t ns3::Ipv4::GetMetric(uint32_t interface) const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint16_t ns3::Ipv4::GetMtu(uint32_t interface) const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4::GetNetDevice(uint32_t interface) [member function]
    cls.add_method('GetNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4::GetProtocol(int protocolNumber, int32_t interfaceIndex) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber'), param('int32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol',
                   'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): static ns3::TypeId ns3::Ipv4::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::IsDestinationAddress(ns3::Ipv4Address address, uint32_t iif) const [member function]
    cls.add_method('IsDestinationAddress',
                   'bool',
                   [param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::IsForwarding(uint32_t interface) const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::IsUp(uint32_t interface) const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::RemoveAddress(uint32_t interface, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::RemoveAddress(uint32_t interface, ns3::Ipv4Address address) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4::SelectSourceAddress(ns3::Ptr<const ns3::NetDevice> device, ns3::Ipv4Address dst, ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
    cls.add_method('SelectSourceAddress',
                   'ns3::Ipv4Address',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Address source, ns3::Ipv4Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SendWithHeader(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Header ipHeader, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('SendWithHeader',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetDown(uint32_t interface) [member function]
    cls.add_method('SetDown',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetForwarding(uint32_t interface, bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('uint32_t', 'interface'), param('bool', 'val')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetMetric(uint32_t interface, uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint32_t', 'interface'), param('uint16_t', 'metric')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetUp(uint32_t interface) [member function]
    cls.add_method('SetUp',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4::SourceAddressSelection(uint32_t interface, ns3::Ipv4Address dest) [member function]
    cls.add_method('SourceAddressSelection',
                   'ns3::Ipv4Address',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'dest')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ipv4::IF_ANY [variable]
    cls.add_static_attribute('IF_ANY', 'uint32_t const', is_const=True)
    # Private virtual accessors backing the IpForward / WeakEsModel attributes.
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::GetIpForward() const [member function]
    cls.add_method('GetIpForward',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::GetWeakEsModel() const [member function]
    cls.add_method('GetWeakEsModel',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward',
                   'void',
                   [param('bool', 'forward')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetWeakEsModel(bool model) [member function]
    cls.add_method('SetWeakEsModel',
                   'void',
                   [param('bool', 'model')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Register ns3::Ipv4AddressChecker (ipv4-address.h, module 'network')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])  # copy constructor
    return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Register ns3::Ipv4AddressValue (ipv4-address.h, module 'network').

    Attribute-value wrapper around an ns3::Ipv4Address; exposes the
    Copy / DeserializeFromString / Get / SerializeToString / Set API.
    """
    # Constructors: default, copy, and from an Ipv4Address value.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [virtual]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [virtual]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # ns3::Ipv4Address Get() const
    cls.add_method('Get', 'ns3::Ipv4Address', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [virtual]
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Ipv4Address const &)
    cls.add_method('Set', 'void', [param('ns3::Ipv4Address const &', 'value')])
    return
def register_Ns3Ipv4FlowClassifier_methods(root_module, cls):
    """Register ns3::Ipv4FlowClassifier (ipv4-flow-classifier.h, module 'flow-monitor')."""
    # Default constructor.
    cls.add_constructor([])
    # bool Classify(Ipv4Header const &, Ptr<const Packet>, uint32_t *, uint32_t *)
    cls.add_method('Classify', 'bool',
                   [param('ns3::Ipv4Header const &', 'ipHeader'),
                    param('ns3::Ptr< ns3::Packet const >', 'ipPayload'),
                    param('uint32_t *', 'out_flowId'),
                    param('uint32_t *', 'out_packetId')])
    # FiveTuple FindFlow(FlowId) const
    cls.add_method('FindFlow', 'ns3::Ipv4FlowClassifier::FiveTuple',
                   [param('ns3::FlowId', 'flowId')], is_const=True)
    # void SerializeToXmlStream(std::ostream &, int) const [virtual]
    cls.add_method('SerializeToXmlStream', 'void',
                   [param('std::ostream &', 'os'), param('int', 'indent')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3Ipv4FlowClassifierFiveTuple_methods(root_module, cls):
    """Register ns3::Ipv4FlowClassifier::FiveTuple (ipv4-flow-classifier.h).

    Declares '<' and '==' comparison operators, the default/copy
    constructors, and the five flow-identifying public fields.
    """
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('==')
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4FlowClassifier::FiveTuple const &', 'arg0')])
    # The five classifying fields of a flow, all mutable.
    for field_name, field_type in (
        ('destinationAddress', 'ns3::Ipv4Address'),
        ('destinationPort', 'uint16_t'),
        ('protocol', 'uint8_t'),
        ('sourceAddress', 'ns3::Ipv4Address'),
        ('sourcePort', 'uint16_t'),
    ):
        cls.add_instance_attribute(field_name, field_type, is_const=False)
    return
def register_Ns3Ipv4FlowProbe_methods(root_module, cls):
    """Register ns3::Ipv4FlowProbe (ipv4-flow-probe.h, module 'flow-monitor')."""
    # Ipv4FlowProbe(Ptr<FlowMonitor> monitor, Ptr<Ipv4FlowClassifier> classifier, Ptr<Node> node)
    cls.add_constructor([param('ns3::Ptr< ns3::FlowMonitor >', 'monitor'),
                         param('ns3::Ptr< ns3::Ipv4FlowClassifier >', 'classifier'),
                         param('ns3::Ptr< ns3::Node >', 'node')])
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void DoDispose() [protected, virtual]
    cls.add_method('DoDispose', 'void', [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3Ipv4L3Protocol_methods(root_module, cls):
    """Register ns3::Ipv4L3Protocol (ipv4-l3-protocol.h, module 'internet').

    Auto-generated, PyBindGen-style binding registration for the concrete
    IPv4 layer-3 implementation: constructor, interface/address management,
    protocol insert/remove, send/receive path, the PROT_NUMBER static
    attribute, the protected DoDispose/NotifyNewAggregate hooks, and the
    private IpForward / WeakEsModel accessors overriding ns3::Ipv4.
    """
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::Ipv4L3Protocol() [constructor]
    cls.add_constructor([])
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::AddAddress(uint32_t i, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('uint32_t', 'i'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv4L3Protocol::CreateRawSocket() [member function]
    cls.add_method('CreateRawSocket',
                   'ns3::Ptr< ns3::Socket >',
                   [],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
    cls.add_method('DeleteRawSocket',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'socket')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4L3Protocol::GetAddress(uint32_t interfaceIndex, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv4InterfaceAddress',
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4Interface> ns3::Ipv4L3Protocol::GetInterface(uint32_t i) const [member function]
    cls.add_method('GetInterface',
                   'ns3::Ptr< ns3::Ipv4Interface >',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForAddress(ns3::Ipv4Address addr) const [member function]
    cls.add_method('GetInterfaceForAddress',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'addr')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice',
                   'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForPrefix(ns3::Ipv4Address addr, ns3::Ipv4Mask mask) const [member function]
    cls.add_method('GetInterfaceForPrefix',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'addr'), param('ns3::Ipv4Mask', 'mask')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv4L3Protocol::GetMetric(uint32_t i) const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv4L3Protocol::GetMtu(uint32_t i) const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4L3Protocol::GetNetDevice(uint32_t i) [member function]
    cls.add_method('GetNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4L3Protocol::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4L3Protocol::GetProtocol(int protocolNumber, int32_t interfaceIndex) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber'), param('int32_t', 'interfaceIndex')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4L3Protocol::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol',
                   'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv4L3Protocol::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsDestinationAddress(ns3::Ipv4Address address, uint32_t iif) const [member function]
    cls.add_method('IsDestinationAddress',
                   'bool',
                   [param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsForwarding(uint32_t i) const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsUnicast(ns3::Ipv4Address ad) const [member function]
    cls.add_method('IsUnicast',
                   'bool',
                   [param('ns3::Ipv4Address', 'ad')],
                   is_const=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsUp(uint32_t i) const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Receive(ns3::Ptr<ns3::NetDevice> device, ns3::Ptr<const ns3::Packet> p, uint16_t protocol, ns3::Address const & from, ns3::Address const & to, ns3::NetDevice::PacketType packetType) [member function]
    cls.add_method('Receive',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'p'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'from'), param('ns3::Address const &', 'to'), param('ns3::NetDevice::PacketType', 'packetType')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::RemoveAddress(uint32_t interfaceIndex, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::RemoveAddress(uint32_t interface, ns3::Ipv4Address address) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'address')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4L3Protocol::SelectSourceAddress(ns3::Ptr<const ns3::NetDevice> device, ns3::Ipv4Address dst, ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
    cls.add_method('SelectSourceAddress',
                   'ns3::Ipv4Address',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Address source, ns3::Ipv4Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SendWithHeader(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Header ipHeader, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('SendWithHeader',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetDefaultTtl(uint8_t ttl) [member function]
    cls.add_method('SetDefaultTtl',
                   'void',
                   [param('uint8_t', 'ttl')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetDown(uint32_t i) [member function]
    cls.add_method('SetDown',
                   'void',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetForwarding(uint32_t i, bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('uint32_t', 'i'), param('bool', 'val')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetMetric(uint32_t i, uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint32_t', 'i'), param('uint16_t', 'metric')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetUp(uint32_t i) [member function]
    cls.add_method('SetUp',
                   'void',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4L3Protocol::SourceAddressSelection(uint32_t interface, ns3::Ipv4Address dest) [member function]
    cls.add_method('SourceAddressSelection',
                   'ns3::Ipv4Address',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'dest')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::PROT_NUMBER [variable]
    cls.add_static_attribute('PROT_NUMBER', 'uint16_t const', is_const=True)
    # Protected lifecycle hooks.
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    # Private virtual accessors backing the IpForward / WeakEsModel attributes.
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::GetIpForward() const [member function]
    cls.add_method('GetIpForward',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::GetWeakEsModel() const [member function]
    cls.add_method('GetWeakEsModel',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward',
                   'void',
                   [param('bool', 'forward')],
                   visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetWeakEsModel(bool model) [member function]
    cls.add_method('SetWeakEsModel',
                   'void',
                   [param('bool', 'model')],
                   visibility='private', is_virtual=True)
    return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Register ns3::Ipv4MaskChecker (ipv4-address.h, module 'network')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])  # copy constructor
    return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Register ns3::Ipv4MaskValue (ipv4-address.h, module 'network').

    Attribute-value wrapper around an ns3::Ipv4Mask; exposes the
    Copy / DeserializeFromString / Get / SerializeToString / Set API.
    """
    # Constructors: default, copy, and from an Ipv4Mask value.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [virtual]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [virtual]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # ns3::Ipv4Mask Get() const
    cls.add_method('Get', 'ns3::Ipv4Mask', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [virtual]
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Ipv4Mask const &)
    cls.add_method('Set', 'void', [param('ns3::Ipv4Mask const &', 'value')])
    return
def register_Ns3Ipv4MulticastRoute_methods(root_module, cls):
    """Register Python bindings for the methods of ns3::Ipv4MulticastRoute.

    Auto-generated PyBindGen registration: adds constructors, getters/setters
    for the multicast route (group, origin, parent interface, per-output TTL
    map) and the MAX_INTERFACES / MAX_TTL static constants onto *cls*.
    """
    ## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute::Ipv4MulticastRoute(ns3::Ipv4MulticastRoute const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4MulticastRoute const &', 'arg0')])
    ## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute::Ipv4MulticastRoute() [constructor]
    cls.add_constructor([])
    ## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4MulticastRoute::GetGroup() const [member function]
    cls.add_method('GetGroup',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4MulticastRoute::GetOrigin() const [member function]
    cls.add_method('GetOrigin',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-route.h (module 'internet'): std::map<unsigned int, unsigned int, std::less<unsigned int>, std::allocator<std::pair<unsigned int const, unsigned int> > > ns3::Ipv4MulticastRoute::GetOutputTtlMap() const [member function]
    cls.add_method('GetOutputTtlMap',
                   'std::map< unsigned int, unsigned int >',
                   [],
                   is_const=True)
    ## ipv4-route.h (module 'internet'): uint32_t ns3::Ipv4MulticastRoute::GetParent() const [member function]
    cls.add_method('GetParent',
                   'uint32_t',
                   [],
                   is_const=True)
    ## ipv4-route.h (module 'internet'): void ns3::Ipv4MulticastRoute::SetGroup(ns3::Ipv4Address const group) [member function]
    cls.add_method('SetGroup',
                   'void',
                   [param('ns3::Ipv4Address const', 'group')])
    ## ipv4-route.h (module 'internet'): void ns3::Ipv4MulticastRoute::SetOrigin(ns3::Ipv4Address const origin) [member function]
    cls.add_method('SetOrigin',
                   'void',
                   [param('ns3::Ipv4Address const', 'origin')])
    ## ipv4-route.h (module 'internet'): void ns3::Ipv4MulticastRoute::SetOutputTtl(uint32_t oif, uint32_t ttl) [member function]
    cls.add_method('SetOutputTtl',
                   'void',
                   [param('uint32_t', 'oif'), param('uint32_t', 'ttl')])
    ## ipv4-route.h (module 'internet'): void ns3::Ipv4MulticastRoute::SetParent(uint32_t iif) [member function]
    cls.add_method('SetParent',
                   'void',
                   [param('uint32_t', 'iif')])
    ## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute::MAX_INTERFACES [variable]
    cls.add_static_attribute('MAX_INTERFACES', 'uint32_t const', is_const=True)
    ## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute::MAX_TTL [variable]
    cls.add_static_attribute('MAX_TTL', 'uint32_t const', is_const=True)
    return
def register_Ns3Ipv4Route_methods(root_module, cls):
    """Register Python bindings for the methods of ns3::Ipv4Route.

    Auto-generated PyBindGen registration: adds the stream-output operator,
    constructors, and getters/setters for the unicast route's destination,
    gateway, source and output device onto *cls*.
    """
    cls.add_output_stream_operator()
    ## ipv4-route.h (module 'internet'): ns3::Ipv4Route::Ipv4Route(ns3::Ipv4Route const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4Route const &', 'arg0')])
    ## ipv4-route.h (module 'internet'): ns3::Ipv4Route::Ipv4Route() [constructor]
    cls.add_constructor([])
    ## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Route::GetDestination() const [member function]
    cls.add_method('GetDestination',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Route::GetGateway() const [member function]
    cls.add_method('GetGateway',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-route.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4Route::GetOutputDevice() const [member function]
    cls.add_method('GetOutputDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [],
                   is_const=True)
    ## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Route::GetSource() const [member function]
    cls.add_method('GetSource',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-route.h (module 'internet'): void ns3::Ipv4Route::SetDestination(ns3::Ipv4Address dest) [member function]
    cls.add_method('SetDestination',
                   'void',
                   [param('ns3::Ipv4Address', 'dest')])
    ## ipv4-route.h (module 'internet'): void ns3::Ipv4Route::SetGateway(ns3::Ipv4Address gw) [member function]
    cls.add_method('SetGateway',
                   'void',
                   [param('ns3::Ipv4Address', 'gw')])
    ## ipv4-route.h (module 'internet'): void ns3::Ipv4Route::SetOutputDevice(ns3::Ptr<ns3::NetDevice> outputDevice) [member function]
    cls.add_method('SetOutputDevice',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'outputDevice')])
    ## ipv4-route.h (module 'internet'): void ns3::Ipv4Route::SetSource(ns3::Ipv4Address src) [member function]
    cls.add_method('SetSource',
                   'void',
                   [param('ns3::Ipv4Address', 'src')])
    return
def register_Ns3Ipv4RoutingProtocol_methods(root_module, cls):
    """Register Python bindings for the methods of ns3::Ipv4RoutingProtocol.

    Auto-generated PyBindGen registration for the abstract IPv4 routing
    protocol interface: constructors, GetTypeId, the interface/address
    notification hooks, routing-table printing, and the pure-virtual
    RouteInput/RouteOutput entry points are added onto *cls*.
    """
    ## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol::Ipv4RoutingProtocol() [constructor]
    cls.add_constructor([])
    ## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol::Ipv4RoutingProtocol(ns3::Ipv4RoutingProtocol const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4RoutingProtocol const &', 'arg0')])
    ## ipv4-routing-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv4RoutingProtocol::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyAddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('NotifyAddAddress',
                   'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyInterfaceDown(uint32_t interface) [member function]
    cls.add_method('NotifyInterfaceDown',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyInterfaceUp(uint32_t interface) [member function]
    cls.add_method('NotifyInterfaceUp',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyRemoveAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('NotifyRemoveAddress',
                   'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::PrintRoutingTable(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
    cls.add_method('PrintRoutingTable',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): bool ns3::Ipv4RoutingProtocol::RouteInput(ns3::Ptr<const ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<const ns3::NetDevice> idev, ns3::Callback<void,ns3::Ptr<ns3::Ipv4Route>,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ucb, ns3::Callback<void,ns3::Ptr<ns3::Ipv4MulticastRoute>,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> mcb, ns3::Callback<void,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,unsigned int,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> lcb, ns3::Callback<void,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::Socket::SocketErrno,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ecb) [member function]
    cls.add_method('RouteInput',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet const >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice const >', 'idev'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4Route> ns3::Ipv4RoutingProtocol::RouteOutput(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::NetDevice> oif, ns3::Socket::SocketErrno & sockerr) [member function]
    cls.add_method('RouteOutput',
                   'ns3::Ptr< ns3::Ipv4Route >',
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice >', 'oif'), param('ns3::Socket::SocketErrno &', 'sockerr')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::SetIpv4(ns3::Ptr<ns3::Ipv4> ipv4) [member function]
    cls.add_method('SetIpv4',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3Ipv6_methods(root_module, cls):
    """Register Python bindings for the methods of ns3::Ipv6.

    Auto-generated PyBindGen registration for the abstract IPv6 stack
    interface: constructors, interface/address management, protocol
    insertion/removal, forwarding and MTU controls, Send, the IF_ANY
    static constant, and the four private pure-virtual attribute hooks
    (Get/SetIpForward, Get/SetMtuDiscover) are added onto *cls*.
    """
    ## ipv6.h (module 'internet'): ns3::Ipv6::Ipv6(ns3::Ipv6 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6 const &', 'arg0')])
    ## ipv6.h (module 'internet'): ns3::Ipv6::Ipv6() [constructor]
    cls.add_constructor([])
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::AddAddress(uint32_t interface, ns3::Ipv6InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint32_t ns3::Ipv6::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6::GetAddress(uint32_t interface, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv6InterfaceAddress',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): int32_t ns3::Ipv6::GetInterfaceForAddress(ns3::Ipv6Address address) const [member function]
    cls.add_method('GetInterfaceForAddress',
                   'int32_t',
                   [param('ns3::Ipv6Address', 'address')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): int32_t ns3::Ipv6::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice',
                   'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): int32_t ns3::Ipv6::GetInterfaceForPrefix(ns3::Ipv6Address address, ns3::Ipv6Prefix mask) const [member function]
    cls.add_method('GetInterfaceForPrefix',
                   'int32_t',
                   [param('ns3::Ipv6Address', 'address'), param('ns3::Ipv6Prefix', 'mask')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint16_t ns3::Ipv6::GetMetric(uint32_t interface) const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint16_t ns3::Ipv6::GetMtu(uint32_t interface) const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint32_t ns3::Ipv6::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint32_t ns3::Ipv6::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv6::GetNetDevice(uint32_t interface) [member function]
    cls.add_method('GetNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv6::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv6::GetProtocol(int protocolNumber, int32_t interfaceIndex) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber'), param('int32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ptr<ns3::Ipv6RoutingProtocol> ns3::Ipv6::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol',
                   'ns3::Ptr< ns3::Ipv6RoutingProtocol >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): static ns3::TypeId ns3::Ipv6::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::IsForwarding(uint32_t interface) const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::IsUp(uint32_t interface) const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::RegisterExtensions() [member function]
    cls.add_method('RegisterExtensions',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::RegisterOptions() [member function]
    cls.add_method('RegisterOptions',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::RemoveAddress(uint32_t interface, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::RemoveAddress(uint32_t interface, ns3::Ipv6Address address) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv6Address source, ns3::Ipv6Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv6Route> route) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv6Address', 'source'), param('ns3::Ipv6Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv6Route >', 'route')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetDown(uint32_t interface) [member function]
    cls.add_method('SetDown',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetForwarding(uint32_t interface, bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('uint32_t', 'interface'), param('bool', 'val')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetMetric(uint32_t interface, uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint32_t', 'interface'), param('uint16_t', 'metric')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetPmtu(ns3::Ipv6Address dst, uint32_t pmtu) [member function]
    cls.add_method('SetPmtu',
                   'void',
                   [param('ns3::Ipv6Address', 'dst'), param('uint32_t', 'pmtu')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetRoutingProtocol(ns3::Ptr<ns3::Ipv6RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv6RoutingProtocol >', 'routingProtocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetUp(uint32_t interface) [member function]
    cls.add_method('SetUp',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6::SourceAddressSelection(uint32_t interface, ns3::Ipv6Address dest) [member function]
    cls.add_method('SourceAddressSelection',
                   'ns3::Ipv6Address',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'dest')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ipv6::IF_ANY [variable]
    cls.add_static_attribute('IF_ANY', 'uint32_t const', is_const=True)
    # The following four accessors are declared private in ipv6.h; they back
    # the IpForward / MtuDiscover attributes and are exposed to subclasses only.
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::GetIpForward() const [member function]
    cls.add_method('GetIpForward',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::GetMtuDiscover() const [member function]
    cls.add_method('GetMtuDiscover',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward',
                   'void',
                   [param('bool', 'forward')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetMtuDiscover(bool mtuDiscover) [member function]
    cls.add_method('SetMtuDiscover',
                   'void',
                   [param('bool', 'mtuDiscover')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Register Python bindings for the methods of ns3::Ipv6AddressChecker.

    Auto-generated PyBindGen registration: only the default and copy
    constructors are added onto *cls*.
    """
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
    return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Register Python bindings for the methods of ns3::Ipv6AddressValue.

    Auto-generated PyBindGen registration: adds the constructors and member
    functions of the AttributeValue wrapper for ns3::Ipv6Address onto *cls*.
    """
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
    ## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv6Address',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv6Address const &', 'value')])
    return
def register_Ns3Ipv6FlowClassifier_methods(root_module, cls):
    """Register Python bindings for the methods of ns3::Ipv6FlowClassifier.

    Auto-generated PyBindGen registration: adds the constructor plus the
    Classify, FindFlow, and SerializeToXmlStream member functions onto *cls*.
    """
    ## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::Ipv6FlowClassifier() [constructor]
    cls.add_constructor([])
    ## ipv6-flow-classifier.h (module 'flow-monitor'): bool ns3::Ipv6FlowClassifier::Classify(ns3::Ipv6Header const & ipHeader, ns3::Ptr<const ns3::Packet> ipPayload, uint32_t * out_flowId, uint32_t * out_packetId) [member function]
    cls.add_method('Classify',
                   'bool',
                   [param('ns3::Ipv6Header const &', 'ipHeader'), param('ns3::Ptr< ns3::Packet const >', 'ipPayload'), param('uint32_t *', 'out_flowId'), param('uint32_t *', 'out_packetId')])
    ## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::FiveTuple ns3::Ipv6FlowClassifier::FindFlow(ns3::FlowId flowId) const [member function]
    cls.add_method('FindFlow',
                   'ns3::Ipv6FlowClassifier::FiveTuple',
                   [param('ns3::FlowId', 'flowId')],
                   is_const=True)
    ## ipv6-flow-classifier.h (module 'flow-monitor'): void ns3::Ipv6FlowClassifier::SerializeToXmlStream(std::ostream & os, int indent) const [member function]
    cls.add_method('SerializeToXmlStream',
                   'void',
                   [param('std::ostream &', 'os'), param('int', 'indent')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3Ipv6FlowClassifierFiveTuple_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6FlowClassifier::FiveTuple.

    Auto-generated PyBindGen registration: adds the '<' and '==' comparison
    operators, the constructors, and the five public instance attributes of
    the flow five-tuple (addresses, ports, protocol) onto *cls*.
    """
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('==')
    ## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::FiveTuple::FiveTuple() [constructor]
    cls.add_constructor([])
    ## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::FiveTuple::FiveTuple(ns3::Ipv6FlowClassifier::FiveTuple const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6FlowClassifier::FiveTuple const &', 'arg0')])
    ## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::FiveTuple::destinationAddress [variable]
    cls.add_instance_attribute('destinationAddress', 'ns3::Ipv6Address', is_const=False)
    ## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::FiveTuple::destinationPort [variable]
    cls.add_instance_attribute('destinationPort', 'uint16_t', is_const=False)
    ## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::FiveTuple::protocol [variable]
    cls.add_instance_attribute('protocol', 'uint8_t', is_const=False)
    ## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::FiveTuple::sourceAddress [variable]
    cls.add_instance_attribute('sourceAddress', 'ns3::Ipv6Address', is_const=False)
    ## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::FiveTuple::sourcePort [variable]
    cls.add_instance_attribute('sourcePort', 'uint16_t', is_const=False)
    return
def register_Ns3Ipv6FlowProbe_methods(root_module, cls):
    """Register Python bindings for the methods of ns3::Ipv6FlowProbe.

    Auto-generated PyBindGen registration: adds the three-argument
    constructor (monitor, classifier, node), the static GetTypeId, and the
    protected virtual DoDispose onto *cls*.
    """
    ## ipv6-flow-probe.h (module 'flow-monitor'): ns3::Ipv6FlowProbe::Ipv6FlowProbe(ns3::Ptr<ns3::FlowMonitor> monitor, ns3::Ptr<ns3::Ipv6FlowClassifier> classifier, ns3::Ptr<ns3::Node> node) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::FlowMonitor >', 'monitor'), param('ns3::Ptr< ns3::Ipv6FlowClassifier >', 'classifier'), param('ns3::Ptr< ns3::Node >', 'node')])
    ## ipv6-flow-probe.h (module 'flow-monitor'): static ns3::TypeId ns3::Ipv6FlowProbe::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv6-flow-probe.h (module 'flow-monitor'): void ns3::Ipv6FlowProbe::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3Ipv6L3Protocol_methods(root_module, cls):
    """Bind ns3::Ipv6L3Protocol (ipv6-l3-protocol.h, module 'internet').

    Registrations are replayed in the same order as the original generated
    code; only the formatting and comments differ.
    """
    # static const uint16_t PROT_NUMBER
    cls.add_static_attribute('PROT_NUMBER', 'uint16_t const', is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    cls.add_method('SetNode', 'void', [param('ns3::Ptr< ns3::Node >', 'node')])
    # L4 protocol (de)registration: plain and per-interface overloads.
    cls.add_method('Insert', 'void', [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')], is_virtual=True)
    cls.add_method('Insert', 'void', [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')], is_virtual=True)
    cls.add_method('Remove', 'void', [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')], is_virtual=True)
    cls.add_method('Remove', 'void', [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')], is_virtual=True)
    cls.add_method('GetProtocol', 'ns3::Ptr< ns3::IpL4Protocol >', [param('int', 'protocolNumber')], is_const=True, is_virtual=True)
    cls.add_method('GetProtocol', 'ns3::Ptr< ns3::IpL4Protocol >', [param('int', 'protocolNumber'), param('int32_t', 'interfaceIndex')], is_const=True, is_virtual=True)
    # Raw sockets and datagram defaults.
    cls.add_method('CreateRawSocket', 'ns3::Ptr< ns3::Socket >', [])
    cls.add_method('DeleteRawSocket', 'void', [param('ns3::Ptr< ns3::Socket >', 'socket')])
    cls.add_method('SetDefaultTtl', 'void', [param('uint8_t', 'ttl')])
    cls.add_method('SetDefaultTclass', 'void', [param('uint8_t', 'tclass')])
    # Packet input/output paths.
    cls.add_method('Receive', 'void', [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'p'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'from'), param('ns3::Address const &', 'to'), param('ns3::NetDevice::PacketType', 'packetType')])
    cls.add_method('Send', 'void', [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv6Address', 'source'), param('ns3::Ipv6Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv6Route >', 'route')], is_virtual=True)
    # Routing protocol accessors.
    cls.add_method('SetRoutingProtocol', 'void', [param('ns3::Ptr< ns3::Ipv6RoutingProtocol >', 'routingProtocol')], is_virtual=True)
    cls.add_method('GetRoutingProtocol', 'ns3::Ptr< ns3::Ipv6RoutingProtocol >', [], is_const=True, is_virtual=True)
    # Interface management.
    cls.add_method('AddInterface', 'uint32_t', [param('ns3::Ptr< ns3::NetDevice >', 'device')], is_virtual=True)
    cls.add_method('GetInterface', 'ns3::Ptr< ns3::Ipv6Interface >', [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetNInterfaces', 'uint32_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetInterfaceForAddress', 'int32_t', [param('ns3::Ipv6Address', 'addr')], is_const=True, is_virtual=True)
    cls.add_method('GetInterfaceForPrefix', 'int32_t', [param('ns3::Ipv6Address', 'addr'), param('ns3::Ipv6Prefix', 'mask')], is_const=True, is_virtual=True)
    cls.add_method('GetInterfaceForDevice', 'int32_t', [param('ns3::Ptr< ns3::NetDevice const >', 'device')], is_const=True, is_virtual=True)
    # Address management.
    cls.add_method('AddAddress', 'bool', [param('uint32_t', 'i'), param('ns3::Ipv6InterfaceAddress', 'address')], is_virtual=True)
    cls.add_method('GetAddress', 'ns3::Ipv6InterfaceAddress', [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')], is_const=True, is_virtual=True)
    cls.add_method('GetNAddresses', 'uint32_t', [param('uint32_t', 'interface')], is_const=True, is_virtual=True)
    cls.add_method('RemoveAddress', 'bool', [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')], is_virtual=True)
    cls.add_method('RemoveAddress', 'bool', [param('uint32_t', 'interfaceIndex'), param('ns3::Ipv6Address', 'address')], is_virtual=True)
    # Per-interface state.
    cls.add_method('SetMetric', 'void', [param('uint32_t', 'i'), param('uint16_t', 'metric')], is_virtual=True)
    cls.add_method('GetMetric', 'uint16_t', [param('uint32_t', 'i')], is_const=True, is_virtual=True)
    cls.add_method('GetMtu', 'uint16_t', [param('uint32_t', 'i')], is_const=True, is_virtual=True)
    cls.add_method('SetPmtu', 'void', [param('ns3::Ipv6Address', 'dst'), param('uint32_t', 'pmtu')], is_virtual=True)
    cls.add_method('IsUp', 'bool', [param('uint32_t', 'i')], is_const=True, is_virtual=True)
    cls.add_method('SetUp', 'void', [param('uint32_t', 'i')], is_virtual=True)
    cls.add_method('SetDown', 'void', [param('uint32_t', 'i')], is_virtual=True)
    cls.add_method('IsForwarding', 'bool', [param('uint32_t', 'i')], is_const=True, is_virtual=True)
    cls.add_method('SetForwarding', 'void', [param('uint32_t', 'i'), param('bool', 'val')], is_virtual=True)
    cls.add_method('SourceAddressSelection', 'ns3::Ipv6Address', [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'dest')], is_virtual=True)
    cls.add_method('GetNetDevice', 'ns3::Ptr< ns3::NetDevice >', [param('uint32_t', 'i')], is_virtual=True)
    cls.add_method('GetIcmpv6', 'ns3::Ptr< ns3::Icmpv6L4Protocol >', [], is_const=True)
    # Autoconfigured-address handling.
    cls.add_method('AddAutoconfiguredAddress', 'void', [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'network'), param('ns3::Ipv6Prefix', 'mask'), param('uint8_t', 'flags'), param('uint32_t', 'validTime'), param('uint32_t', 'preferredTime'), param('ns3::Ipv6Address', 'defaultRouter', default_value='ns3::Ipv6Address::GetZero( )')])
    cls.add_method('RemoveAutoconfiguredAddress', 'void', [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'network'), param('ns3::Ipv6Prefix', 'mask'), param('ns3::Ipv6Address', 'defaultRouter')])
    cls.add_method('RegisterExtensions', 'void', [], is_virtual=True)
    cls.add_method('RegisterOptions', 'void', [], is_virtual=True)
    cls.add_method('ReportDrop', 'void', [param('ns3::Ipv6Header', 'ipHeader'), param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv6L3Protocol::DropReason', 'dropReason')], is_virtual=True)
    # Multicast group membership.
    cls.add_method('AddMulticastAddress', 'void', [param('ns3::Ipv6Address', 'address')])
    cls.add_method('AddMulticastAddress', 'void', [param('ns3::Ipv6Address', 'address'), param('uint32_t', 'interface')])
    cls.add_method('RemoveMulticastAddress', 'void', [param('ns3::Ipv6Address', 'address')])
    cls.add_method('RemoveMulticastAddress', 'void', [param('ns3::Ipv6Address', 'address'), param('uint32_t', 'interface')])
    cls.add_method('IsRegisteredMulticastAddress', 'bool', [param('ns3::Ipv6Address', 'address')], is_const=True)
    cls.add_method('IsRegisteredMulticastAddress', 'bool', [param('ns3::Ipv6Address', 'address'), param('uint32_t', 'interface')], is_const=True)
    # Protected lifecycle hooks.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('NotifyNewAggregate', 'void', [], visibility='protected', is_virtual=True)
    # Private virtual setters/getters.
    cls.add_method('SetIpForward', 'void', [param('bool', 'forward')], visibility='private', is_virtual=True)
    cls.add_method('GetIpForward', 'bool', [], is_const=True, visibility='private', is_virtual=True)
    cls.add_method('SetMtuDiscover', 'void', [param('bool', 'mtuDiscover')], visibility='private', is_virtual=True)
    cls.add_method('GetMtuDiscover', 'bool', [], is_const=True, visibility='private', is_virtual=True)
    cls.add_method('SetSendIcmpv6Redirect', 'void', [param('bool', 'sendIcmpv6Redirect')], visibility='private', is_virtual=True)
    cls.add_method('GetSendIcmpv6Redirect', 'bool', [], is_const=True, visibility='private', is_virtual=True)
def register_Ns3Ipv6PmtuCache_methods(root_module, cls):
    """Bind ns3::Ipv6PmtuCache (ipv6-pmtu-cache.h, module 'internet')."""
    # The copy constructor is registered before the default one.
    cls.add_constructor([param('ns3::Ipv6PmtuCache const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('DoDispose', 'void', [], is_virtual=True)
    # Per-destination path-MTU lookup / storage.
    cls.add_method('GetPmtu', 'uint32_t', [param('ns3::Ipv6Address', 'dst')])
    cls.add_method('GetPmtuValidityTime', 'ns3::Time', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('SetPmtu', 'void', [param('ns3::Ipv6Address', 'dst'), param('uint32_t', 'pmtu')])
    cls.add_method('SetPmtuValidityTime', 'bool', [param('ns3::Time', 'validity')])
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Bind ns3::Ipv6PrefixChecker (ipv6-address.h, module 'network')."""
    # Default constructor first, then the copy constructor.
    for signature in ([], [param('ns3::Ipv6PrefixChecker const &', 'arg0')]):
        cls.add_constructor(signature)
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Bind the ns3::Ipv6PrefixValue attribute-value wrapper (ipv6-address.h)."""
    # Constructors: default, copy, and from a wrapped ns3::Ipv6Prefix.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Ipv6Prefix', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv6Prefix const &', 'value')])
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
    """Bind ns3::Mac48AddressChecker (mac48-address.h, module 'network')."""
    # Default constructor first, then the copy constructor.
    for signature in ([], [param('ns3::Mac48AddressChecker const &', 'arg0')]):
        cls.add_constructor(signature)
def register_Ns3Mac48AddressValue_methods(root_module, cls):
    """Bind the ns3::Mac48AddressValue attribute-value wrapper (mac48-address.h)."""
    # Constructors: default, copy, and from a wrapped ns3::Mac48Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Mac48Address', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Mac48Address const &', 'value')])
def register_Ns3NetDevice_methods(root_module, cls):
    """Bind the abstract ns3::NetDevice interface (net-device.h, module 'network').

    Same registration order as the original generated code; only formatting
    and comments differ.
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    cls.add_method('AddLinkChangeCallback', 'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_pure_virtual=True, is_virtual=True)
    # Pure-virtual const getters.
    cls.add_method('GetAddress', 'ns3::Address', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetBroadcast', 'ns3::Address', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetChannel', 'ns3::Ptr< ns3::Channel >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetIfIndex', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetMtu', 'uint16_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    # GetMulticast is overloaded for IPv4 and IPv6 arguments.
    cls.add_method('GetMulticast', 'ns3::Address', [param('ns3::Ipv4Address', 'multicastGroup')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetMulticast', 'ns3::Address', [param('ns3::Ipv6Address', 'addr')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetNode', 'ns3::Ptr< ns3::Node >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Boolean capability probes: all pure virtual, const, no arguments.
    for probe in ('IsBridge', 'IsBroadcast', 'IsLinkUp', 'IsMulticast', 'IsPointToPoint', 'NeedsArp'):
        cls.add_method(probe, 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    # Transmission entry points.
    cls.add_method('Send', 'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SendFrom', 'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    # Pure-virtual setters.
    cls.add_method('SetAddress', 'void', [param('ns3::Address', 'address')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetIfIndex', 'void', [param('uint32_t const', 'index')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetMtu', 'bool', [param('uint16_t const', 'mtu')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetNode', 'void', [param('ns3::Ptr< ns3::Node >', 'node')], is_pure_virtual=True, is_virtual=True)
    # Receive-callback registration (promiscuous and non-promiscuous).
    cls.add_method('SetPromiscReceiveCallback', 'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetReceiveCallback', 'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SupportsSendFrom', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
def register_Ns3NetDeviceQueue_methods(root_module, cls):
    """Bind ns3::NetDeviceQueue (net-device.h, module 'network')."""
    # The copy constructor is registered before the default one.
    cls.add_constructor([param('ns3::NetDeviceQueue const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('HasWakeCallbackSet', 'bool', [], is_const=True, is_virtual=True)
    cls.add_method('IsStopped', 'bool', [], is_const=True)
    cls.add_method('SetWakeCallback', 'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_virtual=True)
    # Virtual no-argument hooks, registered in this order: Start, Stop, Wake.
    for hook in ('Start', 'Stop', 'Wake'):
        cls.add_method(hook, 'void', [], is_virtual=True)
def register_Ns3NetDeviceQueueInterface_methods(root_module, cls):
    """Bind ns3::NetDeviceQueueInterface (net-device.h, module 'network')."""
    # The copy constructor is registered before the default one.
    cls.add_constructor([param('ns3::NetDeviceQueueInterface const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('GetSelectedQueue', 'uint8_t', [param('ns3::Ptr< ns3::QueueItem >', 'item')], is_const=True)
    cls.add_method('GetTxQueue', 'ns3::Ptr< ns3::NetDeviceQueue >', [param('uint8_t', 'i')], is_const=True)
    cls.add_method('GetTxQueuesN', 'uint8_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('SetSelectQueueCallback', 'void',
                   [param('ns3::Callback< unsigned char, ns3::Ptr< ns3::QueueItem >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')])
    cls.add_method('SetTxQueuesN', 'void', [param('uint8_t', 'numTxQueues')])
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
def register_Ns3NixVector_methods(root_module, cls):
    """Register ns3::NixVector constructors and member functions on *cls* (pybindgen-generated)."""
    cls.add_output_stream_operator()
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
    cls.add_constructor([])
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [copy constructor]
    cls.add_constructor([param('ns3::NixVector const &', 'o')])
    ## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
    cls.add_method('AddNeighborIndex',
                   'void',
                   [param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
    cls.add_method('BitCount',
                   'uint32_t',
                   [param('uint32_t', 'numberOfNeighbors')],
                   is_const=True)
    ## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::NixVector >',
                   [],
                   is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
    cls.add_method('ExtractNeighborIndex',
                   'uint32_t',
                   [param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
    cls.add_method('GetRemainingBits',
                   'uint32_t',
                   [])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3Node_methods(root_module, cls):
    """Register ns3::Node constructors and member functions on *cls* (pybindgen-generated)."""
    ## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Node const &', 'arg0')])
    ## node.h (module 'network'): ns3::Node::Node() [constructor]
    cls.add_constructor([])
    ## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
    cls.add_constructor([param('uint32_t', 'systemId')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
    cls.add_method('AddApplication',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::Application >', 'application')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddDevice',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    ## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
    cls.add_method('ChecksumEnabled',
                   'bool',
                   [],
                   is_static=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
    cls.add_method('GetApplication',
                   'ns3::Ptr< ns3::Application >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
    cls.add_method('GetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
    cls.add_method('GetId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): ns3::Time ns3::Node::GetLocalTime() const [member function]
    cls.add_method('GetLocalTime',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
    cls.add_method('GetNApplications',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
    cls.add_method('GetNDevices',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
    cls.add_method('GetSystemId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
    cls.add_method('RegisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
    cls.add_method('RegisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
    ## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
    cls.add_method('UnregisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler) [member function]
    cls.add_method('UnregisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
    ## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## node.h (module 'network'): void ns3::Node::DoInitialize() [member function]
    cls.add_method('DoInitialize',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
    """Register ns3::ObjectFactoryChecker constructors on *cls* (pybindgen-generated)."""
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor]
    cls.add_constructor([])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
    return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
    """Register ns3::ObjectFactoryValue constructors and member functions on *cls* (pybindgen-generated)."""
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor]
    cls.add_constructor([])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor]
    cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
    ## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::ObjectFactory',
                   [],
                   is_const=True)
    ## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::ObjectFactory const &', 'value')])
    return
def register_Ns3OutputStreamWrapper_methods(root_module, cls):
    """Register ns3::OutputStreamWrapper constructors and member functions on *cls* (pybindgen-generated)."""
    ## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(ns3::OutputStreamWrapper const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::OutputStreamWrapper const &', 'arg0')])
    ## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(std::string filename, std::_Ios_Openmode filemode) [constructor]
    cls.add_constructor([param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode')])
    ## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(std::ostream * os) [constructor]
    cls.add_constructor([param('std::ostream *', 'os')])
    ## output-stream-wrapper.h (module 'network'): std::ostream * ns3::OutputStreamWrapper::GetStream() [member function]
    cls.add_method('GetStream',
                   'std::ostream *',
                   [])
    return
def register_Ns3Packet_methods(root_module, cls):
    """Register ns3::Packet constructors and member functions on *cls* (pybindgen-generated)."""
    cls.add_output_stream_operator()
    ## packet.h (module 'network'): ns3::Packet::Packet() [constructor]
    cls.add_constructor([])
    ## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [copy constructor]
    cls.add_constructor([param('ns3::Packet const &', 'o')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor]
    cls.add_constructor([param('uint32_t', 'size')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor]
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor]
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<const ns3::Packet> packet) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'packet')])
    ## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function]
    cls.add_method('AddByteTag',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function]
    cls.add_method('AddHeader',
                   'void',
                   [param('ns3::Header const &', 'header')])
    ## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function]
    cls.add_method('AddPacketTag',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function]
    cls.add_method('AddPaddingAtEnd',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function]
    cls.add_method('AddTrailer',
                   'void',
                   [param('ns3::Trailer const &', 'trailer')])
    ## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function]
    cls.add_method('BeginItem',
                   'ns3::PacketMetadata::ItemIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::Packet >',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'void',
                   [param('std::ostream *', 'os'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function]
    cls.add_method('CreateFragment',
                   'ns3::Ptr< ns3::Packet >',
                   [param('uint32_t', 'start'), param('uint32_t', 'length')],
                   is_const=True)
    ## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function]
    cls.add_method('EnableChecking',
                   'void',
                   [],
                   is_static=True)
    ## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function]
    cls.add_method('EnablePrinting',
                   'void',
                   [],
                   is_static=True)
    ## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function]
    cls.add_method('FindFirstMatchingByteTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function]
    cls.add_method('GetByteTagIterator',
                   'ns3::ByteTagIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function]
    cls.add_method('GetNixVector',
                   'ns3::Ptr< ns3::NixVector >',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function]
    cls.add_method('GetPacketTagIterator',
                   'ns3::PacketTagIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint64_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function]
    cls.add_method('PeekHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header')],
                   is_const=True)
    ## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function]
    cls.add_method('PeekPacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function]
    cls.add_method('PeekTrailer',
                   'uint32_t',
                   [param('ns3::Trailer &', 'trailer')])
    ## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function]
    cls.add_method('PrintByteTags',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function]
    cls.add_method('PrintPacketTags',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function]
    cls.add_method('RemoveAllByteTags',
                   'void',
                   [])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function]
    cls.add_method('RemoveAllPacketTags',
                   'void',
                   [])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function]
    cls.add_method('RemoveAtEnd',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function]
    cls.add_method('RemoveAtStart',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function]
    cls.add_method('RemoveHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header')])
    ## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function]
    cls.add_method('RemovePacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function]
    cls.add_method('RemoveTrailer',
                   'uint32_t',
                   [param('ns3::Trailer &', 'trailer')])
    ## packet.h (module 'network'): bool ns3::Packet::ReplacePacketTag(ns3::Tag & tag) [member function]
    cls.add_method('ReplacePacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> nixVector) [member function]
    cls.add_method('SetNixVector',
                   'void',
                   [param('ns3::Ptr< ns3::NixVector >', 'nixVector')])
    ## packet.h (module 'network'): std::string ns3::Packet::ToString() const [member function]
    cls.add_method('ToString',
                   'std::string',
                   [],
                   is_const=True)
    return
def register_Ns3QueueItem_methods(root_module, cls):
    """Register ns3::QueueItem constructor and member functions on *cls* (pybindgen-generated)."""
    cls.add_output_stream_operator()
    ## net-device.h (module 'network'): ns3::QueueItem::QueueItem(ns3::Ptr<ns3::Packet> p) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Packet >', 'p')])
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::QueueItem::GetPacket() const [member function]
    cls.add_method('GetPacket',
                   'ns3::Ptr< ns3::Packet >',
                   [],
                   is_const=True)
    ## net-device.h (module 'network'): uint32_t ns3::QueueItem::GetPacketSize() const [member function]
    cls.add_method('GetPacketSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::QueueItem::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3TimeValue_methods(root_module, cls):
    """Register ns3::TimeValue constructors and member functions on *cls* (pybindgen-generated)."""
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
    cls.add_constructor([])
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
    cls.add_constructor([param('ns3::Time const &', 'value')])
    ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Time const &', 'value')])
    return
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Register ns3::TypeIdChecker constructors on *cls* (pybindgen-generated)."""
    ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
    return
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Register ns3::TypeIdValue constructors and member functions on *cls* (pybindgen-generated)."""
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'value')])
    ## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::TypeId const &', 'value')])
    return
def register_Ns3AddressChecker_methods(root_module, cls):
    """Register ns3::AddressChecker constructors on *cls* (pybindgen-generated)."""
    ## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])
    return
def register_Ns3AddressValue_methods(root_module, cls):
    """Register ns3::AddressValue constructors and member functions on *cls* (pybindgen-generated)."""
    ## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
    ## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
    cls.add_constructor([param('ns3::Address const &', 'value')])
    ## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Address',
                   [],
                   is_const=True)
    ## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Address const &', 'value')])
    return
def register_Ns3HashImplementation_methods(root_module, cls):
    """Register ns3::Hash::Implementation constructors and member functions on *cls* (pybindgen-generated)."""
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor]
    cls.add_constructor([])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_pure_virtual=True, is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
    """Register ns3::Hash::Function::Fnv1a constructors and member functions on *cls* (pybindgen-generated)."""
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor]
    cls.add_constructor([])
    ## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
    """Register ns3::Hash::Function::Hash32 constructors and member functions on *cls* (pybindgen-generated)."""
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor]
    cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
    """Register ns3::Hash::Function::Hash64 constructors and member functions on *cls* (pybindgen-generated)."""
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor]
    cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
    """Register ns3::Hash::Function::Murmur3 constructors and member functions on *cls* (pybindgen-generated)."""
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor]
    cls.add_constructor([])
    ## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_functions(root_module):
    """Register free functions for each ns3 submodule (pybindgen-generated dispatcher)."""
    module = root_module
    register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
    register_functions_ns3_Hash(module.get_submodule('Hash'), root_module)
    register_functions_ns3_TracedValueCallback(module.get_submodule('TracedValueCallback'), root_module)
    return
def register_functions_ns3_FatalImpl(module, root_module):
    """No free functions to register for ns3::FatalImpl (pybindgen-generated placeholder)."""
    return
def register_functions_ns3_Hash(module, root_module):
    """Register free functions for ns3::Hash; only delegates to the Function submodule."""
    register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module)
    return
def register_functions_ns3_Hash_Function(module, root_module):
    """No free functions to register for ns3::Hash::Function (pybindgen-generated placeholder)."""
    return
def register_functions_ns3_TracedValueCallback(module, root_module):
    """No free functions to register for ns3::TracedValueCallback (pybindgen-generated placeholder)."""
    return
def main():
    """Entry point: build the module description and emit the C++ binding code to stdout."""
    out = FileCodeSink(sys.stdout)
    root_module = module_init()
    register_types(root_module)
    register_methods(root_module)
    register_functions(root_module)
    root_module.generate(out)
if __name__ == '__main__':
    main()
| wasiqmukhtar/tcp-eval.wasiq | src/flow-monitor/bindings/modulegen__gcc_LP64.py | Python | gpl-2.0 | 441,417 |
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: Isaac Saito
from python_qt_binding.QtGui import QWidget
class AbstractStatusWidget(QWidget):
    """
    An abstract widget that consists of a status display part and a timeline
    part, although this class doesn't define any concrete design for those
    display parts. Instead this only defines interface methods.
    """
    def __init__(self):
        super(AbstractStatusWidget, self).__init__()
    def new_diagnostic(self, msg, is_forced=False):
        """
        Needs to be overridden in derived classes.
        :param msg: This can be a function that takes either
                    { DiagnosticArray, DiagnosticsStatus } as an argument.
        :param is_forced: If True then update occurs even when paused.
        """
        pass
    def pause(self, msg):
        """Interface stub: suspend updates. Override in derived classes."""
        pass
    def unpause(self, msg):
        """Interface stub: resume updates. Override in derived classes."""
        pass
    def get_color_for_value(self, queue_diagnostic, color_index):
        """Interface stub: map a diagnostic value to a color. Override in derived classes."""
        pass
    def on_pause(self, paused, diagnostic_arr):
        """Interface stub: pause-state change callback. Override in derived classes."""
        pass
| aslab/rct | higgs/branches/ros-groovy/higgs_gazebo_simulation/rqt_robot_plugins/rqt_robot_monitor/src/rqt_robot_monitor/abst_status_widget.py | Python | gpl-3.0 | 2,584 |
s = f'{42:foo\{bar}baz\}' | smmribeiro/intellij-community | python/testData/psi/FStringSingleSlashesBeforeBracesInFormatPart.py | Python | apache-2.0 | 25 |
import unittest
import client
import utilities
import datetime
class TestSeriesFunctions(unittest.TestCase):
    """Integration tests for RSSeriesClient lookups (query the live RecordSearch
    service, so they require network access and current data)."""
    def setUp(self):
        self.rs = client.RSSeriesClient()
    def test_get_identifier(self):
        identifier = self.rs.get_identifier('A1')
        self.assertEqual(identifier, 'A1')
    def test_get_title(self):
        test_title = (
            'Correspondence files, annual single number series '
            '[Main correspondence files series of the agency]'
        )
        title = self.rs.get_title('A1')
        self.assertEqual(title, test_title)
    def test_get_accumulation_dates(self):
        # Expected parsed date structure: raw string plus start/end with
        # day/month precision flags.
        test_dates = {
            'date_str': '01 Jan 1903 - 31 Dec 1938',
            'start_date': {
                'date': datetime.datetime(1903, 1, 1, 0, 0),
                'day': True,
                'month': True
            },
            'end_date': {
                'date': datetime.datetime(1938, 12, 31, 0, 0),
                'day': True,
                'month': True
            }
        }
        accumulation_dates = self.rs.get_accumulation_dates('A1')
        self.assertEqual(accumulation_dates, test_dates)
    def test_get_contents_dates(self):
        test_dates = {
            'date_str': '01 Jan 1890 - 31 Dec 1969',
            'start_date': {
                'date': datetime.datetime(1890, 1, 1, 0, 0),
                'day': True,
                'month': True
            },
            'end_date': {
                'date': datetime.datetime(1969, 12, 31, 0, 0),
                'day': True,
                'month': True
            }
        }
        contents_dates = self.rs.get_contents_dates('A1')
        self.assertEqual(contents_dates, test_dates)
    def test_get_number_described(self):
        # NOTE(review): 'described_number' depends on live data and may drift.
        results = {
            'described_note': 'All items from this series are entered on RecordSearch.',
            'described_number': 64455
        }
        items_described = self.rs.get_number_described('A1')
        self.assertEqual(items_described, results)
class TestItemFunctions(unittest.TestCase):
    """Integration tests for RSItemClient item lookups (live service)."""
    def setUp(self):
        self.rs = client.RSItemClient()
    def test_get_title(self):
        test_title = (
            'WRAGGE Clement Lionel Egerton : SERN 647 : '
            'POB Cheadle England : POE Enoggera QLD : '
            'NOK (Father) WRAGGE Clement Lindley'
        )
        title = self.rs.get_title('3445411')
        self.assertEqual(title, test_title)
    def test_get_digitised_pages(self):
        pages = self.rs.get_digitised_pages('3445411')
        self.assertEqual(pages, 47)
class TestClosedItemDetails(unittest.TestCase):
    """Checks the full summary dict returned for a closed-access item."""
    def setUp(self):
        self.rs = client.RSItemClient()
    def test_details(self):
        test_details = {
            'access_decision': {
                'date_str': u'16 Jul 2012',
                'end_date': None,
                'start_date': {
                    'date': datetime.datetime(2012, 7, 16, 0, 0),
                    'day': True,
                    'month': True
                }
            },
            'access_reason': [{'note': '', 'reason': u'Withheld pending adv'}],
            'access_status': u'Closed',
            'contents_dates': {
                'date_str': u'1918 - 1925',
                'end_date': {
                    'date': datetime.datetime(1925, 1, 1, 0, 0),
                    'day': False,
                    'month': False
                },
                'start_date': {
                    'date': datetime.datetime(1918, 1, 1, 0, 0),
                    'day': False,
                    'month': False
                }
            },
            'control_symbol': u'G1924/3039',
            'digitised_pages': 0,
            'digitised_status': False,
            'identifier': u'55545',
            'location': u'Canberra',
            'series': u'A106',
            'title': u'Increments to Permanent Professional Officers.'
        }
        details = self.rs.get_summary('55545')
        self.assertEqual(details, test_details)
class TestAgencyFunctions(unittest.TestCase):
    """Integration tests for RSAgencyClient lookups (live service)."""
    def setUp(self):
        self.rs = client.RSAgencyClient()
    def test_get_identifier(self):
        identifier = self.rs.get_identifier('CA 12')
        self.assertEqual(identifier, 'CA 12')
    def test_get_title(self):
        test_title = (
            'Prime Minister\'s Department'
        )
        title = self.rs.get_title('CA 12')
        self.assertEqual(title, test_title)
    def test_get_dates(self):
        test_dates = {
            'date_str': '01 Jul 1911 - 12 Mar 1971',
            'start_date': {
                'date': datetime.datetime(1911, 7, 1, 0, 0),
                'day': True,
                'month': True
            },
            'end_date': {
                'date': datetime.datetime(1971, 3, 12, 0, 0),
                'day': True,
                'month': True
            }
        }
        dates = self.rs.get_dates('CA 12')
        self.assertEqual(dates, test_dates)
class TestAgencyDetails(unittest.TestCase):
    """Checks the full summary dict for one agency, including its function
    and the chain of superior agencies over time."""
    def setUp(self):
        self.rs = client.RSAgencyClient()
    def test_summary(self):
        test_details = {
            'agency_id': 'CA 100',
            'agency_status': u'Regional or State Office',
            'associated_people': None,
            'controlled_agencies': None,
            'dates': {'date_str': u'01 Oct 1926 - 31 Dec 1936',
                      'end_date': {'date': datetime.datetime(1936, 12, 31, 0, 0),
                                   'day': True,
                                   'month': True},
                      'start_date': {'date': datetime.datetime(1926, 10, 1, 0, 0),
                                     'day': True,
                                     'month': True}},
            'functions': [{'date_str': u'01 Oct 1926 - 31 Dec 1936',
                           'end_date': {'date': datetime.datetime(1936, 12, 31, 0, 0),
                                        'day': True,
                                        'month': True},
                           'identifier': u'HORTICULTURE',
                           'start_date': {'date': datetime.datetime(1926, 10, 1, 0, 0),
                                          'day': True,
                                          'month': True},
                           'title': u'HORTICULTURE'}],
            'location': u'Victoria',
            'previous_agencies': None,
            'subsequent_agencies': None,
            'superior_agencies': [{'date_str': u'01 Oct 1926 - 31 Jan 1928',
                                   'end_date': {'date': datetime.datetime(1928, 1, 31, 0, 0),
                                                'day': True,
                                                'month': True},
                                   'identifier': u'CA 20',
                                   'start_date': {'date': datetime.datetime(1926, 10, 1, 0, 0),
                                                  'day': True,
                                                  'month': True},
                                   'title': u'Department of Markets and Migration, Central Administration'},
                                  {'date_str': u'01 Jan 1928 - 31 Dec 1928',
                                   'end_date': {'date': datetime.datetime(1928, 12, 31, 0, 0),
                                                'day': True,
                                                'month': True},
                                   'identifier': u'CA 21',
                                   'start_date': {'date': datetime.datetime(1928, 1, 1, 0, 0),
                                                  'day': True,
                                                  'month': True},
                                   'title': u'Department of Markets [I], Central Office'},
                                  {'date_str': u'01 Dec 1928 - 30 Apr 1930',
                                   'end_date': {'date': datetime.datetime(1930, 4, 30, 0, 0),
                                                'day': True,
                                                'month': True},
                                   'identifier': u'CA 23',
                                   'start_date': {'date': datetime.datetime(1928, 12, 1, 0, 0),
                                                  'day': True,
                                                  'month': True},
                                   'title': u'Department of Markets and Transport, Central Office'},
                                  {'date_str': u'01 Apr 1930 - 30 Apr 1932',
                                   'end_date': {'date': datetime.datetime(1932, 4, 30, 0, 0),
                                                'day': True,
                                                'month': True},
                                   'identifier': u'CA 25',
                                   'start_date': {'date': datetime.datetime(1930, 4, 1, 0, 0),
                                                  'day': True,
                                                  'month': True},
                                   'title': u'Department of Markets [II], Central Office'},
                                  {'date_str': u'01 Apr 1932 - 31 Dec 1936',
                                   'end_date': {'date': datetime.datetime(1936, 12, 31, 0, 0),
                                                'day': True,
                                                'month': True},
                                   'identifier': u'CA 28',
                                   'start_date': {'date': datetime.datetime(1932, 4, 1, 0, 0),
                                                  'day': True,
                                                  'month': True},
                                   'title': u'Department of Commerce, Central Office'}],
            'title': u'State Advisory Fruit Board, Victoria'}
        details = self.rs.get_summary('CA 100')
        self.assertEqual(details, test_details)
class TestAgencySearch(unittest.TestCase):
    """Checks the total result count for an agency search by function.
    NOTE(review): totals depend on live data and may change over time."""
    def setUp(self):
        self.rs = client.RSAgencySearchClient()
    def test_totals(self):
        test_total = '198'
        self.rs.search_agencies(function="MIGRATION")
        total = self.rs.total_results
        self.assertEqual(total, test_total)
class TestSeriesDetails(unittest.TestCase):
    """Checks the full summary dict for one series, including access status,
    locations and recording/controlling agencies."""
    def setUp(self):
        self.rs = client.RSSeriesClient()
    def test_details(self):
        test_details = {
            'access_status': {'CLOSED': 0, 'NYE': 0, 'OPEN': 27, 'OWE': 0},
            'accumulation_dates': {
                'date_str': u'20 Jan 1916 - 31 Jul 1916',
                'end_date': {'date': datetime.datetime(1916, 7, 31, 0, 0),
                             'day': True,
                             'month': True},
                'start_date': {'date': datetime.datetime(1916, 1, 20, 0, 0),
                               'day': True,
                               'month': True}},
            'arrangement': u'Single number system imposed by National Archives of Australia',
            'contents_dates': {
                'date_str': u'27 Aug 1914 - 22 Apr 1918',
                'end_date': {'date': datetime.datetime(1918, 4, 22, 0, 0),
                             'day': True,
                             'month': True},
                'start_date': {'date': datetime.datetime(1914, 8, 27, 0, 0),
                               'day': True,
                               'month': True}},
            'control_symbols': u'[1] - [27]',
            'controlling_agencies': [{
                'date_str': u'12 Mar 1971 -',
                'end_date': {'date': None,
                             'day': False,
                             'month': False},
                'identifier': u'CA 1401',
                'start_date': {'date': datetime.datetime(1971, 3, 12, 0, 0),
                               'day': True,
                               'month': True},
                'title': u'Department of the Prime Minister and Cabinet'}],
            'controlling_series': None,
            'identifier': 'CP359/2',
            'items_described': {'described_note': u'All items from this series are entered on RecordSearch.', 'described_number': 27},
            'items_digitised': 21,
            'locations': [{'location': u'ACT', 'quantity': 0.36}],
            'physical_format': u'PAPER FILES AND DOCUMENTS',
            'previous_series': None,
            'recording_agencies': [{'date_str': u'20 Jan 1916 - 31 Jul 1916',
                                    'end_date': {'date': datetime.datetime(1916, 7, 31, 0, 0),
                                                 'day': True,
                                                 'month': True},
                                    'identifier': u'CA 12',
                                    'start_date': {'date': datetime.datetime(1916, 1, 20, 0, 0),
                                                   'day': True,
                                                   'month': True},
                                    'title': u"Prime Minister's Department - Prime Minister's Office"},
                                   {'date_str': u'20 Jan 1916 - 31 Jul 1916',
                                    'end_date': {'date': datetime.datetime(1916, 7, 31, 0, 0),
                                                 'day': True,
                                                 'month': True},
                                    'identifier': u'CP 290',
                                    'start_date': {'date': datetime.datetime(1916, 1, 20, 0, 0),
                                                   'day': True,
                                                   'month': True},
                                    'title': u'The Rt Hon William Morris HUGHES PC, CH, KC'}],
            'related_series': None,
            'subsequent_series': None,
            'title': u'Subject files maintained by the Prime Minister (William Morris Hughes) during his visit to London, 1916'
        }
        details = self.rs.get_summary('CP359/2')
        self.assertEqual(details, test_details)
class TestSeriesSearch(unittest.TestCase):
    """Checks the total result count for a series search by recording agency.
    NOTE(review): totals depend on live data and may change over time."""
    def setUp(self):
        self.rs = client.RSSeriesSearchClient()
    def test_totals(self):
        test_total = '429'
        self.rs.search_series(agency_recording="CA 12", page=1)
        total = self.rs.total_results
        self.assertEqual(total, test_total)
class TestUtilityFunctions(unittest.TestCase):
    """Unit tests for the date-parsing helpers in the utilities module.
    These are pure functions and need no network access."""
    def test_parse_date(self):
        # Each case: (input string, expected dict with day/month precision flags).
        cases = [
            ('2 June 1884', {'date': datetime.datetime(1884, 6, 2), 'day': True, 'month': True}),
            ('03 Jul 1921', {'date': datetime.datetime(1921, 7, 3), 'day': True, 'month': True}),
            ('13 Jul. 1921', {'date': datetime.datetime(1921, 7, 13), 'day': True, 'month': True}),
            ('Dec 1778', {'date': datetime.datetime(1778, 12, 1), 'day': False, 'month': True}),
            ('1962', {'date': datetime.datetime(1962, 1, 1), 'day': False, 'month': False}),
        ]
        for case in cases:
            self.assertEqual(utilities.parse_date(case[0]), case[1])
    def test_process_date_string(self):
        cases = [
            ('2 June 1884 - Sep 1884',
                {
                    'date_str': '2 June 1884 - Sep 1884',
                    'start_date': {'date': datetime.datetime(1884, 6, 2), 'day': True, 'month': True},
                    'end_date': {'date': datetime.datetime(1884, 9, 1), 'day': False, 'month': True},
                }),
        ]
        for case in cases:
            self.assertEqual(utilities.process_date_string(case[0]), case[1])
    def test_convert_date_to_iso(self):
        # ISO output truncates to the known precision (day / month / year only).
        cases = [
            ({'date': datetime.datetime(1884, 6, 2), 'day': True, 'month': True}, '1884-06-02'),
            ({'date': datetime.datetime(1778, 12, 1), 'day': False, 'month': True}, '1778-12'),
            ({'date': datetime.datetime(1962, 1, 1), 'day': False, 'month': False}, '1962'),
        ]
        for case in cases:
            self.assertEqual(utilities.convert_date_to_iso(case[0]), case[1])
# Run the whole test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| wragge/recordsearch_tools | tests.py | Python | cc0-1.0 | 16,330 |
'''
Calculation of [Total # of Contigs], [Total Length], [Total # of trimmed Contigs], [Trimmed Length], [GC content],
[Min Contig Size [bp]], [Median Contig Size [bp]], [Mean Contig Size [bp]], [Max Contig Size [bp]],
[N50[bp] [# of Contigs],
[Total # of Contigs]
This code creates an output.txt file with all of the statistics
Usage: python assembly_stats.py <assembly.fasta> <minimum contig size>
Modified code from Nicolas Schmelling
'''
from __future__ import division
from Bio import SeqIO
from statistics import median
import sys
import os.path
def median(trimmedLength):
    """Return the median of a list of numbers, or None for an empty list.

    NOTE: this intentionally shadows the ``from statistics import median``
    at the top of the file (same contract, except it returns None instead
    of raising on empty input).

    Fix: the original indexed the list as given and was only correct when
    the caller passed pre-sorted data; the values are now sorted locally so
    the result is correct for any input order.  The input list is not
    modified.

    :param trimmedLength: sequence of numbers.
    :return: the median (middle element for odd length, mean of the two
             middle elements for even length), or None if empty.
    """
    values = sorted(trimmedLength)
    n = len(values)
    if n < 1:
        return None
    if n % 2 == 1:
        return values[n // 2]
    return sum(values[n // 2 - 1:n // 2 + 1]) / 2.0
def assembly_stats(contigsMultifasta, minContigSize):
    """Print assembly statistics for contigs of at least *minContigSize* bp.

    Writes two tab-separated lines to stdout: a header and the values
    (contig count, total/min/max/mean/median sizes, N50, the contig size and
    count at 1/2/4/10 Mbp cumulative length, and GC content in percent).

    :param contigsMultifasta: path to a multi-FASTA file of contigs.
    :param minContigSize: minimum contig length [bp] to include.
    :raises ValueError: if no contig passes the size threshold (the original
        code crashed with ZeroDivisionError in that case).
    """
    trimmedLength = []  # lengths [bp] of contigs passing the threshold
    sum_trimmed = 0     # total length [bp] of kept contigs
    thres = minContigSize - 1
    GC_count = 0
    # Collect lengths and G/C counts for contigs above the threshold.
    # (The original opened the file a second time into an unused, never-closed
    # handle; that leak has been removed, and the parse handle is now closed
    # via a with-block.)
    with open(contigsMultifasta) as fasta_handle:
        for seq_record in SeqIO.parse(fasta_handle, 'fasta'):
            if len(seq_record.seq) > thres:
                sum_trimmed += len(seq_record.seq)
                trimmedLength.append(len(seq_record.seq))
                GC_count += seq_record.seq.count("C")
                GC_count += seq_record.seq.count("G")
    if not trimmedLength:
        raise ValueError(
            "no contigs longer than %d bp found in %s"
            % (minContigSize, contigsMultifasta))
    # GC content is now computed once, after the loop (the original
    # recomputed it on every iteration).
    GC_cont = float((GC_count / sum_trimmed) * 100)
    # Sort the kept contigs from largest to smallest for the N50 scan.
    trimmedLength.sort(reverse=True)
    # Mean and median contig sizes.
    meancontig = int(sum_trimmed / len(trimmedLength))
    mediancontig = median(trimmedLength)
    # Walk the sorted lengths and record the contig size / count at which the
    # cumulative length first reaches N50 and the 1/2/4/10 Mbp marks.
    totalSum = 0
    contigcount = 0
    N50 = 0
    N50con = 0
    n1m = 0
    s1m = 0.0
    n2m = 0
    s2m = 0.0
    n4m = 0
    s4m = 0.0
    n10m = 0
    s10m = 0.0
    for contiglen in trimmedLength:
        totalSum += contiglen
        contigcount += 1
        if totalSum >= sum_trimmed / 2.0 and N50 == 0:
            N50con = contigcount
            N50 = contiglen
        if totalSum >= 1000000 and n1m == 0:
            n1m = contigcount
            s1m = contiglen
        if totalSum >= 2000000 and n2m == 0:
            n2m = contigcount
            s2m = contiglen
        if totalSum >= 4000000 and n4m == 0:
            n4m = contigcount
            s4m = contiglen
        if totalSum >= 10000000 and n10m == 0:
            n10m = contigcount
            s10m = contiglen
    # NOTE(review): the header labels total_size and max_size as "Kbp" but the
    # values are printed in bp; output is kept byte-identical so downstream
    # parsers are not broken — confirm intended units before changing.
    print("sample_name\tnum_contigs\ttotal_size_Kbp\tmin_size_bp\tmax_size_Kbp\taverage_size\tmedian_size\tN50\tmum_N50\tsize_1Mbp_Kbp\tnum_1Mbp\tsize_2Mbp_Kbp\tnum_2Mbp\tsize_4Mbp_Kbp\tnum_4Mbp\tsize_10Mbp_Kbp\tmum_10Mbp\tGC_content%\t")
    print ("%s\t%d" % (os.path.basename(contigsMultifasta.replace(".fasta", "")),len(trimmedLength)) + '\t' \
           + "%d" % (sum_trimmed) + '\t' \
           + "%d\t%d\t%.2f\t%.2f" % (min(trimmedLength),max(trimmedLength),meancontig,mediancontig) + '\t' \
           + "%d\t%d\t%.2f\t%d\t%.2f\t%d" % (N50,N50con,s1m/1000,n1m,s2m/1000,n2m) + '\t' \
           + "%.2f\t%d\t%.2f\t%d" % (s4m/1000,n4m,s10m/1000,n10m) + '\t' \
           + "%.2f" % (GC_cont) )
if __name__ == "__main__":
    # Usage: python assembly_stats.py <assembly.fasta> <minimum contig size>
    contigsMultifasta = sys.argv[1]
    minContigSize = int(sys.argv[2])
    assembly_stats(contigsMultifasta, minContigSize)
| marbl/MetaCompass | bin/assembly_stats.py | Python | artistic-2.0 | 3,884 |
#!/bin/python
import os, fnmatch
from subprocess import call
class RunPandoc():
    """Convert HTML files found under a directory tree to Markdown via pandoc."""

    def runPandoc(self, fileName):
        """Invoke pandoc to convert <fileName>.html into <fileName>.md."""
        pandoc = "C:\\Program Files (x86)\\Pandoc\\bin\\pandoc.exe"
        names = {"fileName": fileName, "ext1": "md", "ext2": "html"}
        markdown_path = "%(fileName)s.%(ext1)s" % names
        html_path = "%(fileName)s.%(ext2)s" % names
        call([pandoc, "-f", "html", "-t", "markdown", "-o", markdown_path, html_path])

    def locate(self, pattern, excludes, root=os.curdir):
        """Yield paths of files below *root* whose names match *pattern*,
        skipping any directory whose name appears in *excludes*."""
        for dirpath, dirnames, filenames in os.walk(os.path.abspath(root)):
            # Prune excluded directories in place so os.walk never descends.
            dirnames[:] = [d for d in dirnames if d not in excludes]
            for matched in fnmatch.filter(filenames, pattern):
                yield os.path.join(dirpath, matched)

    def findFile(self, pattern, excludes):
        """Run pandoc on every matching file (name truncated at first dot)."""
        for path in self.locate(pattern, excludes):
            base = os.path.basename(path).split(".")[0]
            self.runPandoc(base)
# Driver: convert all *.html files under the current directory that do not
# have _vti_cnf (FrontPage metadata folders) as part of their path.
r = RunPandoc()
r.findFile("*.html", ['_vti_cnf'])
| donlee888/JsObjects | Python/PandocConvert/PandocConvert.py | Python | mit | 1,118 |
#
# Copyright 2014-2016, 2021 Lars Pastewka (U. Freiburg)
# 2015-2016 Adrien Gola (KIT)
# 2014 James Kermode (Warwick U.)
#
# matscipy - Materials science with Python at the atomic-scale
# https://github.com/libAtoms/matscipy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from .metis import save_metis
from .tbl import savetbl, loadtbl | libAtoms/matscipy | matscipy/io/__init__.py | Python | lgpl-2.1 | 948 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# Name: yttools.py
# Version: 0.2
# Purpose: 工具和自定义控件
# Author: Dormouse.Young
# Created: 2016-05-03
# LastModify 2016-08-31
# Copyright: Dormouse.Young
# Licence: GPL V3.0
# ---------------------------------------------------------------------------
from PyQt5.QtCore import (Qt, QSortFilterProxyModel, QModelIndex, QAbstractTableModel)
from PyQt5.QtGui import (
QFont,
)
import datetime
import hashlib
import logging
import json
import os
import platform
import re
import subprocess
from functools import reduce
from urllib.parse import urlparse
# import httplib2
# from bs4 import BeautifulSoup
# from jinja2 import Template
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(name)s %(levelname)s %(message)s')
"""
class YtTools():
def __init__(self):
self.logger = logging.getLogger(__name__)
timeout = 20 # second
self.http = httplib2.Http('.cache', timeout=timeout)
def download(self, url):
self.logger.info("start download:%s", url)
try:
response, content = self.http.request(url)
except Exception as e:
self.logger.error("download fail:%s", url)
self.logger.error(e)
return None
if response.status == 200:
return content
else:
return None
"""
class SqlModel(QAbstractTableModel):
    """Lazily-loading Qt table model backed by an ORM-style query object.

    Rows are fetched in pages of ``rowPerPage`` through fetchMore().
    NOTE(review): data() reads ``self.colSource`` (attribute name per
    column), which is never set in this class — a subclass or caller must
    provide it before the model is used; confirm against users.
    """
    def __init__(self, **kwargs):
        parent = kwargs.get('parent')
        super(SqlModel, self).__init__(parent)
        self.log = logging.getLogger(__name__)
        self.rows = []           # rows fetched so far
        self.rowPerPage = 10  # fetch 10 records per page by default
        self.totalRowCount = 0   # total rows available from the query
        self.query = None
    def setHeader(self, datas):
        """Set the list of column header labels."""
        self.headers = datas
    def setRowPerPage(self, count=10):
        """ Set how many records are fetched per page.
        Parameter:
            count: records per fetch, default 10 (int).
        """
        self.rowPerPage = count
    def setQuery(self, query=None):
        """
        Set the model's query; if *query* is None the cached rows are simply
        re-initialised against the current query.
        :param query:
        :return:
        """
        self.beginResetModel()
        if query:
            self.query = query
        self.rows = []
        self.totalRowCount = self.getTotalRowCount()
        self.endResetModel()
    def getTotalRowCount(self):
        """
        Return the total row count.  With small per-row data there is no need
        to override this; with large rows it is slow and should be
        overridden by subclasses for speed.
        """
        return self.query.count()
    def rowCount(self, QModelIndex_parent=None, *args, **kwargs):
        # Only the rows fetched so far, not the full query size.
        return len(self.rows)
    def headerData(self, p_int, Qt_Orientation, int_role=None):
        if int_role == Qt.DisplayRole:
            if Qt_Orientation == Qt.Horizontal:
                return self.headers[p_int]
            # if Qt_Orientation == Qt.Vertical:
            # return p_int + 1
        return super(SqlModel, self).headerData(p_int, Qt_Orientation, int_role)
    def columnCount(self, QModelIndex_parent=None, *args, **kwargs):
        return len(self.headers)
    def data(self, index, role=Qt.DisplayRole):
        """Return the display value: the attribute named by colSource for the
        indexed row object."""
        if not index.isValid():
            return None
        if index.row() >= self.totalRowCount or index.row() < 0:
            return None
        if role == Qt.DisplayRole:
            source = self.colSource[index.column()]
            data = getattr(self.rows[index.row()], source)
            return data
        return None
    def canFetchMore(self, QModelIndex):
        # More pages exist while fewer rows are cached than the query holds.
        return len(self.rows) < self.totalRowCount
    def fetchMore(self, index):
        """Fetch the next page of rows (up to rowPerPage) from the query."""
        remainder = self.totalRowCount - len(self.rows)
        itemsToFetch = min(self.rowPerPage, remainder)
        self.beginInsertRows(QModelIndex(), len(self.rows), len(self.rows) + itemsToFetch - 1)
        self.rows = self.fetchRows(len(self.rows) + itemsToFetch)
        self.endInsertRows()
    def fetchRows(self, offset):
        """Return the first *offset* rows of the query (re-slices from 0)."""
        rows = self.query[:offset]
        return rows
class SortFilterProxyModel(QSortFilterProxyModel):
    """Proxy model meant to filter datetime columns in their displayed
    (locale short-date) format rather than ISO format."""
    def __init__(self, **kwargs):
        parent = kwargs.get('parent')
        super(SortFilterProxyModel, self).__init__(parent)
    # Work around the fact that QSortFilterProxyModel always filters datetime
    # values in QtCore.Qt.ISODate format, but the tree views display using
    # QtCore.Qt.DefaultLocaleShortDate format.
    def filterAcceptsRow(self, sourceRow, sourceParent):
        # self.log.debug('self.rowCount:%s', self.rowCount())
        # self.log.debug('source rowCount:%s', self.sourceModel().rowCount())
        # self.log.debug(sourceRow)
        # NOTE(review): dateCol is hard-coded to 999, so the date branch only
        # runs when filterKeyColumn() is 999 — effectively disabled; confirm
        # the intended date column index.
        dateCol = 999
        # Do we filter for the date column?
        if self.filterKeyColumn() == dateCol:
            # Fetch datetime value.
            index = self.sourceModel().index(sourceRow, dateCol, sourceParent)
            data = self.sourceModel().data(index)
            # Return, if regExp match in displayed format.
            return (self.filterRegExp().indexIn(data.toString(Qt.DefaultLocaleShortDate)) >= 0)
        # Not our business.
        return super(SortFilterProxyModel, self).filterAcceptsRow(sourceRow, sourceParent)
class DictClass():
    """Expose a dict's keys as attributes, converting nested dicts recursively."""
    def __init__(self, **mydict):
        converted = {
            key: DictClass(**value) if isinstance(value, dict) else value
            for key, value in mydict.items()
        }
        self.__dict__.update(converted)
class Option():
    """JSON-backed application options, exposed as nested attributes.

    Values are loaded from *option_file* and wrapped with DictClass so they
    can be accessed as ``opt.globe.width`` etc.; on any load error the
    built-in defaults are written back to disk.
    """
    def __init__(self, option_file='conf.json'):
        self.log = logging.getLogger(__name__)
        self.option_file = option_file
        self.read()

    def read(self):
        """Load options from the JSON file, falling back to defaults on error."""
        try:
            with open(self.option_file, 'r', encoding="utf-8") as handle:
                self.to_class(json.load(handle))
        except Exception as err:
            self.log.warning('load config file error, use default config')
            self.log.warning(err)
            self.init_default()

    def to_class(self, mydict):
        """Merge *mydict* into this instance as (nested) attributes."""
        wrapped = DictClass(**mydict)
        self.__dict__.update(wrapped.__dict__)

    def to_dict(self, myclass):
        """Recursively convert an object's attributes back into a plain dict."""
        result = dict(myclass.__dict__)
        for key, value in result.items():
            # option_file stays as-is; nested DictClass values are unwrapped.
            if key != 'option_file' and isinstance(value, DictClass):
                result[key] = self.to_dict(value)
        return result

    def get_default(self):
        """Return the built-in default configuration as a plain dict."""
        globe = {
            'width': 1000,
            'height': 690,
            'x': 0,
            'y': 0
        }
        return {'globe': globe}

    def save(self):
        """Serialise the current options (minus bookkeeping keys) to disk."""
        with open(self.option_file, 'w', encoding="utf-8") as handle:
            payload = self.to_dict(self)
            for key in ['option_file', 'log']:
                del payload[key]
            json.dump(payload, handle, ensure_ascii=False)

    def init_default(self):
        """Reset the options to the defaults and persist them."""
        self.to_class(self.get_default())
        self.save()
| dormouse/read | yttools.py | Python | lgpl-3.0 | 7,188 |
# -*- coding: utf-8 -*-
import logging
import os
import datetime
import configparser
from cosycar.constants import Constants
from cosycar.read_email import ReadEmail
from cosycar.create_events import CreateEvent
log = logging.getLogger(__name__)
class Events():
    """Work out how many minutes remain until the next "leave" event.

    Event sources are an optional email check (delegated to ReadEmail) and a
    time-to-leave file written by CreateEvent; a calendar source is stubbed
    out (always None).
    """
    def __init__(self, config_file):
        """Read overtime and email-polling settings from *config_file*."""
        self._config_file = config_file
        config = configparser.ConfigParser()
        config.read(self._config_file)
        self._overtime = config.getint('CAR_SETTINGS', 'overtime')
        self._check_email = config.getboolean('EMAIL', 'check_email')

    def fetch_next_event(self):
        """Return minutes until the next event, or None when no event is known.

        A negative value represents an event that has already passed (but is
        still within the configured overtime window).
        """
        if self._check_email:
            self._check_email_event()
        minutes_to_file_event = self._minutes_to_file_event()
        # Calendar integration is not implemented yet.
        minutes_to_calendar_event = None
        minutes_to_next_event = self._pick_time_to_use(
            minutes_to_calendar_event,
            minutes_to_file_event,
            None)
        if minutes_to_next_event is not None:
            log.debug("Next event in: {}".format(minutes_to_next_event))
        return minutes_to_next_event

    def _pick_time_to_use(self, event_1, event_2, event_3):
        """Return the smallest of the given event times, ignoring None
        entries; None when all three are None.

        Bug fix: the original substituted a 999-minute sentinel for None
        entries, but only did so when *all* entries were non-None (its helper
        implemented "all are not None" despite its name), so with a mix of
        None and real times it returned the first non-None value instead of
        the minimum — and a real event more than 999 minutes away could lose
        to the sentinel.  Filtering out None directly avoids both problems.
        """
        candidates = [e for e in (event_1, event_2, event_3) if e is not None]
        return min(candidates) if candidates else None

    def _at_least_one_is_not_none(self, event_1, event_2, event_3):
        """Return True if at least one of the three arguments is not None.

        Bug fix: the original chained ``and`` over the three checks, which
        implemented "all are not None" and so contradicted the method name.
        Kept for backward compatibility even though _pick_time_to_use no
        longer needs it.
        """
        return any(e is not None for e in (event_1, event_2, event_3))

    def _minutes_to_file_event(self):
        """Return minutes to the event in the leave_at file, or None.

        A passed event is still reported (as a negative number) while within
        the overtime window; once overtime has elapsed the file is deleted.
        """
        event = self._file_event()
        if event:
            now = datetime.datetime.now()
            delta = event - now
            minutes_to_event = round(delta.seconds / 60)
            minutes_to_event += delta.days * 24 * 60
            if self._passed_event(minutes_to_event):
                if self._running_on_overtime(minutes_to_event):
                    minutes_to_file_event = minutes_to_event
                else:
                    minutes_to_file_event = None
                    log.info("Overtime passed, deleting leave_at file")
                    delete_event = CreateEvent()
                    delete_event.delete()
            else:
                minutes_to_file_event = minutes_to_event
        else:
            minutes_to_file_event = None
        return minutes_to_file_event

    def _check_email_event(self):
        """Poll the configured mailbox for new leave-time instructions."""
        email = ReadEmail(self._config_file)
        email.fetch()

    def _passed_event(self, event_time):
        """True when *event_time* (minutes) lies in the past."""
        return event_time < 0

    def _running_on_overtime(self, event_time):
        """True when a passed event is still within the overtime window."""
        return abs(event_time) < self._overtime

    def _file_event(self):
        """Parse the leave_at file into a datetime, or return None.

        The file holds one comma-separated line: year,month,day,hour,minute.
        """
        event = None
        file_name = Constants.time_to_leave_file
        if os.path.exists(file_name):
            try:
                with open(file_name, 'r') as file:
                    for line in file:
                        time_pieces = line.split(",")
                        event = datetime.datetime(
                            year=int(time_pieces[0]),
                            month=int(time_pieces[1]),
                            day=int(time_pieces[2]),
                            hour=int(time_pieces[3]),
                            minute=int(time_pieces[4]))
            except Exception:
                # Fix: was a bare ``except:`` which also swallowed
                # SystemExit/KeyboardInterrupt.
                text = "Event file {} exists, but no time to leave info "
                text += "could be extracted"
                log.warning(text.format(file_name))
        return event
| eragnms/cosycar | cosycar/events.py | Python | mit | 4,065 |
#!/usr/bin/env python
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Author Nikolay Nikolov/niko.b.nikolov@gmail.com
import subprocess
import os
from ..core import InstallFailed
from .pip import PIP_INSTALLER
from ..installers import PackageManagerInstaller
from .source import SOURCE_INSTALLER
from ..shell_utils import read_stdout
SLACKWARE_OS_NAME = 'slackware'
SBOTOOLS_INSTALLER = 'sbotools'
SLACKPKG_INSTALLER = 'slackpkg'
def register_installers(context):
    """Register the Slackware package-manager installers with *context*."""
    context.set_installer(SBOTOOLS_INSTALLER, SbotoolsInstaller())
    context.set_installer(SLACKPKG_INSTALLER, SlackpkgInstaller())
def register_platforms(context):
    """Associate the Slackware OS with its supported installers, with
    sbotools as the default."""
    context.add_os_installer_key(SLACKWARE_OS_NAME, SBOTOOLS_INSTALLER)
    context.add_os_installer_key(SLACKWARE_OS_NAME, PIP_INSTALLER)
    context.add_os_installer_key(SLACKWARE_OS_NAME, SOURCE_INSTALLER)
    context.add_os_installer_key(SLACKWARE_OS_NAME, SLACKPKG_INSTALLER)
    context.set_default_os_installer_key(SLACKWARE_OS_NAME, lambda self: SBOTOOLS_INSTALLER)
def sbotools_available():
    """Return True when the sboinstall executable exists on this system."""
    return os.path.exists("/usr/sbin/sboinstall")
def sbotools_detect_single(p):
    """Return True if package *p* appears installed, i.e. a line in the
    `ls /var/log/packages` output starts with *p* (case-insensitive grep).

    NOTE(review): *p* is not regex-escaped before being passed to grep, and
    the same name *p* is reused for the Popen handle; names containing regex
    metacharacters may mismatch.
    """
    pkg_list = read_stdout(['ls', '/var/log/packages'])
    p = subprocess.Popen(['grep', '-i', '^' + p], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    p.communicate(pkg_list)
    # grep exits 0 on a match, so a zero return code means "installed".
    return not p.returncode
def sbotools_detect(packages):
return [p for p in packages if sbotools_detect_single(p)]
class SbotoolsInstaller(PackageManagerInstaller):
    """rosdep installer backed by sbotools (the SlackBuilds.org helper)."""
    def __init__(self):
        super(SbotoolsInstaller, self).__init__(sbotools_detect)
    def get_install_command(self, resolved, interactive=True, reinstall=False, quiet=False):
        """Return one privilege-elevated `sboinstall` command per package.

        :raises InstallFailed: when sbotools is not installed.
        """
        if not sbotools_available():
            raise InstallFailed((SBOTOOLS_INSTALLER, "sbotools is not installed"))
        packages = self.get_packages_to_install(resolved, reinstall=reinstall)
        if not packages:
            return []
        cmd = ['sboinstall']
        if not interactive:
            # -r: run non-interactively (accept default answers).
            cmd.append('-r')
        return [self.elevate_priv(cmd + [p]) for p in packages]
def slackpkg_available():
    """Return True when the slackpkg executable exists on this system."""
    return os.path.exists("/usr/sbin/slackpkg")
def slackpkg_detect_single(p):
    """Return True when `slackpkg search` finds package *p* (exit status 0)."""
    retcode = subprocess.call(['slackpkg', 'search', p], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return retcode == 0
def slackpkg_detect(packages):
return [p for p in packages if slackpkg_detect_single(p)]
class SlackpkgInstaller(PackageManagerInstaller):
    """rosdep installer backed by slackpkg (official Slackware packages)."""
    def __init__(self):
        super(SlackpkgInstaller, self).__init__(slackpkg_detect)
    def get_install_command(self, resolved, interactive=True, reinstall=False, quiet=False):
        """Return one privilege-elevated `slackpkg install` command per package."""
        # slackpkg does not provide non-interactive mode
        packages = self.get_packages_to_install(resolved, reinstall=reinstall)
        if not packages:
            return []
        else:
            return [self.elevate_priv(['slackpkg', 'install', p]) for p in packages]
| spaghetti-/rosdep | src/rosdep2/platforms/slackware.py | Python | bsd-3-clause | 4,542 |
from __future__ import absolute_import
from sentry.api.serializers import serialize, Serializer, register
from sentry.app import env
from sentry.auth.superuser import is_active_superuser
from sentry.constants import SentryAppStatus
from sentry.models import IntegrationFeature, SentryApp
from sentry.models.sentryapp import MASKED_VALUE
from sentry.utils.compat import map
@register(SentryApp)
class SentryAppSerializer(Serializer):
    """API serializer for SentryApp models.

    Feature data is attached for non-internal apps, the publish date for
    published apps, and the OAuth client credentials only for superusers or
    members of the owning organization (the secret is masked unless the
    app's show_auth_info check passes).
    """
    def serialize(self, obj, attrs, user, access):
        from sentry.mediators.service_hooks.creator import consolidate_events
        data = {
            "name": obj.name,
            "slug": obj.slug,
            "author": obj.author,
            "scopes": obj.get_scopes(),
            "events": consolidate_events(obj.events),
            "status": obj.get_status_display(),
            "schema": obj.schema,
            "uuid": obj.uuid,
            "webhookUrl": obj.webhook_url,
            "redirectUrl": obj.redirect_url,
            "isAlertable": obj.is_alertable,
            "verifyInstall": obj.verify_install,
            "overview": obj.overview,
            "allowedOrigins": obj.application.get_allowed_origins(),
        }
        # Internal apps carry no feature entries.
        data["featureData"] = []
        if obj.status != SentryAppStatus.INTERNAL:
            features = IntegrationFeature.objects.filter(sentry_app_id=obj.id)
            data["featureData"] = map(lambda x: serialize(x, user), features)
        if obj.status == SentryAppStatus.PUBLISHED and obj.date_published:
            data.update({"datePublished": obj.date_published})
        # Credentials are only exposed to superusers or owning-org members.
        if (env.request and is_active_superuser(env.request)) or (
            hasattr(user, "get_orgs") and obj.owner in user.get_orgs()
        ):
            client_secret = (
                obj.application.client_secret if obj.show_auth_info(access) else MASKED_VALUE
            )
            data.update(
                {
                    "clientId": obj.application.client_id,
                    "clientSecret": client_secret,
                    "owner": {"id": obj.owner.id, "slug": obj.owner.slug},
                }
            )
        return data
| beeftornado/sentry | src/sentry/api/serializers/models/sentry_app.py | Python | bsd-3-clause | 2,133 |
"""
Gitlab API: https://docs.gitlab.com/ee/api/merge_request_approvals.html
"""
import copy
import pytest
import responses
import gitlab
approval_rule_id = 1
approval_rule_name = "security"
approvals_required = 3
user_ids = [5, 50]
group_ids = [5]
new_approval_rule_name = "new approval rule"
new_approval_rule_user_ids = user_ids
new_approval_rule_approvals_required = 2
updated_approval_rule_user_ids = [5]
updated_approval_rule_approvals_required = 1
@pytest.fixture
def resp_mr_approval_rules():
    """Register canned HTTP responses for MR approval-rule endpoints.

    Covers GET (list rules), GET (approval state), POST (create rule)
    and PUT (update rule 1) on merge request 1 of project 1.
    """
    # One "regular" rule with two eligible approvers: a direct user and
    # a member of the approving group.
    mr_ars_content = [
        {
            "id": approval_rule_id,
            "name": approval_rule_name,
            "rule_type": "regular",
            "eligible_approvers": [
                {
                    "id": user_ids[0],
                    "name": "John Doe",
                    "username": "jdoe",
                    "state": "active",
                    "avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
                    "web_url": "http://localhost/jdoe",
                },
                {
                    "id": user_ids[1],
                    "name": "Group Member 1",
                    "username": "group_member_1",
                    "state": "active",
                    "avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
                    "web_url": "http://localhost/group_member_1",
                },
            ],
            "approvals_required": approvals_required,
            "source_rule": None,
            "users": [
                {
                    "id": 5,
                    "name": "John Doe",
                    "username": "jdoe",
                    "state": "active",
                    "avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
                    "web_url": "http://localhost/jdoe",
                }
            ],
            "groups": [
                {
                    "id": 5,
                    "name": "group1",
                    "path": "group1",
                    "description": "",
                    "visibility": "public",
                    "lfs_enabled": False,
                    "avatar_url": None,
                    "web_url": "http://localhost/groups/group1",
                    "request_access_enabled": False,
                    "full_name": "group1",
                    "full_path": "group1",
                    "parent_id": None,
                    "ldap_cn": None,
                    "ldap_access": None,
                }
            ],
            "contains_hidden_groups": False,
            "overridden": False,
        }
    ]
    # The approval_state payload reuses the rule list, augmented with
    # per-rule approval status.
    approval_state_rules = copy.deepcopy(mr_ars_content)
    approval_state_rules[0]["approved"] = False
    approval_state_rules[0]["approved_by"] = []
    mr_approval_state_content = {
        "approval_rules_overwritten": False,
        "rules": approval_state_rules,
    }
    # Not every test hits every registered URL, hence
    # assert_all_requests_are_fired=False.
    with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/projects/1/merge_requests/1/approval_rules",
            json=mr_ars_content,
            content_type="application/json",
            status=200,
        )
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/projects/1/merge_requests/1/approval_state",
            json=mr_approval_state_content,
            content_type="application/json",
            status=200,
        )
        # POST (create) echoes the rule back with the new name and count.
        new_mr_ars_content = dict(mr_ars_content[0])
        new_mr_ars_content["name"] = new_approval_rule_name
        new_mr_ars_content["approvals_required"] = new_approval_rule_approvals_required
        rsps.add(
            method=responses.POST,
            url="http://localhost/api/v4/projects/1/merge_requests/1/approval_rules",
            json=new_mr_ars_content,
            content_type="application/json",
            status=200,
        )
        # PUT (update) drops the group member and lowers the required count.
        updated_mr_ars_content = copy.deepcopy(mr_ars_content[0])
        updated_mr_ars_content["eligible_approvers"] = [
            mr_ars_content[0]["eligible_approvers"][0]
        ]
        updated_mr_ars_content[
            "approvals_required"
        ] = updated_approval_rule_approvals_required
        rsps.add(
            method=responses.PUT,
            url="http://localhost/api/v4/projects/1/merge_requests/1/approval_rules/1",
            json=updated_mr_ars_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_delete_mr_approval_rule(no_content):
    """Mock DELETE of approval rule 1 on MR 1; returns 204 No Content."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.DELETE,
            url="http://localhost/api/v4/projects/1/merge_requests/1/approval_rules/1",
            json=no_content,
            content_type="application/json",
            status=204,
        )
        yield rsps
def test_project_approval_manager_update_uses_post(project):
    """The project-level ProjectApprovalManager must issue POST (not PUT)
    for updates."""
    manager = project.approvals
    assert isinstance(
        manager, gitlab.v4.objects.merge_request_approvals.ProjectApprovalManager
    )
    assert manager._update_uses_post is True
def test_list_merge_request_approval_rules(project, resp_mr_approval_rules):
    """Listing approval rules returns the single mocked rule."""
    mr = project.mergerequests.get(1, lazy=True)
    rules = mr.approval_rules.list()
    assert len(rules) == 1
    rule = rules[0]
    assert rule.id == approval_rule_id
    assert rule.name == approval_rule_name
def test_delete_merge_request_approval_rule(project, resp_delete_mr_approval_rule):
    """Deleting a rule issues the mocked DELETE without raising."""
    mr = project.mergerequests.get(1, lazy=True)
    mr.approval_rules.delete(approval_rule_id)
def test_update_merge_request_approvals_set_approvers(project, resp_mr_approval_rules):
    """set_approvers() with an existing rule name updates that rule."""
    approvals = project.mergerequests.get(1, lazy=True).approvals
    assert isinstance(
        approvals,
        gitlab.v4.objects.merge_request_approvals.ProjectMergeRequestApprovalManager,
    )
    assert approvals._update_uses_post is True

    rule = approvals.set_approvers(
        updated_approval_rule_approvals_required,
        approver_ids=updated_approval_rule_user_ids,
        approver_group_ids=group_ids,
        approval_rule_name=approval_rule_name,
    )

    assert rule.name == approval_rule_name
    assert rule.approvals_required == updated_approval_rule_approvals_required
    approvers = rule.eligible_approvers
    assert len(approvers) == len(updated_approval_rule_user_ids)
    assert approvers[0]["id"] == updated_approval_rule_user_ids[0]
def test_create_merge_request_approvals_set_approvers(project, resp_mr_approval_rules):
    """set_approvers() with a new rule name creates a new rule."""
    approvals = project.mergerequests.get(1, lazy=True).approvals
    assert isinstance(
        approvals,
        gitlab.v4.objects.merge_request_approvals.ProjectMergeRequestApprovalManager,
    )
    assert approvals._update_uses_post is True

    created = approvals.set_approvers(
        new_approval_rule_approvals_required,
        approver_ids=new_approval_rule_user_ids,
        approver_group_ids=group_ids,
        approval_rule_name=new_approval_rule_name,
    )

    assert created.name == new_approval_rule_name
    assert created.approvals_required == new_approval_rule_approvals_required
    approvers = created.eligible_approvers
    assert len(approvers) == len(new_approval_rule_user_ids)
    assert approvers[0]["id"] == new_approval_rule_user_ids[0]
def test_create_merge_request_approval_rule(project, resp_mr_approval_rules):
    """Creating a rule directly via the approval_rules manager works."""
    rules = project.mergerequests.get(1, lazy=True).approval_rules
    payload = {
        "name": new_approval_rule_name,
        "approvals_required": new_approval_rule_approvals_required,
        "rule_type": "regular",
        "user_ids": new_approval_rule_user_ids,
        "group_ids": group_ids,
    }
    created = rules.create(payload)
    assert created.name == new_approval_rule_name
    assert created.approvals_required == new_approval_rule_approvals_required
    assert len(created.eligible_approvers) == len(new_approval_rule_user_ids)
    assert created.eligible_approvers[0]["id"] == new_approval_rule_user_ids[0]
def test_update_merge_request_approval_rule(project, resp_mr_approval_rules):
    """Mutating and saving a fetched rule issues the mocked PUT."""
    rules = project.mergerequests.get(1, lazy=True).approval_rules
    rule = rules.list()[0]
    rule.user_ids = updated_approval_rule_user_ids
    rule.approvals_required = updated_approval_rule_approvals_required
    rule.save()

    assert rule.approvals_required == updated_approval_rule_approvals_required
    assert len(rule.eligible_approvers) == len(updated_approval_rule_user_ids)
    assert rule.eligible_approvers[0]["id"] == updated_approval_rule_user_ids[0]
def test_get_merge_request_approval_state(project, resp_mr_approval_rules):
    """The approval_state endpoint returns rules plus approval status."""
    state = project.mergerequests.get(1, lazy=True).approval_state.get()
    assert isinstance(
        state,
        gitlab.v4.objects.merge_request_approvals.ProjectMergeRequestApprovalState,
    )
    assert not state.approval_rules_overwritten

    assert len(state.rules) == 1
    rule = state.rules[0]
    assert rule["name"] == approval_rule_name
    assert rule["id"] == approval_rule_id
    assert not rule["approved"]
    assert rule["approved_by"] == []
| python-gitlab/python-gitlab | tests/unit/objects/test_project_merge_request_approvals.py | Python | lgpl-3.0 | 9,503 |
###############################################################################
# Copyright 2006 to the present, Orbitz Worldwide, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# Sentinel meaning "no default supplied"; lets None be a valid default.
noDefault = object()


class EntityContext(object):
    """Abstract base class for representing context about an Entity with
    read-only metadata transparently mixed in as "special keys".

    Subclasses must override entityAttr and specialKeys attributes and
    for each specialKey a get_<key> method must be implemented.
    """
    entityAttr = None   # name of the attribute the entity is stored under
    specialKeys = []    # computed, read-only keys backed by get_<key> methods

    def __init__(self, entity):
        #Verify that the subclass conforms to the API
        assert self.entityAttr is not None
        for key in self.specialKeys:
            assert hasattr(self, 'get_' + key)
        setattr(self, self.entityAttr, entity)
        self.data = {}

    def __getitem__(self, key):
        # Special keys are computed on every access, never stored.
        if key in self.specialKeys:
            accessor = getattr(self, 'get_' + key)
            return accessor()
        if key in self.data:
            return self.data[key]
        else:
            entity = getattr(self, self.entityAttr)
            raise KeyError("\"%s\" is not in the %s context" % (key, entity))

    def __setitem__(self, key, value):
        if key in self.specialKeys:
            raise KeyError("Cannot override special key \"%s\"" % key)
        self.data[key] = value

    def __delitem__(self, key):
        if key in self.specialKeys:
            raise KeyError("Cannot delete special key \"%s\"" % key)
        del self.data[key]

    def __contains__(self, key):
        return key in self.data or key in self.specialKeys

    def __iter__(self):
        # Special keys first, then plain data keys.
        for key in self.specialKeys:
            yield key
        for key in self.data:
            yield key

    def get(self, key, default=noDefault):
        """Dict-style get(); honors special keys.

        Raises KeyError for a missing key unless a default is given.
        """
        try:
            return self[key]
        except KeyError:
            # Identity test: `==` could be fooled by an object with a
            # permissive __eq__; the sentinel must only match itself.
            if default is noDefault:
                raise
            else:
                return default

    def pop(self, key, default=noDefault):
        """Remove and return a stored key.

        Only plain data keys can be popped; popping a special key raises
        KeyError (or returns `default` if one was given).
        """
        try:
            return self.data.pop(key)
        except KeyError:
            # Same identity test as get() -- see comment there.
            if default is noDefault:
                raise
            else:
                return default

    def keys(self):
        return list(self)

    def values(self):
        return [ self[key] for key in self ]

    def items(self):
        return [ (key, self[key]) for key in self ]

    def update(self, otherDict):
        # Bypasses the special-key guard in __setitem__ by design of the
        # original implementation; only plain data is updated.
        return self.data.update(otherDict)

    def clear(self):
        return self.data.clear()

    def copy(self):
        # Plain dict snapshot, with special keys evaluated.
        return dict( self.items() )

    def __repr__(self):
        return repr( self.copy() )
| OrbitzWorldwide/droned | romeo/lib/romeo/context.py | Python | apache-2.0 | 3,267 |
#!/usr/bin/env python
"""
Gather all information about the software
"""
NAME = "Vitalus"
URL = "https://github.com/sciunto/Vitalus"
LICENSE = "GPLv3+"
EMAIL = "fboulogne@sciunto.org"
SHORT_DESCRIPTION = "Rsync wrapper for backups"
# Canonical, correctly-spelled author list.
AUTHORS = ["Francois Boulogne <fboulogne@sciunto.org>"]
# Backward-compatible alias: the original name was misspelled and may
# still be referenced by callers.
AUHTORS = AUTHORS
| sciunto/Vitalus | Vitalus/info.py | Python | gpl-3.0 | 292 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-04-06 18:35
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the PlayerSetting model: per-player settings (currently
    just `dark_mode`) linked one-to-one to tournament.Player."""

    dependencies = [
        ('tournament', '0182_auto_20180728_2344'),
    ]

    operations = [
        migrations.CreateModel(
            name='PlayerSetting',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date_created', models.DateTimeField(auto_now_add=True)),
                ('date_modified', models.DateTimeField(auto_now=True)),
                ('dark_mode', models.BooleanField(default=False)),
                ('player', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='tournament.Player')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| cyanfish/heltour | heltour/tournament/migrations/0183_playersetting.py | Python | mit | 968 |
# Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
This modules contains a ARM disassembler based on the Capstone
disassembly framework.
"""
from __future__ import absolute_import
import logging
from capstone import CS_ARCH_ARM
from capstone import CS_MODE_ARM
from capstone import CS_MODE_THUMB
from capstone import Cs
from capstone.arm import ARM_CC_AL
from capstone.arm import ARM_CC_EQ
from capstone.arm import ARM_CC_GE
from capstone.arm import ARM_CC_GT
from capstone.arm import ARM_CC_HI
from capstone.arm import ARM_CC_HS
from capstone.arm import ARM_CC_INVALID
from capstone.arm import ARM_CC_LE
from capstone.arm import ARM_CC_LO
from capstone.arm import ARM_CC_LS
from capstone.arm import ARM_CC_LT
from capstone.arm import ARM_CC_MI
from capstone.arm import ARM_CC_NE
from capstone.arm import ARM_CC_PL
from capstone.arm import ARM_CC_VC
from capstone.arm import ARM_CC_VS
from capstone.arm import ARM_OP_IMM
from capstone.arm import ARM_OP_MEM
from capstone.arm import ARM_OP_REG
from capstone.arm import ARM_SFT_ASR
from capstone.arm import ARM_SFT_ASR_REG
from capstone.arm import ARM_SFT_LSL
from capstone.arm import ARM_SFT_LSL_REG
from capstone.arm import ARM_SFT_LSR
from capstone.arm import ARM_SFT_LSR_REG
from capstone.arm import ARM_SFT_ROR
from capstone.arm import ARM_SFT_ROR_REG
from capstone.arm import ARM_SFT_RRX
from capstone.arm import ARM_SFT_RRX_REG
from barf.arch import ARCH_ARM_MODE_ARM
from barf.arch import ARCH_ARM_MODE_THUMB
from barf.arch.arm import ARM_COND_CODE_AL
from barf.arch.arm import ARM_COND_CODE_EQ
from barf.arch.arm import ARM_COND_CODE_GE
from barf.arch.arm import ARM_COND_CODE_GT
from barf.arch.arm import ARM_COND_CODE_HI
from barf.arch.arm import ARM_COND_CODE_HS
from barf.arch.arm import ARM_COND_CODE_LE
from barf.arch.arm import ARM_COND_CODE_LO
from barf.arch.arm import ARM_COND_CODE_LS
from barf.arch.arm import ARM_COND_CODE_LT
from barf.arch.arm import ARM_COND_CODE_MI
from barf.arch.arm import ARM_COND_CODE_NE
from barf.arch.arm import ARM_COND_CODE_PL
from barf.arch.arm import ARM_COND_CODE_VC
from barf.arch.arm import ARM_COND_CODE_VS
from barf.arch.arm import ARM_MEMORY_INDEX_OFFSET
from barf.arch.arm import ArmArchitectureInformation
from barf.arch.arm import ArmImmediateOperand
from barf.arch.arm import ArmInstruction
from barf.arch.arm import ArmMemoryOperand
from barf.arch.arm import ArmRegisterListOperand
from barf.arch.arm import ArmRegisterOperand
from barf.arch.arm import ArmShiftedRegisterOperand
from barf.arch.arm import arm_alias_reg_map
from barf.arch.arm import cc_inverse_mapper
from barf.arch.arm import ldm_stm_am_mapper
# from barf.arch.arm.parser import ArmParser
from barf.arch.disassembler import Disassembler
from barf.arch.disassembler import DisassemblerError
from barf.arch.disassembler import InvalidDisassemblerData
# Translation table: Capstone ARM condition-code constants to the
# equivalent BARF condition-code constants.
cc_capstone_barf_mapper = {
    ARM_CC_EQ: ARM_COND_CODE_EQ,
    ARM_CC_NE: ARM_COND_CODE_NE,
    ARM_CC_MI: ARM_COND_CODE_MI,
    ARM_CC_PL: ARM_COND_CODE_PL,
    ARM_CC_VS: ARM_COND_CODE_VS,
    ARM_CC_VC: ARM_COND_CODE_VC,
    ARM_CC_HI: ARM_COND_CODE_HI,
    ARM_CC_LS: ARM_COND_CODE_LS,
    ARM_CC_GE: ARM_COND_CODE_GE,
    ARM_CC_LT: ARM_COND_CODE_LT,
    ARM_CC_GT: ARM_COND_CODE_GT,
    ARM_CC_LE: ARM_COND_CODE_LE,
    ARM_CC_AL: ARM_COND_CODE_AL,
    ARM_CC_HS: ARM_COND_CODE_HS,
    ARM_CC_LO: ARM_COND_CODE_LO,
}
logger = logging.getLogger(__name__)
class CapstoneOperandNotSupported(Exception):
    """Raised when a Capstone operand type has no BARF translation."""
    pass
# class ArmDisassembler(Disassembler):
# """ARM Disassembler.
# """
# def __init__(self, architecture_mode=ARCH_ARM_MODE_THUMB):
# super(ArmDisassembler, self).__init__()
# arch_map = {
# ARCH_ARM_MODE_ARM : CS_MODE_ARM,
# ARCH_ARM_MODE_THUMB : CS_MODE_THUMB,
# }
# self._parser = ArmParser(architecture_mode)
# self._disassembler = Cs(CS_ARCH_ARM, arch_map[architecture_mode])
# def disassemble(self, data, address):
# """Disassemble the data into an instruction.
# """
# asm, size = self._cs_disassemble_one(data, address)
# instr = self._parser.parse(asm) if asm else None
# if instr:
# instr.address = address
# instr.size = size
# instr.bytes = data[0:size]
# return instr
# def disassemble_all(self, data, address):
# """Disassemble the data into multiple instructions.
# """
# raise NotImplementedError()
# def _cs_disassemble_one(self, data, address):
# """Disassemble the data into an instruction in string form.
# """
# asm, size = "", 0
# disasm = list(self._disassembler.disasm_lite(data, address))
# if len(disasm) > 0:
# address, size, mnemonic, op_str = disasm[0]
# asm = str(mnemonic + " " + op_str).strip()
# else:
# # FIXME: Hack to bypass immediate constants embedded in the
# # text section that do not conform to any valid instruction.
# asm = "mov r0, r0" # Preferred ARM no-operation code
# size = 4
# return asm, size
class ArmDisassembler(Disassembler):
    """ARM Disassembler.

    Wraps two Capstone engines (ARM and THUMB mode) and translates each
    Capstone instruction into the BARF ArmInstruction representation.
    """

    def __init__(self, architecture_mode=ARCH_ARM_MODE_THUMB):
        super(ArmDisassembler, self).__init__()

        self._arch_mode = architecture_mode
        self._arch_info = ArmArchitectureInformation(architecture_mode)

        # One Capstone instance per ARM mode (ARM / THUMB), built below.
        self._available_disassemblers = {}

        self.__setup_available_disassemblers()

        # TODO: define default disassembler externally
        self._disassembler = self._available_disassemblers[architecture_mode]

    def disassemble(self, data, address, architecture_mode=None):
        """Disassemble the data into an instruction.

        Raises DisassemblerError when translation yields nothing and
        InvalidDisassemblerData when Capstone cannot decode the bytes.
        """
        # TODO: Improve this code!
        if architecture_mode is None:
            if self._arch_mode is None:
                architecture_mode = ARCH_ARM_MODE_THUMB
            else:
                architecture_mode = self._arch_mode

        self._disassembler = self._available_disassemblers[architecture_mode]

        disasm = self._cs_disassemble_one(data, address)

        instr = self._cs_translate_insn(disasm)

        if instr:
            instr.address = address
            instr.size = disasm.size
            instr.bytes = data[0:disasm.size]
        else:
            raise DisassemblerError()

        return instr

    def disassemble_all(self, data, address):
        """Disassemble the data into multiple instructions.
        """
        raise NotImplementedError()

    def _cs_disassemble_one(self, data, address):
        """Disassemble the data into an instruction in string form.
        """
        disasm = list(self._disassembler.disasm(bytes(data), address))

        # TODO: Improve this check.
        if len(disasm) > 0:
            return disasm[0]
        else:
            # Fallback: retry with a fresh ARM-mode engine.
            # NOTE(review): when the current mode is already ARM this
            # simply retries the same decode -- confirm the fallback is
            # only intended for THUMB streams containing ARM code.
            cs_arm = Cs(CS_ARCH_ARM, CS_MODE_ARM)
            cs_arm.detail = True
            disasm = list(cs_arm.disasm(bytes(data), address))

            if len(disasm) > 0:
                return disasm[0]
            else:
                raise InvalidDisassemblerData("CAPSTONE: Unknown instruction (Addr: {:s}).".format(hex(address)))

    def __setup_available_disassemblers(self):
        # Build one Capstone engine per supported ARM mode; detail mode
        # is required for operand-level translation below.
        arch_map = {
            ARCH_ARM_MODE_ARM: CS_MODE_ARM,
            ARCH_ARM_MODE_THUMB: CS_MODE_THUMB,
        }

        self._available_disassemblers = {
            ARCH_ARM_MODE_ARM: Cs(CS_ARCH_ARM, arch_map[ARCH_ARM_MODE_ARM]),
            ARCH_ARM_MODE_THUMB: Cs(CS_ARCH_ARM, arch_map[ARCH_ARM_MODE_THUMB]),
        }

        self._available_disassemblers[ARCH_ARM_MODE_ARM].detail = True
        self._available_disassemblers[ARCH_ARM_MODE_THUMB].detail = True

    # Casptone to BARF translation
    # ======================================================================== #
    def __cs_reg_idx_to_arm_op_reg(self, cs_reg_idx, cs_insn):
        """Map a Capstone register index to an ArmRegisterOperand."""
        name = str(cs_insn.reg_name(cs_reg_idx))
        # Normalize aliases (e.g. sb/sl/fp style names) to canonical ones.
        if name in arm_alias_reg_map:
            name = arm_alias_reg_map[name]
        if name in self._arch_info.registers_size:
            size = self._arch_info.registers_size[name]
        else:
            # Unknown register name: assume native word size.
            size = self._arch_info.architecture_size
        return ArmRegisterOperand(name, size)

    def __cs_shift_to_arm_op(self, cs_op, cs_insn, arm_base):
        """Build an ArmShiftedRegisterOperand from a Capstone shift."""
        if cs_op.shift.type == 0:
            raise Exception("Invalid shift type.")

        cs_shift_mapper = {
            ARM_SFT_ASR: "asr",
            ARM_SFT_LSL: "lsl",
            ARM_SFT_LSR: "lsr",
            ARM_SFT_ROR: "ror",
            ARM_SFT_RRX: "rrx",
            ARM_SFT_ASR_REG: "asr",
            ARM_SFT_LSL_REG: "lsl",
            ARM_SFT_LSR_REG: "lsr",
            ARM_SFT_ROR_REG: "ror",
            ARM_SFT_RRX_REG: "rrx",
        }

        # The base register (arm_base) is not included in the shift
        # struct in Capstone, so it's provided separately.
        sh_type = cs_shift_mapper[cs_op.shift.type]

        # Immediate-shift variants come first in the enum; register-shift
        # variants (_REG) follow.
        if cs_op.shift.type <= ARM_SFT_RRX:
            amount = ArmImmediateOperand(cs_op.shift.value, self._arch_info.operand_size)
            # TODO: check if this is a valid case.
            if cs_op.shift.value == 0:
                raise Exception("Shift value is zero.")
        elif cs_op.shift.type <= ARM_SFT_RRX_REG:
            amount = self.__cs_reg_idx_to_arm_op_reg(cs_op.shift.value, cs_insn)
        else:
            raise Exception("Unknown shift type.")

        return ArmShiftedRegisterOperand(arm_base, sh_type, amount, arm_base.size)

    def __cs_translate_operand(self, cs_op, cs_insn):
        """Translate a single Capstone operand to its BARF equivalent."""
        if cs_op.type == ARM_OP_REG:
            reg = self.__cs_reg_idx_to_arm_op_reg(cs_op.value.reg, cs_insn)
            if cs_op.shift.type > 0:
                oprnd = self.__cs_shift_to_arm_op(cs_op, cs_insn, reg)
            else:
                oprnd = reg
        elif cs_op.type == ARM_OP_IMM:
            oprnd = ArmImmediateOperand(cs_op.value.imm, self._arch_info.operand_size)
        elif cs_op.type == ARM_OP_MEM:
            reg_base = self.__cs_reg_idx_to_arm_op_reg(cs_op.mem.base, cs_insn)

            # TODO: memory index type
            index_type = ARM_MEMORY_INDEX_OFFSET

            # Displacement is either a (possibly shifted) index register
            # or an immediate, never both.
            if cs_op.mem.index > 0:
                if cs_op.mem.disp > 0:
                    raise Exception("ARM_OP_MEM: Both index and disp > 0, only one can be.")
                displacement = self.__cs_reg_idx_to_arm_op_reg(cs_op.mem.index, cs_insn)
                # NOTE: In the case of a memory operand, in the second
                # position (slot [1]), the information regarding whether
                # or not the displacement of the operand has a shifted
                # register is encoded in the first operand (slot [0]),
                # that doesn't have a direct relation with the other.
                # TODO: Check if this has to be reported to CS.
                if cs_insn.operands[0].shift.type > 0:
                    # There's a shift operation, the displacement extracted
                    # earlier was just the base register of the shifted
                    # register that is generating the displacement.
                    displacement = self.__cs_shift_to_arm_op(cs_insn.operands[0], cs_insn, displacement)
            else:
                displacement = ArmImmediateOperand(cs_op.mem.disp, self._arch_info.operand_size)

            # NOTE(review): Capstone exposes a `subtracted` flag for
            # negative offsets; deriving the minus sign from
            # `mem.index == -1` looks suspicious -- confirm.
            disp_minus = True if cs_op.mem.index == -1 else False

            oprnd = ArmMemoryOperand(reg_base, index_type, displacement, disp_minus, self._arch_info.operand_size)
        else:
            error_msg = "Instruction: " + cs_insn.mnemonic + " " + cs_insn.op_str + ". Unknown operand type: " + str(cs_op.type)
            logger.error(error_msg)
            raise CapstoneOperandNotSupported(error_msg)

        return oprnd

    def _cs_translate_insn(self, cs_insn):
        """Translate a full Capstone instruction to an ArmInstruction.

        Normalizes the mnemonic (strips .w/.n, condition-code, update-flags
        and LDM/STM addressing-mode suffixes) and converts all operands.
        """
        operands = [self.__cs_translate_operand(op, cs_insn) for op in cs_insn.operands]

        mnemonic = cs_insn.mnemonic

        # Special case: register list "{rX - rX}", stored as a series of
        # registers has to be converted to ArmRegisterListOperand.
        if "{" in cs_insn.op_str:
            reg_list = []
            op_translated = []
            if not("push" in mnemonic or "pop" in mnemonic):
                # First operand is the base (in push/pop, the base
                # register, sp is omitted)
                op_translated.append(operands[0])
                operands = operands[1:]
            for r in operands:
                reg_list.append([r])
            op_translated.append(ArmRegisterListOperand(reg_list, reg_list[0][0].size))
            operands = op_translated

        # Remove narrow/wide compiler suffixes (.w/.n), they are of no
        # interest for translation purposes
        if mnemonic[-2:] == ".w" or mnemonic[-2:] == ".n":
            mnemonic = mnemonic[:-2]

        # Remove condition code from the mnemonic, this goes first than the
        # removal of the update flags suffix, because according to UAL syntax
        # the this suffix goes after the update flags suffix in the mnemonic.
        if cs_insn.cc != ARM_CC_INVALID and cs_insn.cc != ARM_CC_AL:
            cc_suffix_str = cc_inverse_mapper[cc_capstone_barf_mapper[cs_insn.cc]]
            if cc_suffix_str == mnemonic[-2:]:
                mnemonic = mnemonic[:-2]

        # Remove update flags suffix (s)
        if cs_insn.update_flags and mnemonic[-1] == 's':
            mnemonic = mnemonic[:-1]

        # Remove LDM/STM addressing modes from the mnemonic, later include it in the ArmInstruction
        if mnemonic[0:3] == "ldm" or mnemonic[0:3] == "stm":
            ldm_stm_am = None
            if mnemonic[-2:] in ldm_stm_am_mapper:
                ldm_stm_am = ldm_stm_am_mapper[mnemonic[-2:]]
                mnemonic = mnemonic[:-2]

        # TODO: Temporary hack to accommodate THUMB short notation:
        # "add r0, r1" -> "add r0, r0, r1"
        if len(operands) == 2 and (mnemonic == "add" or
                                   mnemonic == "eor" or
                                   mnemonic == "orr" or
                                   mnemonic == "sub"):
            operands = [operands[0], operands[0], operands[1]]

        instr = ArmInstruction(
            mnemonic + " " + cs_insn.op_str,
            mnemonic,
            operands,
            self._arch_mode
        )

        if cs_insn.cc != ARM_CC_INVALID:
            instr.condition_code = cc_capstone_barf_mapper[cs_insn.cc]

        if cs_insn.update_flags:
            instr.update_flags = True

        if mnemonic[0:3] == "ldm" or mnemonic[0:3] == "stm":
            instr.ldm_stm_addr_mode = ldm_stm_am

        # "!" marks write-back to the base register (first operand).
        if "!" in cs_insn.op_str:
            instr.operands[0].wb = True

        # TODO: LOAD/STORE MODE (it may be necessary to parse the mnemonic).

        return instr
| programa-stic/barf-project | barf/arch/arm/disassembler.py | Python | bsd-2-clause | 16,224 |
#!/usr/bin/env python
# encoding: utf-8
"""Snippet representation after parsing."""
import re
import vim
import textwrap
from UltiSnips import _vim
from UltiSnips.compatibility import as_unicode
from UltiSnips.indent_util import IndentUtil
from UltiSnips.text import escape
from UltiSnips.text_objects import SnippetInstance
from UltiSnips.text_objects._python_code import SnippetUtilForAction
__WHITESPACE_SPLIT = re.compile(r"\s")
class _SnippetUtilCursor(object):
def __init__(self, cursor):
self._cursor = [cursor[0] - 1, cursor[1]]
self._set = False
def preserve(self):
self._set = True
self._cursor = [
_vim.buf.cursor[0],
_vim.buf.cursor[1],
]
def is_set(self):
return self._set
def set(self, line, column):
self.__setitem__(0, line)
self.__setitem__(1, column)
def to_vim_cursor(self):
return (self._cursor[0] + 1, self._cursor[1])
def __getitem__(self, index):
return self._cursor[index]
def __setitem__(self, index, value):
self._set = True
self._cursor[index] = value
def __len__(self):
return 2
def __str__(self):
return str((self._cursor[0], self._cursor[1]))
def split_at_whitespace(string):
    """Like string.split(), but keeps empty words as empty words."""
    # Split on every single whitespace char, so consecutive separators
    # yield empty strings instead of being collapsed.
    return __WHITESPACE_SPLIT.split(string)
def _words_for_line(trigger, before, num_words=None):
    """Gets the final 'num_words' words from 'before'.

    If num_words is None, then use the number of words in 'trigger'.
    """
    if num_words is None:
        num_words = len(split_at_whitespace(trigger))

    words = split_at_whitespace(before)
    if len(words) <= num_words:
        return before.strip()

    # Walk backwards over the last `num_words` words, chopping each one
    # (and whatever trailed it) off the end of `remainder`; what is left
    # of `before` after `remainder` is the wanted tail.
    remainder = before
    for word in reversed(words[-num_words:]):
        remainder = remainder[:remainder.rfind(word)]
    return before[len(remainder):].strip()
class SnippetDefinition(object):
"""Represents a snippet as parsed from a file."""
_INDENT = re.compile(r"^[ \t]*")
_TABS = re.compile(r"^\t*")
    def __init__(self, priority, trigger, value, description,
                 options, globals, location, context, actions):
        """Store the parsed snippet fields.

        `options` is the option string (e.g. "b", "w", "r", "i");
        `globals` holds the `global !p` blocks; `context` is the raw
        python context expression (or falsy); `actions` maps action
        names (e.g. "pre_expand") to python code strings.
        """
        self._priority = int(priority)
        self._trigger = as_unicode(trigger)
        self._value = as_unicode(value)
        self._description = as_unicode(description)
        self._opts = options
        self._matched = ''
        self._last_re = None
        self._globals = globals
        self._location = location
        self._context_code = context
        self._context = None
        self._actions = actions

        # Make sure that we actually match our trigger in case we are
        # immediately expanded.
        self.matches(self._trigger)
def __repr__(self):
return '_SnippetDefinition(%r,%s,%s,%s)' % (
self._priority, self._trigger, self._description, self._opts)
def _re_match(self, trigger):
"""Test if a the current regex trigger matches `trigger`.
If so, set _last_re and _matched.
"""
for match in re.finditer(self._trigger, trigger):
if match.end() != len(trigger):
continue
else:
self._matched = trigger[match.start():match.end()]
self._last_re = match
return match
return False
    def _context_match(self, visual_content):
        """Evaluate the snippet's context expression and return its value.

        Returns None (without evaluating) on a completely empty buffer.
        `visual_content`, if given, supplies the visual selection exposed
        to the expression as visual_mode/visual_text/last_placeholder.
        """
        # skip on empty buffer
        if len(vim.current.buffer) == 1 and vim.current.buffer[0] == "":
            return

        locals = {
            'context': None,
            'visual_mode': '',
            'visual_text': '',
            'last_placeholder': None
        }

        if visual_content:
            locals['visual_mode'] = visual_content.mode
            locals['visual_text'] = visual_content.text
            locals['last_placeholder'] = visual_content.placeholder

        # The expression's result is captured through snip.context.
        return self._eval_code('snip.context = ' + self._context_code,
                               locals).context
    def _eval_code(self, code, additional_locals={}):
        """Execute python `code` and return the SnippetUtilForAction used.

        The snippet's `global !p` blocks and a default import line are
        prepended.  On failure the exception is annotated with debug
        info and re-raised.

        NOTE(review): `additional_locals={}` is a mutable default; it is
        only read here (via locals.update), never mutated, so it appears
        harmless -- confirm before relying on it elsewhere.
        """
        code = "\n".join([
            'import re, os, vim, string, random',
            '\n'.join(self._globals.get('!p', [])).replace('\r\n', '\n'),
            code
        ])

        current = vim.current

        # Standard locals every action sees, plus the caller's extras.
        locals = {
            'window': current.window,
            'buffer': current.buffer,
            'line': current.window.cursor[0]-1,
            'column': current.window.cursor[1]-1,
            'cursor': _SnippetUtilCursor(current.window.cursor),
        }

        locals.update(additional_locals)

        snip = SnippetUtilForAction(locals)

        try:
            exec(code, {'snip': snip})
        except Exception as e:
            self._make_debug_exception(e, code)
            raise

        return snip
    def _execute_action(
        self,
        action,
        context,
        additional_locals={}
    ):
        """Run an action code block and validate cursor handling.

        A scratch mark tracks the original cursor position.  If the
        action modified the cursor line without setting `snip.cursor`,
        a RuntimeError is raised.  Returns the SnippetUtilForAction.
        """
        mark_to_use = '`'
        with _vim.save_mark(mark_to_use):
            _vim.set_mark_from_pos(mark_to_use, _vim.get_cursor_pos())

            cursor_line_before = _vim.buf.line_till_cursor

            locals = {
                'context': context,
            }

            locals.update(additional_locals)

            snip = self._eval_code(action, locals)

            if snip.cursor.is_set():
                vim.current.window.cursor = snip.cursor.to_vim_cursor()
            else:
                # Action did not set the cursor explicitly: restore it
                # from the mark and verify the line was not changed.
                new_mark_pos = _vim.get_mark_pos(mark_to_use)

                cursor_invalid = False

                if _vim._is_pos_zero(new_mark_pos):
                    cursor_invalid = True
                else:
                    _vim.set_cursor_from_pos(new_mark_pos)
                    if cursor_line_before != _vim.buf.line_till_cursor:
                        cursor_invalid = True

                if cursor_invalid:
                    # NOTE(review): "set set" in this message is a typo
                    # in the original; it is a runtime string, left as-is.
                    raise RuntimeError(
                        'line under the cursor was modified, but ' +
                        '"snip.cursor" variable is not set; either set set ' +
                        '"snip.cursor" to new cursor position, or do not ' +
                        'modify cursor line'
                    )

        return snip
    def _make_debug_exception(self, e, code=''):
        """Attach snippet debug info to exception `e` (in place).

        Sets e.snippet_info (human-readable summary of where the snippet
        was defined and what code was involved) and e.snippet_code.
        """
        e.snippet_info = textwrap.dedent("""
            Defined in: {}
            Trigger: {}
            Description: {}
            Context: {}
            Pre-expand: {}
            Post-expand: {}
        """).format(
            self._location,
            self._trigger,
            self._description,
            self._context_code if self._context_code else '<none>',
            self._actions['pre_expand'] if 'pre_expand' in self._actions
            else '<none>',
            self._actions['post_expand'] if 'post_expand' in self._actions
            else '<none>',
            code,
        )

        e.snippet_code = code
    def has_option(self, opt):
        """Check if the named option is set."""
        # _opts is the option string from the snippet header, e.g. "b!w".
        return opt in self._opts

    @property
    def description(self):
        """Descriptive text for this snippet, prefixed by its trigger."""
        return ('(%s) %s' % (self._trigger, self._description)).strip()

    @property
    def priority(self):
        """The snippets priority, which defines which snippet will be preferred
        over others with the same trigger."""
        return self._priority

    @property
    def trigger(self):
        """The trigger text for the snippet."""
        return self._trigger

    @property
    def matched(self):
        """The last text that matched this snippet in match() or
        could_match()."""
        return self._matched

    @property
    def location(self):
        """Where this snippet was defined."""
        return self._location

    @property
    def context(self):
        """The matched context."""
        # Result of evaluating the context expression; set by matches().
        return self._context
    def matches(self, before, visual_content=None):
        """Returns True if this snippet matches 'before'.

        *before* is the text on the current line up to the cursor.
        Matching strategy depends on the option flags: 'r' regex,
        'w' word boundary, 'i' in-word, default exact tail match.
        Side effects: sets self._matched and self._context.
        """
        # If user supplies both "w" and "i", it should perhaps be an
        # error, but if permitted it seems that "w" should take precedence
        # (since matching at word boundary and within a word == matching at word
        # boundary).
        self._matched = ''

        # Candidate tail of the line that could contain the trigger.
        words = _words_for_line(self._trigger, before)

        if 'r' in self._opts:
            try:
                match = self._re_match(before)
            except Exception as e:
                # Decorate the exception with snippet info for debugging.
                self._make_debug_exception(e)
                raise
        elif 'w' in self._opts:
            words_len = len(self._trigger)
            words_prefix = words[:-words_len]
            words_suffix = words[-words_len:]
            match = (words_suffix == self._trigger)
            if match and words_prefix:
                # Require a word boundary between prefix and suffix.
                # The check is delegated to vim's regex engine ('.<.'
                # matches a char, a word start, then a char).
                boundary_chars = escape(words_prefix[-1:] +
                                        words_suffix[:1], r'\"')
                match = _vim.eval(
                    '"%s" =~# "\\\\v.<."' %
                    boundary_chars) != '0'
        elif 'i' in self._opts:
            match = words.endswith(self._trigger)
        else:
            match = (words == self._trigger)

        # By default, we match the whole trigger
        if match and not self._matched:
            self._matched = self._trigger

        # Ensure the match was on a word boundry if needed
        if 'b' in self._opts and match:
            text_before = before.rstrip()[:-len(self._matched)]
            if text_before.strip(' \t') != '':
                self._matched = ''
                return False

        self._context = None
        if match and self._context_code:
            self._context = self._context_match(visual_content)
            if not self.context:
                match = False

        return match
    def could_match(self, before):
        """Return True if this snippet could match the (partial) 'before'.

        Unlike matches(), this accepts partially-typed triggers; used
        for listing candidate snippets. Sets self._matched to the part
        of *before* that matched.
        """
        self._matched = ''

        # List all on whitespace.
        if before and before[-1] in (' ', '\t'):
            before = ''

        if before and before.rstrip() is not before:
            return False

        words = _words_for_line(self._trigger, before)

        if 'r' in self._opts:
            # Test for full match only
            match = self._re_match(before)
        elif 'w' in self._opts:
            # Trim non-empty prefix up to word boundary, if present.
            # Delegated to vim's regex engine via substitute().
            qwords = escape(words, r'\"')
            words_suffix = _vim.eval(
                'substitute("%s", "\\\\v^.+<(.+)", "\\\\1", "")' % qwords)
            match = self._trigger.startswith(words_suffix)
            self._matched = words_suffix

            # TODO: list_snippets() function cannot handle partial-trigger
            # matches yet, so for now fail if we trimmed the prefix.
            if words_suffix != words:
                match = False
        elif 'i' in self._opts:
            # TODO: It is hard to define when a inword snippet could match,
            # therefore we check only for full-word trigger.
            match = self._trigger.startswith(words)
        else:
            match = self._trigger.startswith(words)

        # By default, we match the words from the trigger
        if match and not self._matched:
            self._matched = words

        # Ensure the match was on a word boundry if needed
        if 'b' in self._opts and match:
            text_before = before.rstrip()[:-len(self._matched)]
            if text_before.strip(' \t') != '':
                self._matched = ''
                return False

        return match
    def instantiate(self, snippet_instance, initial_text, indent):
        """Parses the content of this snippet and brings the corresponding text
        objects alive inside of Vim.

        Abstract: concrete snippet definition classes must override this.
        """
        raise NotImplementedError()
def do_pre_expand(self, visual_content, snippets_stack):
if 'pre_expand' in self._actions:
locals = {'buffer': _vim.buf, 'visual_content': visual_content}
snip = self._execute_action(
self._actions['pre_expand'], self._context, locals
)
self._context = snip.context
return snip.cursor.is_set()
else:
return False
def do_post_expand(self, start, end, snippets_stack):
if 'post_expand' in self._actions:
locals = {
'snippet_start': start,
'snippet_end': end,
'buffer': _vim.buf
}
snip = self._execute_action(
self._actions['post_expand'], snippets_stack[-1].context, locals
)
snippets_stack[-1].context = snip.context
return snip.cursor.is_set()
else:
return False
def do_post_jump(
self, tabstop_number, jump_direction, snippets_stack, current_snippet
):
if 'post_jump' in self._actions:
start = current_snippet.start
end = current_snippet.end
locals = {
'tabstop': tabstop_number,
'jump_direction': jump_direction,
'tabstops': current_snippet.get_tabstops(),
'snippet_start': start,
'snippet_end': end,
'buffer': _vim.buf
}
snip = self._execute_action(
self._actions['post_jump'], current_snippet.context, locals
)
current_snippet.context = snip.context
return snip.cursor.is_set()
else:
return False
    def launch(self, text_before, visual_content, parent, start, end):
        """Launch this snippet, overwriting the text 'start' to 'end' and
        keeping the 'text_before' on the launch line.

        'Parent' is the parent snippet instance if any.
        Returns the created SnippetInstance.
        """
        # Indent of the line the snippet is launched on; every snippet
        # line after the first is re-indented relative to it.
        indent = self._INDENT.match(text_before).group(0)
        lines = (self._value + '\n').splitlines()
        ind_util = IndentUtil()

        # Replace leading tabs in the snippet definition via proper indenting
        initial_text = []
        for line_num, line in enumerate(lines):
            if 't' in self._opts:
                # 't' option: keep tabs literally, do not re-indent.
                tabs = 0
            else:
                tabs = len(self._TABS.match(line).group(0))
            line_ind = ind_util.ntabs_to_proper_indent(tabs)
            if line_num != 0:
                line_ind = indent + line_ind

            result_line = line_ind + line[tabs:]
            if 'm' in self._opts:
                # 'm' option: trim trailing whitespace on each line.
                result_line = result_line.rstrip()
            initial_text.append(result_line)
        initial_text = '\n'.join(initial_text)

        snippet_instance = SnippetInstance(
            self, parent, initial_text, start, end, visual_content,
            last_re=self._last_re, globals=self._globals,
            context=self._context)
        self.instantiate(snippet_instance, initial_text, indent)
        snippet_instance.replace_initial_text(_vim.buf)
        snippet_instance.update_textobjects(_vim.buf)
        return snippet_instance
| yslin/tools-zodlin | ubuntu/vim/.vim/lang/all/ultisnips/pythonx/UltiSnips/snippet/definition/_base.py | Python | apache-2.0 | 15,191 |
#! /usr/bin/env python
'''
Dumps in the stdout the IasValue published in the
BSDB
Created on Jun 12, 2018
@author: acaproni
'''
import argparse
from IasKafkaUtils.KafkaValueConsumer import IasValueListener, KafkaValueConsumer
# The kafka consumer
consumer = None
# Signal that the user pressed CTRL+C
terminated = False
class DumperListener(IasValueListener):
    """Listener that prints every IasValue received from the BSDB to stdout."""
    def __init__(self,verbose, toJson):
        """
        Constructor
        @param verbose: if True prints verbose messages
        @param toJson: if True print the JSON representation of the IasValue
        """
        self.verbose = verbose
        self.toJson = toJson
    def iasValueReceived(self,iasValue):
        """
        Print the IasValue in the stdout, either as JSON or as its
        (possibly verbose) string representation.
        """
        if self.toJson:
            print(iasValue.toJSonString())
        else:
            print(iasValue.toString(self.verbose))
if __name__ == '__main__':
    # Parse the command line, then consume and dump IasValues until CTRL+C.
    parser = argparse.ArgumentParser(description='Dumps IasValue published in the BSDB')
    parser.add_argument(
        '-k',
        '--kafkabrokers',
        help='The kafka bootstrap servers to connect to like iasdevel.hq.eso.org:9092',
        action='store',
        required=True)
    parser.add_argument(
        '-t',
        '--topic',
        help='The topic to connect to; defaults to BsdbCoreKTopic',
        action='store',
        default="BsdbCoreKTopic",
        required=False)
    parser.add_argument(
        '-c',
        '--clientid',
        # Fixed: the help text was a copy/paste of the group-id help.
        help='Kafka client ID',
        action='store',
        default="iasValueDumper",
        required=False)
    parser.add_argument(
        '-g',
        '--groupid',
        help='Kafka group ID',
        action='store',
        default="iasValueDumper-group",
        required=False)
    parser.add_argument(
        '-j',
        '--jsonformat',
        help='prints the JSON representation of the IasValue',
        action='store_true',
        default=False,
        required=False)
    parser.add_argument(
        '-v',
        '--verbose',
        help='Verbose messages',
        action='store_true',
        default=False,
        required=False)
    args = parser.parse_args()

    listener = DumperListener(args.verbose,args.jsonformat)
    consumer = KafkaValueConsumer(
        listener,
        args.kafkabrokers,
        args.topic,
        args.clientid,
        args.groupid)
    consumer.start()
    # Block until the consumer terminates or the user presses CTRL+C.
    try:
        consumer.join()
    except KeyboardInterrupt:
        pass
| IntegratedAlarmSystem-Group/ias | KafkaUtils/src/main/python/iasValueDumper.py | Python | lgpl-3.0 | 3,031 |
import telnetlib
import time
import socket
import sys
TELNET_PORT = 23
TELNET_TIMEOUT = 6
def send_command(remote_conn, cmd):
    '''Send *cmd* over the telnet session and return the buffered output.

    The command is stripped of trailing whitespace, terminated with a
    newline, and a short pause gives the remote device time to respond
    before the pending output is drained.
    '''
    remote_conn.write(cmd.rstrip() + '\n')
    time.sleep(1)
    return remote_conn.read_very_eager()
def telnet_connect(ip_addr):
    """Open a telnet session to *ip_addr* on TELNET_PORT.

    Exits the whole program with a message if the connection times out.
    """
    try:
        return telnetlib.Telnet(ip_addr, TELNET_PORT, TELNET_TIMEOUT)
    except socket.timeout:
        sys.exit("Connection timed out")
def login(remote_conn, username, password):
    '''Answer the username/password prompts and print the banner output.

    Note: prints the output rather than returning it (returns None).
    '''
    # Match on prompt suffixes so both "Username:"/"username:" etc. work.
    output = remote_conn.read_until("sername:", TELNET_TIMEOUT)
    remote_conn.write(username + '\n')
    output += remote_conn.read_until("ssword:", TELNET_TIMEOUT)
    remote_conn.write(password + '\n')
    print output
def main():
    """Connect to the lab router, disable paging and dump 'show version'."""
    ip_addr = '50.76.53.27'
    username = 'pyclass'
    password = '88newclass'

    remote_conn = telnet_connect(ip_addr)
    # NOTE(review): login() returns None (it prints its output itself);
    # 'output' only becomes meaningful from the send_command calls below.
    output = login(remote_conn, username, password)

    output = send_command(remote_conn, 'terminal len 0')
    print output
    output = send_command(remote_conn, 'show version')
    print output

    remote_conn.close()
if __name__ == "__main__":
main()
| joeyb182/pynet_ansible | byers-paid/test_telnet_from_video.py | Python | apache-2.0 | 1,167 |
# -*- coding: iso-8859-1 -*-
# -----------------------------------------------------------------------------
# real.py - parser for real media files
# -----------------------------------------------------------------------------
# $Id$
#
# -----------------------------------------------------------------------------
# kaa-Metadata - Media Metadata for Python
# Copyright (C) 2003-2006 Thomas Schueppel, Dirk Meyer
#
# First Edition: Thomas Schueppel <stain@acm.org>
# Maintainer: Dirk Meyer <dischi@freevo.org>
#
# Please see the file AUTHORS for a complete list of authors.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MER-
# CHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# -----------------------------------------------------------------------------
__all__ = ['Parser']
# python imports
import struct
import logging
# import kaa.metadata.video core
from . import core
# http://www.pcisys.net/~melanson/codecs/rmff.htm
# http://www.pcisys.net/~melanson/codecs/
# get logging object
log = logging.getLogger('metadata')
class RealVideo(core.AVContainer):
    """Parser for RealMedia (RMFF) containers.

    Walks the chunked file structure: a '.RMF' file header followed by
    typed chunks (PROP, MDPR, CONT, DATA, INDX, ...), populating the
    AVContainer attributes (streams, length, title, ...).

    NOTE(review): chunk ids produced by struct.unpack are compared
    against str literals, which assumes Python-2 bytes/str semantics --
    verify before running under Python 3.
    """
    def __init__(self,file):
        core.AVContainer.__init__(self)
        self.mime = 'video/real'
        self.type = 'Real Video'
        # Every chunk starts with a 10-byte header: 4-byte id,
        # 32-bit size, 16-bit version.
        h = file.read(10)
        try:
            (object_id,object_size,object_version) = struct.unpack('>4sIH',h)
        except struct.error:
            # EOF.
            raise core.ParseError()
        if not object_id == '.RMF':
            raise core.ParseError()
        file_version, num_headers = struct.unpack('>II', file.read(8))
        log.debug("size: %d, ver: %d, headers: %d" % \
                  (object_size, file_version,num_headers))
        for i in range(0,num_headers):
            try:
                oi = struct.unpack('>4sIH',file.read(10))
            except (struct.error, IOError):
                # Header data we expected wasn't there.  File may be
                # only partially complete.
                break

            # 'object_id' still holds the previous chunk's id at this
            # point: a DATA chunk must be followed by INDX.
            if object_id == 'DATA' and oi[0] != 'INDX':
                log.debug('INDX chunk expected after DATA but not found -- file corrupt')
                break

            (object_id,object_size,object_version) = oi
            if object_id == 'DATA':
                # Seek over the data chunk rather than reading it in.
                file.seek(object_size - 10, 1)
            else:
                self._read_header(object_id, file.read(object_size-10))
            log.debug("%s [%d]" % (object_id,object_size-10))

        # Read all the following headers

    def _read_header(self,object_id,s):
        # Dispatch on the chunk id; *s* is the chunk payload without
        # the 10-byte chunk header.
        if object_id == 'PROP':
            prop = struct.unpack('>9IHH', s)
            log.debug(prop)
        if object_id == 'MDPR':
            # Media properties: one MDPR chunk per stream.
            mdpr = struct.unpack('>H7I', s[:30])
            log.debug(mdpr)
            self.length = mdpr[7]/1000.0
            (stream_name_size,) = struct.unpack('>B', s[30:31])
            stream_name = s[31:31+stream_name_size]
            pos = 31+stream_name_size
            (mime_type_size,) = struct.unpack('>B', s[pos:pos+1])
            mime = s[pos+1:pos+1+mime_type_size]
            pos += mime_type_size+1
            (type_specific_len,) = struct.unpack('>I', s[pos:pos+4])
            type_specific = s[pos+4:pos+4+type_specific_len]
            pos += 4+type_specific_len
            if mime[:5] == 'audio':
                ai = core.AudioStream()
                ai.id = mdpr[0]
                ai.bitrate = mdpr[2]
                self.audio.append(ai)
            elif mime[:5] == 'video':
                vi = core.VideoStream()
                vi.id = mdpr[0]
                vi.bitrate = mdpr[2]
                self.video.append(vi)
            else:
                log.debug("Unknown: %s" % mime)
        if object_id == 'CONT':
            # Content description: four length-prefixed strings.
            pos = 0
            (title_len,) = struct.unpack('>H', s[pos:pos+2])
            self.title = s[2:title_len+2]
            pos += title_len+2
            (author_len,) = struct.unpack('>H', s[pos:pos+2])
            self.artist = s[pos+2:pos+author_len+2]
            pos += author_len+2
            (copyright_len,) = struct.unpack('>H', s[pos:pos+2])
            self.copyright = s[pos+2:pos+copyright_len+2]
            pos += copyright_len+2
            (comment_len,) = struct.unpack('>H', s[pos:pos+2])
            self.comment = s[pos+2:pos+comment_len+2]
Parser = RealVideo
| jtackaberry/stagehand | external/metadata/video/real.py | Python | mit | 4,964 |
from django.db.models import Manager
class SnippetsManager(Manager):
    """Manager with convenience querysets for Snippet objects."""

    def optimized(self):
        """Queryset with the related user, tag, comment and page data
        pre-fetched, avoiding N+1 queries when listing snippets."""
        related = ('tags', 'created_by__following', 'created_by__followers',
                   'created_by__stars', 'comments_set', 'pages_set',
                   'pages_set__language')
        return (self.get_queryset()
                    .select_related('created_by')
                    .prefetch_related(*related)
                    .filter())

    def public(self):
        """Only snippets flagged as public."""
        return self.get_queryset().filter(is_public=True)

    def private(self):
        """Only snippets flagged as private."""
        return self.get_queryset().filter(is_public=False)
| pombredanne/snippit | snippit/apps/snippet/managers.py | Python | mit | 674 |
import custom
class ParameterSet:
    """Container mapping parameter names to typed values for one scenario."""
    def __init__(self, campaign, scenarioID):
        # campaign: the campaign this scenario belongs to
        # scenarioID: database id of the scenario
        self.campaign = campaign
        self.scenarioID = scenarioID
        # name -> typed value, filled by getParameterSet()
        self.params = {}
def getParametersOfCampaign(campaign):
    """Return the distinct parameter names used anywhere in *campaign*."""
    rows = custom.query("SELECT DISTINCT(parameter_name) FROM parameters WHERE campaign_id = %d" % campaign.campaignID, campaign)
    return [row[0] for row in rows]
def getParameterSet(campaign, scenario):
    """Build a ParameterSet holding the typed parameters of *scenario*.

    Each database row declares its type; the value is taken from the
    matching type_* column and converted with the corresponding Python
    type.
    """
    rows = custom.query("SELECT parameter_name, parameter_type, type_bool, type_integer, type_float, type_string "
                        "FROM parameters WHERE scenario_id = %d" % scenario.scenarioID, campaign)

    ps = ParameterSet(campaign, scenario.scenarioID)

    # Declared type name -> (row column index, Python converter).
    converters = {
        "type_bool": (2, bool),
        "type_integer": (3, int),
        "type_float": (4, float),
        "type_string": (5, str),
    }
    for row in rows:
        paramname = str(row[0])
        typename = row[1].rstrip()
        if typename in converters:
            column, cast = converters[typename]
            ps.params[paramname] = cast(row[column])
    return ps
| openwns/wrowser | openwns/wrowser/simdb/api/parameters.py | Python | gpl-2.0 | 1,444 |
"""Contains utility functions, mainly to help with polygon creation"""
from __future__ import division
__docformat__ = "reStructuredText"
from .vec2d import Vec2d
from math import fabs, sqrt
X, Y = 0, 1
try:
    from functools import partial
except ImportError:
    # Python 2.4 support
    def partial(func, *args, **keywords):
        # Minimal backport of functools.partial for pre-2.5
        # interpreters: returns a callable with *args/**keywords
        # pre-bound to func.
        def newfunc(*fargs, **fkeywords):
            newkeywords = keywords.copy()
            newkeywords.update(fkeywords)
            return func(*(args + fargs), **newkeywords)
        newfunc.func = func
        newfunc.args = args
        newfunc.keywords = keywords
        return newfunc
def is_clockwise(points):
    """
    Check if the points given forms a clockwise polygon

    :return: True if the points forms a clockwise polygon
    """
    # Shoelace formula: the doubled signed area is non-positive for a
    # clockwise winding (in a y-up coordinate system).
    area2 = 0
    for idx, p in enumerate(points):
        q = points[(idx + 1) % len(points)]
        area2 += p[0] * q[1] - p[1] * q[0]
    return area2 <= 0
def is_left(p0, p1, p2):
    """Test if p2 is left, on or right of the (infinite) line (p0,p1).

    :return: 1 if p2 is left of the line through p0 and p1,
             0 if p2 is on the line,
             -1 if p2 is right of the line
    """
    cross = ((p1[0] - p0[0]) * (p2[1] - p0[1])
             - (p2[0] - p0[0]) * (p1[1] - p0[1]))
    # Collapse the cross product to its sign only, so the result can
    # be used directly as a comparison function.
    return (cross > 0) - (cross < 0)
def is_convex(points):
    """Test if a polygon (list of (x,y)) is convex or not

    :return: True if the polygon is convex, False otherwise
    """

    assert len(points) > 2, "need at least 3 points to form a polygon"

    p0 = points[0]
    p1 = points[1]
    p2 = points[2]

    xc, yc = 0, 0
    is_same_winding = is_left(p0, p1, p2)
    for p2 in points[2:] + [p0] + [p1]:
        # Any change in turn direction means the polygon is not convex.
        if is_same_winding != is_left(p0, p1, p2):
            return False
        a = p1[X] - p0[X], p1[Y] - p0[Y] # p1-p0
        b = p2[X] - p1[X], p2[Y] - p1[Y] # p2-p1
        # Count per-axis sign changes of the edge direction; a convex
        # polygon's edges change direction at most twice per axis.
        if sign(a[X]) != sign(b[X]): xc +=1
        if sign(a[Y]) != sign(b[Y]): yc +=1
        p0, p1 = p1, p2

    return xc <= 2 and yc <= 2
def sign(x):
    """Sign function.

    :return -1 if x < 0, else return 1
    """
    # Note: zero maps to +1, matching the documented contract.
    return -1 if x < 0 else 1
def reduce_poly(points, tolerance=0.5):
    """Remove close points to simplify a polyline

    tolerance is the min distance between two points squared.

    :return: The reduced polygon as a list of (x,y)
    """
    assert len(points) > 0, "reduce_poly can not simplify an empty points list"

    reduced = [points[0]]
    anchor = points[0]
    for candidate in points[1:]:
        dx = anchor[0] - candidate[0]
        dy = anchor[1] - candidate[1]
        # Keep only points farther than the tolerance from the last
        # kept point (squared distances, no sqrt needed).
        if dx * dx + dy * dy > tolerance:
            anchor = candidate
            reduced.append(candidate)
    return reduced
def convex_hull(points):
    """Create a convex hull from a list of points.

    This function uses the Graham Scan Algorithm.

    Note: the input list is modified in place (the pivot point is
    removed and the remaining points are sorted), matching the
    historical behavior of this helper.

    :return: Convex hull as a list of (x,y)
    """
    # list.sort(cmp=...) was removed in Python 3; functools.cmp_to_key
    # (available since Python 2.7) preserves the exact comparison-based
    # ordering the original code relied on.
    from functools import cmp_to_key

    assert len(points) > 2, "need at least 3 points to form a convex hull"

    ### Find lowest rightmost point
    p0 = points[0]
    for p in points[1:]:
        if p[Y] < p0[Y]:
            p0 = p
        elif p[Y] == p0[Y] and p[X] > p0[X]:
            p0 = p
    points.remove(p0)

    ### Sort the points angularly about p0 as center
    f = partial(is_left, p0)
    points.sort(key=cmp_to_key(f))
    points.reverse()
    points.insert(0, p0)

    ### Find the hull points
    hull = [p0, points[1]]

    for p in points[2:]:
        pt1 = hull[-1]
        pt2 = hull[-2]

        l = is_left(pt2, pt1, p)

        if l > 0:
            hull.append(p)
        else:
            # Backtrack while the new point makes a non-left turn.
            while l <= 0 and len(hull) > 2:
                hull.pop()
                pt1 = hull[-1]
                pt2 = hull[-2]
                l = is_left(pt2, pt1, p)
            hull.append(p)
    return hull
def calc_center(points):
    """Calculate the center of a polygon

    :return: The center (x,y)
    """
    # ref: http://en.wikipedia.org/wiki/Polygon (polygon centroid)
    assert len(points) > 0, "need at least 1 points to calculate the center"

    area = calc_area(points)

    cx = 0
    cy = 0
    for prev, cur in zip(points, points[1:] + [points[0]]):
        cross = prev[0] * cur[1] - cur[0] * prev[1]
        cx += (prev[0] + cur[0]) * cross
        cy += (prev[1] + cur[1]) * cross
    scale = 1 / (6. * area)
    return scale * cx, scale * cy
def poly_vectors_around_center(pointlist, points_as_Vec2d=True):
    """Rearranges vectors around the center

    If points_as_Vec2d, then return points are also Vec2d, else pos

    :return: pointlist ([Vec2d/pos, ...])
    """
    cx, cy = calc_center(pointlist)
    if points_as_Vec2d:
        # Point relative to the centroid, wrapped as Vec2d.
        return [Vec2d((p[0] - cx, p[1] - cy)) for p in pointlist]
    # Tuple variant flips the y axis (cy - y) -- preserved behavior.
    return [(p[0] - cx, cy - p[1]) for p in pointlist]
def calc_area(points):
    """Calculate the area of a polygon

    :return: Area of polygon (signed by winding direction)
    """
    # ref: http://en.wikipedia.org/wiki/Polygon (shoelace formula)
    if len(points) < 3:
        return 0

    total = 0
    for prev, cur in zip(points, points[1:] + [points[0]]):
        total += prev[0] * cur[1] - cur[0] * prev[1]
    return 0.5 * total
def calc_perimeter(points):
    """Calculate the perimeter of a polygon

    :return: Perimeter of polygon
    """
    if len(points) < 2:
        return 0

    # Sum of euclidean edge lengths, closing the polygon at the end.
    return sum(
        sqrt((cur[0] - prev[0]) ** 2 + (cur[1] - prev[1]) ** 2)
        for prev, cur in zip(points, points[1:] + [points[0]])
    )
def get_poly_UA(pointlist, points_as_Vec2d=True):
    """Calculates the perimeter and area of a given polygon

    Use calc_perimeter() and calc_area() directly instead of this method

    :deprecated: Scheduled for deletion in pymunk 0.8.5+
    :return: U, A
    """
    # The historical body after this return statement was unreachable
    # dead code and has been removed. The points_as_Vec2d parameter is
    # kept for backward compatibility but has no effect.
    return calc_perimeter(pointlist), calc_area(pointlist)
__all__ = ["is_clockwise", "is_left", "reduce_poly", "convex_hull",
"calc_center", "poly_vectors_around_center", "get_poly_UA", "is_convex"]
| cullophid/Scienceman | pymunk/util.py | Python | lgpl-2.1 | 7,527 |
from copy import copy, deepcopy
import pandas as pd
from .exceptions import PlotnineError
from .utils import array_kind, ninteraction
from .utils import check_required_aesthetics, defaults
from .mapping.aes import aes, NO_GROUP
from .mapping.evaluation import stage, evaluate
class Layers(list):
    """
    List of layers

    During the plot building pipeline, many operations are
    applied at all layers in the plot. This class makes those
    tasks easier.
    """

    def __iadd__(self, other):
        # Preserve the Layers type after in-place concatenation.
        return Layers(super(Layers, self).__iadd__(other))

    def __add__(self, other):
        # Preserve the Layers type after concatenation.
        return Layers(super(Layers, self).__add__(other))

    def __radd__(self, other, inplace=False):
        """
        Add layers to ggplot object
        """
        from .ggplot import ggplot
        if isinstance(other, ggplot):
            # Adding to a ggplot copies it (unless inplace), mirroring
            # the semantics of adding a single layer.
            other = other if inplace else deepcopy(other)
            for obj in self:
                other += obj
        else:
            msg = "Cannot add Layers to object of type {!r}".format
            raise PlotnineError(msg(type(other)))
        return other

    def __getitem__(self, key):
        result = super(Layers, self).__getitem__(key)
        if not isinstance(key, int):
            # Slices return a Layers; single indices return a layer.
            result = Layers(result)
        return result

    @property
    def data(self):
        # Dataframes of all layers, in layer order.
        return [l.data for l in self]

    def prepare(self, plot):
        # Resolve each layer's data, mapping and evaluation
        # environment against the parent ggplot object.
        for l in self:
            l.make_layer_data(plot.data)
            l.make_layer_mapping(plot.mapping)
            l.make_layer_environments(plot.environment)

    def setup_data(self):
        for l in self:
            l.setup_data()

    def draw(self, layout, coord):
        # If zorder is 0, it is left to MPL
        for i, l in enumerate(self, start=1):
            l.zorder = i
            l.draw(layout, coord)

    def compute_aesthetics(self, plot):
        for l in self:
            l.compute_aesthetics(plot)

    def compute_statistic(self, layout):
        for l in self:
            l.compute_statistic(layout)

    def map_statistic(self, plot):
        for l in self:
            l.map_statistic(plot)

    def compute_position(self, layout):
        for l in self:
            l.compute_position(layout)

    def use_defaults(self, data=None, aes_modifiers=None):
        for l in self:
            l.use_defaults(data, aes_modifiers)

    def transform(self, scales):
        for l in self:
            l.data = scales.transform_df(l.data)

    def train(self, scales):
        for l in self:
            l.data = scales.train_df(l.data)

    def map(self, scales):
        for l in self:
            l.data = scales.map_df(l.data)

    def finish_statistics(self):
        for l in self:
            l.finish_statistics()

    def update_labels(self, plot):
        for l in self:
            plot._update_labels(l)
class layer:
    """
    Layer

    When a ``geom`` or ``stat`` is added to a
    :class:`~plotnine.ggplot` object, it creates a single layer.
    This class is a representation of that layer.

    Parameters
    ----------
    geom : geom, optional
        geom to used to draw this layer.
    stat : stat, optional
        stat used for the statistical transformation of
        data in this layer
    data : dataframe, optional
        Data plotted in this layer. If ``None``, the data from
        the :class:`~plotnine.ggplot` object will be used.
    mapping : aes, optional
        Aesthetic mappings.
    position : position, optional
        Position object to adjust the geometries in this layer.
    inherit_aes : bool, optional
        If ``True`` inherit from the aesthetic mappings of
        the :class:`~plotnine.ggplot` object. Default ``True``.
    show_legend : bool or None, optional
        Whether to make up and show a legend for the mappings
        of this layer. If ``None`` then an automatic/good choice
        is made. Default is ``None``.
    raster : bool, optional (default: False)
        If ``True``, draw onto this layer a raster (bitmap) object
        even if the final image format is vector.

    Notes
    -----
    There is no benefit to manually creating a layer. You should
    always use a ``geom`` or ``stat``.
    """

    def __init__(self, geom=None, stat=None, data=None, mapping=None,
                 position=None, inherit_aes=True, show_legend=None,
                 raster=False):
        self.geom = geom
        self.stat = stat
        self.data = data
        self.mapping = mapping
        self.position = position
        self.inherit_aes = inherit_aes
        self.show_legend = show_legend
        self.raster = raster
        self._active_mapping = {}
        # Drawing order; assigned by Layers.draw (0 means "let MPL pick").
        self.zorder = 0

    @staticmethod
    def from_geom(geom):
        """
        Create a layer given a :class:`geom`

        Parameters
        ----------
        geom : geom
            `geom` from which a layer will be created

        Returns
        -------
        out : layer
            Layer that represents the specific `geom`.
        """
        kwargs = geom._kwargs
        lkwargs = {'geom': geom,
                   'mapping': geom.mapping,
                   'data': geom.data,
                   'stat': geom._stat,
                   'position': geom._position}

        # Layer-level parameters may come from the geom call or from
        # the geom's declared defaults.
        layer_params = ('inherit_aes', 'show_legend', 'raster')
        for param in layer_params:
            if param in kwargs:
                lkwargs[param] = kwargs[param]
            elif param in geom.DEFAULT_PARAMS:
                lkwargs[param] = geom.DEFAULT_PARAMS[param]
        return layer(**lkwargs)

    def __radd__(self, gg):
        """
        Add layer to ggplot object
        """
        try:
            gg.layers.append(self)
        except AttributeError:
            msg = "Cannot add layer to object of type {!r}".format
            raise PlotnineError(msg(type(gg)))
        return gg

    def __deepcopy__(self, memo):
        """
        Deep copy without copying the self.data dataframe
        """
        cls = self.__class__
        result = cls.__new__(cls)
        memo[id(self)] = result
        old = self.__dict__
        new = result.__dict__

        # 'data' is shared (not copied) to avoid duplicating what can
        # be a large dataframe; everything else is deep-copied.
        for key, item in old.items():
            if key == 'data':
                new[key] = old[key]
            else:
                new[key] = deepcopy(old[key], memo)

        return result

    def make_layer_data(self, plot_data):
        """
        Generate data to be used by this layer

        Parameters
        ----------
        plot_data : dataframe
            ggplot object data
        """
        if plot_data is None:
            plot_data = pd.DataFrame()

        # Each layer that does not have data gets a copy of
        # of the ggplot.data. If the has data it is replaced
        # by copy so that we do not alter the users data
        if self.data is None:
            try:
                self.data = plot_data.copy()
            except AttributeError:
                _geom_name = self.geom.__class__.__name__
                _data_name = plot_data.__class__.__name__
                raise PlotnineError(
                    "{} layer expects a dataframe, but it got "
                    "{} instead.".format(_geom_name, _data_name)
                )
        elif callable(self.data):
            # A callable transforms the plot data into the layer data.
            self.data = self.data(plot_data)
            if not isinstance(self.data, pd.DataFrame):
                raise PlotnineError(
                    "Data function must return a dataframe")
        else:
            self.data = self.data.copy()

    def make_layer_mapping(self, plot_mapping):
        """
        Create the aesthetic mappings to be used by this layer

        Parameters
        ----------
        plot_mapping : aes
            ggplot object mapping
        """
        if self.inherit_aes:
            self.mapping = defaults(self.mapping, plot_mapping)

        # aesthetics set as parameters override the same
        # aesthetics set as mappings, so we can ignore
        # those in the mapping
        for ae in self.geom.aes_params:
            if ae in self.mapping:
                del self.mapping[ae]

        # Set group as a mapping if set as a parameter
        if 'group' in self.geom.aes_params:
            group = self.geom.aes_params['group']
            # Double quote str so that it evaluates to itself
            if isinstance(group, str):
                group = f'"{group}"'
            self.mapping['group'] = stage(start=group)

    def make_layer_environments(self, plot_environment):
        """
        Create the aesthetic mappings to be used by this layer

        Parameters
        ----------
        plot_environment : ~patsy.Eval.EvalEnvironment
            Namespace in which to execute aesthetic expressions.
        """
        self.geom.environment = plot_environment
        self.stat.environment = plot_environment

    def compute_aesthetics(self, plot):
        """
        Return a dataframe where the columns match the
        aesthetic mappings.

        Transformations like 'factor(cyl)' and other
        expression evaluation are made in here
        """
        evaled = evaluate(self.mapping._starting, self.data, plot.environment)
        evaled_aes = aes(**{col: col for col in evaled})
        plot.scales.add_defaults(evaled, evaled_aes)

        if len(self.data) == 0 and len(evaled) > 0:
            # No data, and vectors suppled to aesthetics
            evaled['PANEL'] = 1
        else:
            evaled['PANEL'] = self.data['PANEL']

        self.data = add_group(evaled)

    def compute_statistic(self, layout):
        """
        Compute & return statistics for this layer
        """
        data = self.data
        if not len(data):
            return type(data)()

        params = self.stat.setup_params(data)
        data = self.stat.use_defaults(data)
        data = self.stat.setup_data(data)
        data = self.stat.compute_layer(data, params, layout)
        self.data = data

    def map_statistic(self, plot):
        """
        Mapping aesthetics to computed statistics
        """
        data = self.data
        if not len(data):
            return type(data)()

        # Mixin default stat aesthetic mappings
        aesthetics = defaults(self.mapping, self.stat.DEFAULT_AES)
        stat_data = evaluate(aesthetics._calculated, data, plot.environment)
        if not len(stat_data):
            return

        # (see stat_spoke for one exception)
        if self.stat.retransform:
            stat_data = plot.scales.transform_df(stat_data)

        # When there are duplicate columns, we use the computed
        # ones in stat_data
        columns = data.columns.difference(stat_data.columns)
        self.data = pd.concat([data[columns], stat_data], axis=1)

        # Add any new scales, if needed
        new = {ae: ae for ae in stat_data.columns}
        plot.scales.add_defaults(self.data, new)

    def setup_data(self):
        """
        Prepare/modify data for plotting
        """
        data = self.data
        if len(data) == 0:
            return type(data)()

        data = self.geom.setup_data(data)

        check_required_aesthetics(
            self.geom.REQUIRED_AES,
            set(data.columns) | set(self.geom.aes_params),
            self.geom.__class__.__name__)

        self.data = data

    def compute_position(self, layout):
        """
        Compute the position of each geometric object
        in concert with the other objects in the panel
        """
        if len(self.data) == 0:
            return self.data

        params = self.position.setup_params(self.data)
        data = self.position.setup_data(self.data, params)
        data = self.position.compute_layer(data, params, layout)
        self.data = data

    def draw(self, layout, coord):
        """
        Draw geom

        Parameters
        ----------
        layout : Layout
            Layout object created when the plot is getting
            built
        coord : coord
            Type of coordinate axes
        """
        # Layer params take precedence over stat params, then zorder
        # and raster flags are threaded through to the geom.
        params = copy(self.geom.params)
        params.update(self.stat.params)
        params['zorder'] = self.zorder
        params['raster'] = self.raster
        self.data = self.geom.handle_na(self.data)
        # At this point each layer must have the data
        # that is created by the plot build process
        self.geom.draw_layer(self.data, layout, coord, **params)

    def use_defaults(self, data=None, aes_modifiers=None):
        """
        Prepare/modify data for plotting

        Parameters
        ----------
        data : dataframe, optional
            Data
        aes_modifiers : dict
            Expression to evaluate and replace aesthetics in
            the data.
        """
        if data is None:
            data = self.data

        if aes_modifiers is None:
            aes_modifiers = self.mapping._scaled

        return self.geom.use_defaults(data, aes_modifiers)

    def finish_statistics(self):
        """
        Prepare/modify data for plotting
        """
        # params = self.stat.setup_params(self.data)
        self.stat.finish_layer(self.data, self.stat.params)
def add_group(data):
    """Ensure *data* has an integer 'group' column.

    An existing 'group' column is re-encoded with ninteraction;
    otherwise the group is derived from the discrete columns, falling
    back to NO_GROUP when there are none.
    """
    if len(data) == 0:
        return data

    if 'group' in data:
        data['group'] = ninteraction(data[['group']], drop=True)
    else:
        disc = discrete_columns(data, ignore=['label'])
        if disc:
            data['group'] = ninteraction(data[disc], drop=True)
        else:
            data['group'] = NO_GROUP
    return data
def discrete_columns(df, ignore):
    """
    Return a list of the discrete columns in the
    dataframe `df`. `ignore` is a list|set|tuple with the
    names of the columns to skip.
    """
    result = []
    for name in df:
        if not array_kind.discrete(df[name]) or name in ignore:
            continue
        # Object-dtype columns may hold unhashable compound values
        # (e.g. lists); those cannot be grouped on, so skip them.
        try:
            hash(df[name].iloc[0])
        except TypeError:
            continue
        result.append(name)
    return result
| has2k1/plotnine | plotnine/layer.py | Python | gpl-2.0 | 14,062 |
import os
import fnmatch
import re # for htest
import sys
from Tkinter import StringVar, BooleanVar, Checkbutton # for GrepDialog
from Tkinter import Tk, Text, Button, SEL, END # for htest
from idlelib import SearchEngine
import itertools
from idlelib.SearchDialogBase import SearchDialogBase
# Importing OutputWindow fails due to import loop
# EditorWindow -> GrepDialop -> OutputWindow -> EditorWindow
def grep(text, io=None, flist=None):
    """Open the Find-in-Files dialog for the current selection in *text*.

    A single GrepDialog instance is cached on the SearchEngine object,
    so repeated invocations reuse the same dialog.
    """
    root = text._root()
    engine = SearchEngine.get(root)
    if not hasattr(engine, "_grepdialog"):
        engine._grepdialog = GrepDialog(root, engine, flist)
    dialog = engine._grepdialog
    searchphrase = text.get("sel.first", "sel.last")
    dialog.open(text, searchphrase, io)
class GrepDialog(SearchDialogBase):
    # Dialog configuration consumed by SearchDialogBase.
    title = "Find in Files Dialog"
    icon = "Grep"
    needwrapbutton = 0  # grep has no "wrap around" option
def __init__(self, root, engine, flist):
SearchDialogBase.__init__(self, root, engine)
self.flist = flist
self.globvar = StringVar(root)
self.recvar = BooleanVar(root)
def open(self, text, searchphrase, io=None):
SearchDialogBase.open(self, text, searchphrase)
if io:
path = io.filename or ""
else:
path = ""
dir, base = os.path.split(path)
head, tail = os.path.splitext(base)
if not tail:
tail = ".py"
self.globvar.set(os.path.join(dir, "*" + tail))
def create_entries(self):
SearchDialogBase.create_entries(self)
self.globent = self.make_entry("In files:", self.globvar)
def create_other_buttons(self):
f = self.make_frame()
btn = Checkbutton(f, anchor="w",
variable=self.recvar,
text="Recurse down subdirectories")
btn.pack(side="top", fill="both")
btn.select()
def create_command_buttons(self):
SearchDialogBase.create_command_buttons(self)
self.make_button("Search Files", self.default_command, 1)
def default_command(self, event=None):
prog = self.engine.getprog()
if not prog:
return
path = self.globvar.get()
if not path:
self.top.bell()
return
from idlelib.OutputWindow import OutputWindow # leave here!
save = sys.stdout
try:
sys.stdout = OutputWindow(self.flist)
self.grep_it(prog, path)
finally:
sys.stdout = save
def grep_it(self, prog, path):
dir, base = os.path.split(path)
list = self.findfiles(dir, base, self.recvar.get())
list.sort()
self.close()
pat = self.engine.getpat()
print("Searching %r in %s ..." % (pat, path))
hits = 0
try:
for fn in list:
try:
with open(fn) as f:
for lineno, line in enumerate(f, 1):
if line[-1:] == '\n':
line = line[:-1]
if prog.search(line):
sys.stdout.write("%s: %s: %s\n" %
(fn, lineno, line))
hits += 1
except IOError as msg:
print(msg)
print(("Hits found: %s\n"
"(Hint: right-click to open locations.)"
% hits) if hits else "No hits.")
except AttributeError:
# Tk window has been closed, OutputWindow.text = None,
# so in OW.write, OW.text.insert fails.
pass
def findfiles(self, dir, base, rec):
try:
names = os.listdir(dir or os.curdir)
except os.error as msg:
print(msg)
return []
list = []
subdirs = []
for name in names:
fn = os.path.join(dir, name)
if os.path.isdir(fn):
subdirs.append(fn)
else:
if fnmatch.fnmatch(name, base):
list.append(fn)
if rec:
for subdir in subdirs:
list.extend(self.findfiles(subdir, base, rec))
return list
def close(self, event=None):
if self.top:
self.top.grab_release()
self.top.withdraw()
def _grep_dialog(parent):  # for htest
    """Human-driven test: show a window with a button that opens the
    GrepDialog on a fully-selected Text widget."""
    from idlelib.PyShell import PyShellFileList
    root = Tk()
    root.title("Test GrepDialog")
    # Position the test window relative to the htest parent window.
    width, height, x, y = list(map(int, re.split('[x+]', parent.geometry())))
    root.geometry("+%d+%d"%(x, y + 150))
    flist = PyShellFileList(root)
    text = Text(root, height=5)
    text.pack()
    def show_grep_dialog():
        # grep() reads the selection, so select everything first.
        text.tag_add(SEL, "1.0", END)
        grep(text, flist=flist)
        text.tag_remove(SEL, "1.0", END)
    button = Button(root, text="Show GrepDialog", command=show_grep_dialog)
    button.pack()
    root.mainloop()
if __name__ == "__main__":
    # Run the unit tests for this module, then the interactive htest.
    import unittest
    unittest.main('idlelib.idle_test.test_grep', verbosity=2, exit=False)
    from idlelib.idle_test.htest import run
    run(_grep_dialog)
| BartoszCichecki/onlinepython | onlinepython/pypy-2.4.0-win32/lib-python/2.7/idlelib/GrepDialog.py | Python | gpl-2.0 | 5,120 |
import json, os, time
import logging
import etcd
import helpers.errors
logger = logging.getLogger(__name__)
class Etcd:
    """etcd-backed keystore used for leader election and member tracking.

    All keys live under ``/service/<scope>/``.  Leadership is held via a
    TTL'd ``/leader`` key that the current primary must keep refreshing.
    """
    def __init__(self, config):
        self.scope = config["scope"]
        self.host, self.port = config["host"].split(":")
        self.client = etcd.Client(host=self.host, port=int(self.port))
        self.ttl = config["ttl"]
    def get(self, path, max_attempts=1):
        """Read ``/service/<scope><path>``.

        Retries a missing key up to ``max_attempts`` times (3s apart)
        before re-raising EtcdKeyNotFound.  Returns the node's value;
        for value-less nodes (e.g. directories) the raw etcd response
        object is returned instead.
        """
        attempts = 0
        response = None
        while True:
            try:
                logger.debug("GET: /service/%s%s", self.scope, path)
                response = self.client.read("/service/%s%s" % (self.scope, path))
                break
            except (etcd.EtcdKeyNotFound) as e:
                attempts += 1
                if attempts < max_attempts:
                    logger.info("Failed to return %s, trying again. (%s of %s)" % (path, attempts, max_attempts))
                    time.sleep(3)
                else:
                    raise e
        return (response.value or response)
    def set(self, path, value, ttl=None, prevExist=None, prevValue=None):
        """Write ``/service/<scope><path>``.

        Optional ``ttl`` and compare-and-set guards (``prevExist`` /
        ``prevValue``) are forwarded to etcd only when supplied.
        """
        additional_params = {}
        if ttl is not None:
            additional_params['ttl'] = ttl
        if prevExist is not None:
            additional_params['prevExist'] = prevExist
        if prevValue is not None:
            additional_params['prevValue'] = prevValue
        logger.debug("SET: /service/%s%s > %s", self.scope, path, value)
        self.client.write("/service/%s%s" % (self.scope, path),
                          value, **additional_params)
    def current_leader(self):
        """Return ``{"hostname", "address"}`` for the current leader.

        Returns None when there is no leader key; any other etcd failure
        is surfaced as CurrentLeaderError.
        """
        try:
            hostname = self.get("/leader")
            address = self.get("/members/%s" % hostname)
            return {"hostname": hostname, "address": address}
        except etcd.EtcdKeyNotFound:
            return None
        except Exception:
            raise helpers.errors.CurrentLeaderError("Etcd is not responding properly")
    def members(self):
        """Return registered members as ``[{"hostname", "address"}, ...]``,
        or None when the members directory does not exist."""
        try:
            members = []
            members_dir = self.get("/members")
            if members_dir:
                for member in members_dir.children:
                    members.append({"hostname": member.key.split('/')[-1], "address": member.value})
            return members
        except etcd.EtcdKeyNotFound:
            return None
        except Exception:
            raise helpers.errors.CurrentLeaderError("Etcd is not responding properly")
    def touch_member(self, member, connection_string):
        """Heartbeat: refresh this member's TTL'd registration."""
        self.set("/members/%s" % member, connection_string, ttl=self.ttl)
    def take_leader(self, value):
        """Unconditionally claim leadership."""
        self.set("/leader", value, ttl=self.ttl)
    def attempt_to_acquire_leader(self, value):
        """Try to claim leadership only if no leader exists.

        Returns True on success, False when some other node already
        holds the leader key.
        """
        try:
            self.set("/leader", value, ttl=self.ttl, prevExist=False)
            return True
        except etcd.EtcdAlreadyExist:
            logger.info("Could not aquire leader: already exists")
            return False
    def update_leader(self, state_handler):
        """Refresh the leader key TTL (guarded so only the current leader
        can refresh it) and publish the leader's last operation position
        under ``/optime/leader``.  Returns False on failure."""
        try:
            self.set("/leader", state_handler.name, ttl=self.ttl, prevValue=state_handler.name)
            self.set("/optime/leader", state_handler.last_operation())
        except ValueError:
            logger.error("Error updating leader lock and optime on ETCD for primary.")
            return False
    def last_leader_operation(self):
        """Return the leader's last published operation position as an int,
        or None when the key is missing."""
        try:
            return int(self.get("/optime/leader"))
        except etcd.EtcdKeyNotFound:
            logger.error("Error reading TTL on ETCD for primary.")
            return None
    def leader_unlocked(self):
        """True when the /leader key does not exist (leadership is free)."""
        # BUG FIX: removed an unreachable trailing `return False` that
        # followed this try/except — both paths already return.
        try:
            self.get("/leader")
            return False
        except etcd.EtcdKeyNotFound:
            return True
    def am_i_leader(self, value):
        """True when the /leader key currently holds `value`."""
        leader = self.get("/leader")
        logger.info("Lock owner: %s; I am %s", leader, value)
        return leader == value
    def race(self, path, value):
        """Atomically create `path` with `value`.  True if we won the race,
        False when another node created it first."""
        try:
            self.set(path, value, prevExist=False)
            return True
        except etcd.EtcdAlreadyExist:
            return False
| Tapjoy/governor | helpers/keystore.py | Python | mit | 4,071 |
import unittest2
import webtest
import json
import webapp2
from datetime import datetime
from google.appengine.ext import ndb
from google.appengine.ext import testbed
from consts.district_type import DistrictType
from consts.event_type import EventType
from controllers.api.api_event_controller import ApiEventController
from controllers.api.api_event_controller import ApiEventTeamsController
from controllers.api.api_event_controller import ApiEventMatchesController
from controllers.api.api_event_controller import ApiEventStatsController
from controllers.api.api_event_controller import ApiEventListController
from controllers.api.api_event_controller import ApiEventRankingsController
from models.event import Event
from models.event_team import EventTeam
from models.match import Match
from models.team import Team
class TestEventApiController(unittest2.TestCase):
    """Exercises ApiEventController (GET /<event_key>) against stubbed
    App Engine services, verifying the serialized event JSON."""
    def setUp(self):
        # Route the test WSGI app directly at the controller under test.
        app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventController, methods=['GET'])], debug=True)
        self.testapp = webtest.TestApp(app)
        # Stub out datastore, urlfetch, memcache and taskqueue.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub(root_path=".")
        # Fixture event with webcast and alliance selections populated.
        self.event = Event(
                id="2010sc",
                name="Palmetto Regional",
                event_type_enum=EventType.REGIONAL,
                event_district_enum=DistrictType.NO_DISTRICT,
                short_name="Palmetto",
                event_short="sc",
                year=2010,
                end_date=datetime(2010, 03, 27),
                official=True,
                location='Clemson, SC',
                venue="Long Beach Arena",
                venue_address="Long Beach Arena\r\n300 East Ocean Blvd\r\nLong Beach, CA 90802\r\nUSA",
                start_date=datetime(2010, 03, 24),
                webcast_json="[{\"type\": \"twitch\", \"channel\": \"frcgamesense\"}]",
                alliance_selections_json="[ {\"declines\": [], \"picks\": [\"frc971\", \"frc254\", \"frc1662\"]},"+
                                            "{\"declines\": [], \"picks\": [\"frc1678\", \"frc368\", \"frc4171\"]},"+
                                            "{\"declines\": [], \"picks\": [\"frc2035\", \"frc192\", \"frc4990\"]},"+
                                            "{\"declines\": [], \"picks\": [\"frc1323\", \"frc846\", \"frc2135\"]},"+
                                            "{\"declines\": [], \"picks\": [\"frc2144\", \"frc1388\", \"frc668\"]},"+
                                            "{\"declines\": [], \"picks\": [\"frc1280\", \"frc604\", \"frc100\"]},"+
                                            "{\"declines\": [], \"picks\": [\"frc114\", \"frc852\", \"frc841\"]},"+
                                            "{\"declines\": [], \"picks\": [\"frc2473\", \"frc3256\", \"frc1868\"]}]",
                website="http://www.firstsv.org",
        )
        self.event.put()
    def tearDown(self):
        self.testbed.deactivate()
    def assertEventJson(self, event):
        """Assert the serialized `event` dict matches the fixture model."""
        self.assertEqual(event["key"], self.event.key_name)
        self.assertEqual(event["name"], self.event.name)
        self.assertEqual(event["short_name"], self.event.short_name)
        self.assertEqual(event["official"], self.event.official)
        self.assertEqual(event["event_type_string"], self.event.event_type_str)
        self.assertEqual(event["event_type"], self.event.event_type_enum)
        self.assertEqual(event["event_district_string"], self.event.event_district_str)
        self.assertEqual(event["event_district"], self.event.event_district_enum)
        self.assertEqual(event["start_date"], self.event.start_date.date().isoformat())
        self.assertEqual(event["end_date"], self.event.end_date.date().isoformat())
        self.assertEqual(event["location"], self.event.location)
        # The API normalizes CRLF line endings to LF.
        self.assertEqual(event["venue_address"], self.event.venue_address.replace('\r\n', '\n'))
        self.assertEqual(event["webcast"], json.loads(self.event.webcast_json))
        self.assertEqual(event["alliances"], json.loads(self.event.alliance_selections_json))
        self.assertEqual(event["website"], self.event.website)
    def testEventApi(self):
        response = self.testapp.get('/2010sc', headers={"X-TBA-App-Id": "tba-tests:event-controller-test:v01"})
        event_dict = json.loads(response.body)
        self.assertEventJson(event_dict)
class TestEventTeamsApiController(unittest2.TestCase):
    """Exercises ApiEventTeamsController (GET /<event_key>), verifying
    the teams attending an event are serialized correctly."""
    def setUp(self):
        app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventTeamsController, methods=['GET'])], debug=True)
        self.testapp = webtest.TestApp(app)
        # Stub out datastore, urlfetch, memcache and taskqueue.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub(root_path=".")
        self.event = Event(
                id="2010sc",
                name="Palmetto Regional",
                event_type_enum=EventType.REGIONAL,
                short_name="Palmetto",
                event_short="sc",
                year=2010,
                end_date=datetime(2010, 03, 27),
                official=True,
                location='Clemson, SC',
                start_date=datetime(2010, 03, 24),
        )
        self.event.put()
        self.team = Team(
                id="frc281",
                name="Michelin / Caterpillar / Greenville Technical College /\
                jcpenney / Baldor / ASME / Gastroenterology Associates /\
                Laserflex South & Greenville County Schools & Greenville\
                Technical Charter High School",
                team_number=281,
                nickname="EnTech GreenVillians",
                address="Greenville, SC, USA",
                website="www.entech.org",
        )
        self.team.put()
        # EventTeam join entity links the team to the event.
        self.event_team = EventTeam(
                team=self.team.key,
                event=self.event.key,
                year=datetime.now().year
        )
        self.event_team.put()
    def tearDown(self):
        self.testbed.deactivate()
    def assertTeamJson(self, team):
        """Assert the first serialized team matches the fixture model."""
        team = team[0]
        self.assertEqual(team["key"], self.team.key_name)
        self.assertEqual(team["team_number"], self.team.team_number)
        self.assertEqual(team["nickname"], self.team.nickname)
        self.assertEqual(team["location"], self.team.location)
        # Locality/region/country are derived from the address field.
        self.assertEqual(team["locality"], "Greenville")
        self.assertEqual(team["country_name"], "USA")
        self.assertEqual(team["region"], "SC")
        self.assertEqual(team["website"], self.team.website)
    def testEventTeamsApi(self):
        response = self.testapp.get('/2010sc', headers={"X-TBA-App-Id": "tba-tests:event-controller-test:v01"})
        team_dict = json.loads(response.body)
        self.assertTeamJson(team_dict)
class TestEventMatchApiController(unittest2.TestCase):
    """Exercises ApiEventMatchesController (GET /<event_key>), verifying
    the match list serialization for an event."""
    def setUp(self):
        app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventMatchesController, methods=['GET'])], debug=True)
        self.testapp = webtest.TestApp(app)
        # Stub out datastore, urlfetch, memcache and taskqueue.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub(root_path=".")
        self.event = Event(
                id="2010sc",
                name="Palmetto Regional",
                event_type_enum=EventType.REGIONAL,
                short_name="Palmetto",
                event_short="sc",
                year=2010,
                end_date=datetime(2010, 03, 27),
                official=True,
                location='Clemson, SC',
                start_date=datetime(2010, 03, 24),
        )
        self.event.put()
        # One qualification match with videos and a fixed timestamp.
        self.match = Match(
            id="2010sc_qm1",
            alliances_json="""{"blue": {"score": 57, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": 74, "teams": ["frc281", "frc571", "frc176"]}}""",
            comp_level="qm",
            event=self.event.key,
            game="frc_2012_rebr",
            set_number=1,
            match_number=1,
            team_key_names=[u'frc281', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073'],
            youtube_videos=["94UGXIq6jUA"],
            tba_videos=[".mp4"],
            time=datetime.fromtimestamp(1409527874)
        )
        self.match.put()
    def tearDown(self):
        self.testbed.deactivate()
    def assertMatchJson(self, matches):
        """Assert the first serialized match matches the fixture model."""
        match = matches[0]
        self.assertEqual(str(match["key"]), self.match.key.string_id())
        self.assertEqual(match["comp_level"], self.match.comp_level)
        self.assertEqual(match["event_key"], self.match.event.string_id())
        self.assertEqual(match["set_number"], self.match.set_number)
        self.assertEqual(match["match_number"], self.match.match_number)
        self.assertEqual(match["videos"], self.match.videos)
        self.assertEqual(match["time_string"], self.match.time_string)
        # Times are serialized as Unix timestamps.
        self.assertEqual(match["time"], 1409527874)
    def testEventMatchApi(self):
        response = self.testapp.get('/2010sc', headers={"X-TBA-App-Id": "tba-tests:event-controller-test:v01"})
        match_json = json.loads(response.body)
        self.assertMatchJson(match_json)
class TestEventStatsApiController(unittest2.TestCase):
    """Exercises ApiEventStatsController (GET /<event_key>), verifying
    the matchstats JSON (OPR/DPR/CCWM) round-trips unchanged."""
    def setUp(self):
        app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventStatsController, methods=['GET'])], debug=True)
        self.testapp = webtest.TestApp(app)
        # Stub out datastore, urlfetch, memcache and taskqueue.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub(root_path=".")
        # Pre-computed stats blob stored verbatim on the event.
        self.matchstats = {
            "dprs": {"971": 10.52178695299036, "114": 23.7313645955704, "115": 29.559784481082044},
            "oprs": {"971": 91.42946669932006, "114": 59.27751047482864, "115": 13.285278757495144},
            "ccwms": {"971": 80.90767974632955, "114": 35.54614587925829, "115": -16.27450572358693},
        }
        self.event = Event(
                id="2010sc",
                name="Palmetto Regional",
                event_type_enum=EventType.REGIONAL,
                short_name="Palmetto",
                event_short="sc",
                year=2010,
                end_date=datetime(2010, 03, 27),
                official=True,
                location='Clemson, SC',
                start_date=datetime(2010, 03, 24),
                matchstats_json=json.dumps(self.matchstats)
        )
        self.event.put()
    def tearDown(self):
        self.testbed.deactivate()
    def testEventStatsApi(self):
        response = self.testapp.get('/2010sc', headers={"X-TBA-App-Id": "tba-tests:event-controller-test:v01"})
        matchstats = json.loads(response.body)
        self.assertEqual(self.matchstats, matchstats)
class TestEventRankingsApiController(unittest2.TestCase):
    """Exercises ApiEventRankingsController (GET /<event_key>) for events
    both with and without stored rankings."""
    def setUp(self):
        app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventRankingsController, methods=['GET'])], debug=True)
        self.testapp = webtest.TestApp(app)
        # Stub out datastore, urlfetch, memcache and taskqueue.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub(root_path=".")
        # Rankings are stored as a header row followed by data rows.
        self.rankings = [
            ["Rank", "Team", "QS", "ASSIST", "AUTO", "T&C", "TELEOP", "Record (W-L-T)", "DQ", "PLAYED"],
            ["1", "1126", "20.00", "240.00", "480.00", "230.00", "478.00", "10-2-0", "0", "12"],
            ["2", "5030", "20.00", "200.00", "290.00", "220.00", "592.00", "10-2-0", "0", "12"],
            ["3", "250", "20.00", "70.00", "415.00", "220.00", "352.00", "10-2-0", "0", "12"]
        ]
        self.event = Event(
                id="2010sc",
                name="Palmetto Regional",
                event_type_enum=EventType.REGIONAL,
                short_name="Palmetto",
                event_short="sc",
                year=2010,
                end_date=datetime(2010, 03, 27),
                official=True,
                location='Clemson, SC',
                start_date=datetime(2010, 03, 24),
                rankings_json=json.dumps(self.rankings)
        )
        self.event.put()
        # Second event deliberately has no rankings_json.
        self.eventNoRanks = Event(
            id="2010ct",
            name="Palmetto Regional",
            event_type_enum=EventType.REGIONAL,
            short_name="Palmetto",
            event_short="ct",
            year=2010,
            end_date=datetime(2010, 03, 27),
            official=True,
            location='Clemson, SC',
            start_date=datetime(2010, 03, 24),
        )
        self.eventNoRanks.put()
    def tearDown(self):
        self.testbed.deactivate()
    def testEventRankingsApi(self):
        response = self.testapp.get('/2010sc', headers={"X-TBA-App-Id": "tba-tests:event-controller-test:v01"})
        rankings = json.loads(response.body)
        self.assertEqual(self.rankings, rankings)
    def testEventNoRankingsApi(self):
        # An event without rankings serializes to an empty JSON list.
        response = self.testapp.get('/2010ct', headers={"X-TBA-App-Id": "tba-tests:event-controller-test:v01"})
        self.assertEqual("[]", response.body)
class TestEventListApiController(unittest2.TestCase):
    """Exercises ApiEventListController (GET /<year>), verifying the
    per-year event list serialization."""
    def setUp(self):
        app = webapp2.WSGIApplication([webapp2.Route(r'/<year:>', ApiEventListController, methods=['GET'])], debug=True)
        self.testapp = webtest.TestApp(app)
        # Stub out datastore, urlfetch, memcache and taskqueue.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub(root_path=".")
        self.event = Event(
                id="2010sc",
                name="Palmetto Regional",
                event_type_enum=EventType.REGIONAL,
                short_name="Palmetto",
                event_short="sc",
                year=2010,
                end_date=datetime(2010, 03, 27),
                official=True,
                location='Clemson, SC',
                start_date=datetime(2010, 03, 24),
        )
        self.event.put()
    def tearDown(self):
        self.testbed.deactivate()
    def assertEventJson(self, event):
        """Assert the abbreviated list-view serialization of the fixture."""
        self.assertEqual(event["key"], self.event.key_name)
        self.assertEqual(event["name"], self.event.name)
        self.assertEqual(event["short_name"], self.event.short_name)
        self.assertEqual(event["official"], self.event.official)
        self.assertEqual(event["start_date"], self.event.start_date.date().isoformat())
        self.assertEqual(event["end_date"], self.event.end_date.date().isoformat())
    def testEventListApi(self):
        response = self.testapp.get('/2010', headers={"X-TBA-App-Id": "tba-tests:event-controller-test:v01"})
        event_dict = json.loads(response.body)
        self.assertEventJson(event_dict[0])
| 1fish2/the-blue-alliance | tests/test_apiv2_event_controller.py | Python | mit | 15,390 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2017 F5 Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Ansible module metadata: community-supported module in preview status.
ANSIBLE_METADATA = {
    'status': ['preview'],
    'supported_by': 'community',
    'metadata_version': '1.0'
}
DOCUMENTATION = '''
---
module: bigip_iapp_service
short_description: Manages TCL iApp services on a BIG-IP.
description:
- Manages TCL iApp services on a BIG-IP.
version_added: "2.4"
options:
name:
description:
- The name of the iApp service that you want to deploy.
required: True
template:
description:
- The iApp template from which to instantiate a new service. This
template must exist on your BIG-IP before you can successfully
create a service. This parameter is required if the C(state)
parameter is C(present).
parameters:
description:
- A hash of all the required template variables for the iApp template.
If your parameters are stored in a file (the more common scenario)
it is recommended you use either the `file` or `template` lookups
to supply the expected parameters.
force:
description:
- Forces the updating of an iApp service even if the parameters to the
service have not changed. This option is of particular importance if
the iApp template that underlies the service has been updated in-place.
This option is equivalent to re-configuring the iApp if that template
has changed.
default: False
state:
description:
- When C(present), ensures that the iApp service is created and running.
When C(absent), ensures that the iApp service has been removed.
default: present
choices:
- present
- absent
notes:
- Requires the f5-sdk Python package on the host. This is as easy as pip
install f5-sdk.
- Requires the deepdiff Python package on the host. This is as easy as pip
install f5-sdk.
requirements:
- f5-sdk
- deepdiff
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = '''
- name: Create HTTP iApp service from iApp template
bigip_iapp_service:
name: "foo-service"
template: "f5.http"
parameters: "{{ lookup('file', 'f5.http.parameters.json') }}"
password: "secret"
server: "lb.mydomain.com"
state: "present"
user: "admin"
delegate_to: localhost
- name: Upgrade foo-service to v1.2.0rc4 of the f5.http template
bigip_iapp_service:
name: "foo-service"
template: "f5.http.v1.2.0rc4"
password: "secret"
server: "lb.mydomain.com"
state: "present"
user: "admin"
delegate_to: localhost
- name: Configure a service using parameters in YAML
bigip_iapp_service:
name: "tests"
template: "web_frontends"
password: "admin"
server: "{{ inventory_hostname }}"
server_port: "{{ bigip_port }}"
validate_certs: "{{ validate_certs }}"
state: "present"
user: "admin"
parameters:
variables:
- name: "var__vs_address"
value: "1.1.1.1"
- name: "pm__apache_servers_for_http"
value: "2.2.2.1:80"
- name: "pm__apache_servers_for_https"
value: "2.2.2.2:80"
delegate_to: localhost
- name: Re-configure a service whose underlying iApp was updated in place
bigip_iapp_service:
name: "tests"
template: "web_frontends"
password: "admin"
force: yes
server: "{{ inventory_hostname }}"
server_port: "{{ bigip_port }}"
validate_certs: "{{ validate_certs }}"
state: "present"
user: "admin"
parameters:
variables:
- name: "var__vs_address"
value: "1.1.1.1"
- name: "pm__apache_servers_for_http"
value: "2.2.2.1:80"
- name: "pm__apache_servers_for_https"
value: "2.2.2.2:80"
delegate_to: localhost
'''
RETURN = '''
# only common fields returned
'''
from ansible.module_utils.f5_utils import (
AnsibleF5Client,
AnsibleF5Parameters,
HAS_F5SDK,
F5ModuleError,
iteritems,
iControlUnexpectedHTTPError
)
from deepdiff import DeepDiff
class Parameters(AnsibleF5Parameters):
    """Normalizes module input into the shape the BIG-IP iApp service
    REST API expects (tables/variables/lists/template).

    The property getters coerce every key/value to str and sort by name
    so that comparisons against the device state are stable.
    """
    returnables = []
    # Attributes that are sent to the REST API.
    api_attributes = [
        'tables', 'variables', 'template', 'lists'
    ]
    # Attributes compared when deciding whether an update is needed.
    updatables = ['tables', 'variables', 'lists']
    def to_return(self):
        """Return the subset of parameters reported back to Ansible."""
        result = {}
        for returnable in self.returnables:
            result[returnable] = getattr(self, returnable)
        result = self._filter_params(result)
        return result
    def api_params(self):
        """Build the REST payload, honoring api_map attribute aliases."""
        result = {}
        for api_attribute in self.api_attributes:
            if self.api_map is not None and api_attribute in self.api_map:
                result[api_attribute] = getattr(self, self.api_map[api_attribute])
            else:
                result[api_attribute] = getattr(self, api_attribute)
        result = self._filter_params(result)
        return result
    @property
    def tables(self):
        """Normalized, name-sorted iApp tables, or None when unset.

        Raises F5ModuleError when a table is missing its 'name'.
        """
        result = []
        if not self._values['tables']:
            return None
        tables = self._values['tables']
        for table in tables:
            tmp = dict()
            name = table.get('name', None)
            if name is None:
                raise F5ModuleError(
                    "One of the provided tables does not have a name"
                )
            tmp['name'] = str(name)
            columns = table.get('columnNames', None)
            if columns:
                tmp['columnNames'] = [str(x) for x in columns]
                # You cannot have rows without columns
                rows = table.get('rows', None)
                if rows:
                    tmp['rows'] = []
                    for row in rows:
                        tmp['rows'].append(dict(row=[str(x) for x in row['row']]))
            result.append(tmp)
        result = sorted(result, key=lambda k: k['name'])
        return result
    @tables.setter
    def tables(self, value):
        self._values['tables'] = value
    @property
    def variables(self):
        """Normalized, name-sorted iApp variables, or None when unset."""
        result = []
        if not self._values['variables']:
            return None
        variables = self._values['variables']
        for variable in variables:
            tmp = dict((str(k), str(v)) for k, v in iteritems(variable))
            if 'encrypted' not in tmp:
                # BIG-IP will inject an 'encrypted' key if you don't provide one.
                # If you don't provide one, then we give you the default 'no', by
                # default.
                tmp['encrypted'] = 'no'
            if 'value' not in tmp:
                tmp['value'] = ''
            # This seems to happen only on 12.0.0
            elif tmp['value'] == 'none':
                tmp['value'] = ''
            result.append(tmp)
        result = sorted(result, key=lambda k: k['name'])
        return result
    @variables.setter
    def variables(self, value):
        self._values['variables'] = value
    @property
    def lists(self):
        """Normalized, name-sorted iApp list values, or None when unset."""
        result = []
        if not self._values['lists']:
            return None
        lists = self._values['lists']
        for list in lists:
            tmp = dict((str(k), str(v)) for k, v in iteritems(list) if k != 'value')
            if 'encrypted' not in list:
                # BIG-IP will inject an 'encrypted' key if you don't provide one.
                # If you don't provide one, then we give you the default 'no', by
                # default.
                tmp['encrypted'] = 'no'
            if 'value' in list:
                if len(list['value']) > 0:
                    # BIG-IP removes empty values entries, so mimic this behavior
                    # for user-supplied values.
                    tmp['value'] = [str(x) for x in list['value']]
            result.append(tmp)
        result = sorted(result, key=lambda k: k['name'])
        return result
    @lists.setter
    def lists(self, value):
        self._values['lists'] = value
    @property
    def parameters(self):
        # Aggregate view of the three parameter collections.
        return dict(
            tables=self.tables,
            variables=self.variables,
            lists=self.lists
        )
    @parameters.setter
    def parameters(self, value):
        # Split the user-supplied 'parameters' dict into its parts.
        if value is None:
            return
        if 'tables' in value:
            self.tables = value['tables']
        if 'variables' in value:
            self.variables = value['variables']
        if 'lists' in value:
            self.lists = value['lists']
    @property
    def template(self):
        """Template name, fully qualified with the partition prefix."""
        if self._values['template'] is None:
            return None
        if self._values['template'].startswith("/" + self.partition):
            return self._values['template']
        elif self._values['template'].startswith("/"):
            return self._values['template']
        else:
            return '/{0}/{1}'.format(
                self.partition, self._values['template']
            )
    @template.setter
    def template(self, value):
        self._values['template'] = value
class ModuleManager(object):
    """Orchestrates the create/update/delete lifecycle of an iApp
    service against the BIG-IP REST API."""
    def __init__(self, client):
        self.client = client
        self.have = None                                   # device state
        self.want = Parameters(self.client.module.params)  # desired state
        self.changes = Parameters()                        # reported diff
    def _set_changed_options(self):
        # On create: everything the user specified counts as a change.
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = Parameters(changed)
    def _update_changed_options(self):
        """Diff desired vs device state; True when an update is needed."""
        changed = {}
        for key in Parameters.updatables:
            if getattr(self.want, key) is not None:
                attr1 = getattr(self.want, key)
                attr2 = getattr(self.have, key)
                if attr1 != attr2:
                    # Store a human-readable diff for the changed key.
                    changed[key] = str(DeepDiff(attr1, attr2))
        if changed:
            self.changes = Parameters(changed)
            return True
        return False
    def exec_module(self):
        """Entry point: dispatch on state and build the Ansible result."""
        changed = False
        result = dict()
        state = self.want.state
        try:
            if state == "present":
                changed = self.present()
            elif state == "absent":
                changed = self.absent()
        except iControlUnexpectedHTTPError as e:
            raise F5ModuleError(str(e))
        changes = self.changes.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        return result
    def exists(self):
        # Does the iApp service already exist on the device?
        result = self.client.api.tm.sys.application.services.service.exists(
            name=self.want.name,
            partition=self.want.partition
        )
        return result
    def present(self):
        if self.exists():
            return self.update()
        else:
            return self.create()
    def create(self):
        self._set_changed_options()
        if self.client.check_mode:
            # Check mode: report the would-be change without applying it.
            return True
        self.create_on_device()
        return True
    def update(self):
        self.have = self.read_current_from_device()
        # 'force' re-deploys even when no parameter differences exist.
        if not self.should_update() and not self.want.force:
            return False
        if self.client.check_mode:
            return True
        self.update_on_device()
        return True
    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False
    def update_on_device(self):
        params = self.want.api_params()
        # Forces the iApp to be redeployed from its template definition.
        params['execute-action'] = 'definition'
        resource = self.client.api.tm.sys.application.services.service.load(
            name=self.want.name,
            partition=self.want.partition
        )
        resource.update(**params)
    def read_current_from_device(self):
        result = self.client.api.tm.sys.application.services.service.load(
            name=self.want.name,
            partition=self.want.partition
        ).to_dict()
        result.pop('_meta_data', None)
        return Parameters(result)
    def create_on_device(self):
        params = self.want.api_params()
        self.client.api.tm.sys.application.services.service.create(
            name=self.want.name,
            partition=self.want.partition,
            **params
        )
    def absent(self):
        if self.exists():
            return self.remove()
        return False
    def remove(self):
        if self.client.check_mode:
            return True
        self.remove_from_device()
        # Verify the deletion actually took effect.
        if self.exists():
            raise F5ModuleError("Failed to delete the iApp service")
        return True
    def remove_from_device(self):
        resource = self.client.api.tm.sys.application.services.service.load(
            name=self.want.name,
            partition=self.want.partition
        )
        if resource:
            resource.delete()
class ArgumentSpec(object):
    """Argument specification for the bigip_iapp_service module."""

    def __init__(self):
        # The module supports check (dry-run) mode.
        self.supports_check_mode = True
        # Accepted module options; see DOCUMENTATION for semantics.
        self.argument_spec = {
            'name': {'required': True},
            'template': {},
            'parameters': {'type': 'dict'},
            'state': {
                'default': 'present',
                'choices': ['absent', 'present'],
            },
            'force': {'default': False, 'type': 'bool'},
        }
        self.f5_product_name = 'bigip'
def main():
    """Module entry point: build the F5 client, run the manager, and
    report the result (or failure) back to Ansible."""
    if not HAS_F5SDK:
        raise F5ModuleError("The python f5-sdk module is required")
    spec = ArgumentSpec()
    client = AnsibleF5Client(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
        f5_product_name=spec.f5_product_name
    )
    try:
        mm = ModuleManager(client)
        results = mm.exec_module()
        client.module.exit_json(**results)
    except F5ModuleError as e:
        # Surface any module error as an Ansible task failure.
        client.module.fail_json(msg=str(e))
if __name__ == '__main__':
    main()
| rcarrillocruz/ansible | lib/ansible/modules/network/f5/bigip_iapp_service.py | Python | gpl-3.0 | 14,587 |
#!/usr/bin/env python
"""Packaging script for jarvy."""
import os
import re
import sys
from codecs import open
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
# Shortcut: `python setup.py publish` builds and uploads an sdist.
if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    sys.exit()
packages = [
    'jarvy',
    'jarvy.packages',
    'jarvy.packages.google',
]
requires = ['beautifulsoup4', 'google']
# Read the version out of jarvy/__init__.py so it lives in one place.
version = ''
with open('jarvy/__init__.py', 'r') as fd:
    version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1)
if not version:
    raise RuntimeError('Cannot find version information')
with open('README.rst', 'r', 'utf-8') as f:
    readme = f.read()
with open('HISTORY.rst', 'r', 'utf-8') as f:
    history = f.read()
setup(
    name='jarvy',
    version=version,
    description='Python Intelligent Assistant for Humans.',
    long_description=readme + '\n\n' + history,
    author='Semih Yagcioglu',
    author_email='semihyagcioglu@yahoo.com',
    url='http://github.com/jarvy/jarvy',
    packages=packages,
    package_data={'': ['LICENSE']},
    # BUG FIX: previously {'jarvis': 'jarvis'}, which names a package
    # that does not exist in this project (the package is 'jarvy').
    package_dir={'jarvy': 'jarvy'},
    include_package_data=True,
    install_requires=requires,
    license='MIT',
    zip_safe=False,
    classifiers=(
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7'
    ),
)
| jarvy/jarvy | setup.py | Python | mit | 1,566 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) cgstudiomap <cgstudiomap@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_res_partner
from . import test_res_industry
| cgstudiomap/cgstudiomap | main/local_modules/frontend_base/tests/__init__.py | Python | agpl-3.0 | 1,050 |
import jobinfo
__author__ = 'Masataka'
class JobMaya(jobinfo.JobInfo):
    """Base class for Maya render jobs; adds no behaviour over JobInfo."""
    def __init__(self, param):
        # Delegate all initialisation to the generic job container.
        jobinfo.JobInfo.__init__(self, param)
class JobMayaSw(JobMaya):
    """Maya job variant -- presumably the Maya Software renderer (TODO confirm)."""
    def __init__(self, param):
        JobMaya.__init__(self, param)
class JobMayaMr(JobMaya):
    """Maya job variant -- presumably the mental ray renderer (TODO confirm)."""
    def __init__(self, param):
        JobMaya.__init__(self, param)
class JobMayaFile(JobMaya):
    """Maya job variant -- presumably uses the renderer recorded in the scene file (TODO confirm)."""
    def __init__(self, param):
        JobMaya.__init__(self, param)
| plinecom/JobManager | job/_maya.py | Python | mit | 444 |
from ethronsoft.gcspypi.package.package_manager import PackageManager
from ethronsoft.gcspypi.utilities.console import Console
from ethronsoft.gcspypi.parsers.commons import init_repository
def handle_(config, data):
    """Run one repository search per query in ``data["syntax"]``.

    Opens a Console (verbosity taken from *config*), initialises the
    repository named by ``config["repository"]``, and prints every
    package that matches a query.
    """
    verbose = config.get("verbose", False)
    with Console(verbose=verbose, exit_on_error=True) as console:
        repository = init_repository(console, config["repository"])
        manager = PackageManager(repository, console=console)
        for query in data["syntax"]:
            match = manager.search(query)
            if match:
                console.output(match)
class SearchParser(object):
    """Registers and dispatches the ``search`` sub-command."""

    def __init__(self, subparsers):
        """Attach the ``search`` sub-command to an argparse subparsers object."""
        self.name = "search"
        sub = subparsers.add_parser(
            self.name,
            description="Search for packages in the GCS repository. View syntax using command syntax")
        sub.add_argument("syntax", nargs="+", help="Search syntax")

    def handle(self, config, data):
        """Delegate execution to the module-level handler."""
        handle_(config, data)
"""
A Cobbler System.
Copyright 2006-2009, Red Hat, Inc
Michael DeHaan <mdehaan@redhat.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import utils
import item
import time
from cexceptions import *
from utils import _
# this datastructure is described in great detail in item_distro.py -- read the comments there.
FIELDS = [
["name","",0,"Name",True,"Ex: vanhalen.example.org",0,"str"],
["uid","",0,"",False,"",0,"str"],
["owners","SETTINGS:default_ownership",0,"Owners",True,"Owners list for authz_ownership (space delimited)",0,"list"],
["profile",None,0,"Profile",True,"Parent profile",[],"str"],
["image",None,0,"Image",True,"Parent image (if not a profile)",0,"str"],
["kernel_options",{},0,"Kernel Options",True,"Ex: selinux=permissive",0,"dict"],
["kernel_options_post",{},0,"Kernel Options (Post Install)",True,"Ex: clocksource=pit noapic",0,"dict"],
["ks_meta",{},0,"Kickstart Metadata",True,"Ex: dog=fang agent=86",0,"dict"],
["netboot_enabled",True,0,"Netboot Enabled",True,"PXE (re)install this machine at next boot?",0,"bool"],
["kickstart","<<inherit>>",0,"Kickstart",True,"Path to kickstart template",0,"str"],
["comment","",0,"Comment",True,"Free form text description",0,"str"],
["depth",2,0,"",False,"",0,"int"],
["server","<<inherit>>",0,"Server Override",True,"See manpage or leave blank",0,"str"],
["virt_path","<<inherit>>",0,"Virt Path",True,"Ex: /directory or VolGroup00",0,"str"],
["virt_type","<<inherit>>",0,"Virt Type",True,"Virtualization technology to use",["xenpv","xenfv","qemu","vmware"],"str"],
["virt_cpus","<<inherit>>",0,"Virt CPUs",True,"",0,"int"],
["virt_file_size","<<inherit>>",0,"Virt File Size(GB)",True,"",0,"float"],
["virt_ram","<<inherit>>",0,"Virt RAM (MB)",True,"",0,"int"],
["virt_auto_boot","<<inherit>>",0,"Virt Auto Boot",True,"Auto boot this VM?",0,"bool"],
["ctime",0,0,"",False,"",0,"float"],
["mtime",0,0,"",False,"",0,"float"],
["power_type","SETTINGS:power_management_default_type",0,"Power Management Type",True,"",utils.get_power_types(),"str"],
["power_address","",0,"Power Management Address",True,"Ex: power-device.example.org",0,"str"],
["power_user","",0,"Power Username ",True,"",0,"str"],
["power_pass","",0,"Power Password",True,"",0,"str"],
["power_id","",0,"Power ID",True,"Usually a plug number or blade name, if power type requires it",0,"str"],
["hostname","",0,"Hostname",True,"",0,"str"],
["gateway","",0,"Gateway",True,"",0,"str"],
["name_servers",[],0,"Name Servers",True,"space delimited",0,"list"],
["name_servers_search",[],0,"Name Servers Search Path",True,"space delimited",0,"list"],
["network_widget_a","",0,"Add Interface",True,"",0,"str"], # not a real field, a marker for the web app
["network_widget_b","",0,"Edit Interface",True,"",0,"str"], # not a real field, a marker for the web app
["*mac_address","",0,"MAC Address",True,"",0,"str"],
["*ip_address","",0,"IP Address",True,"",0,"str"],
["*bonding","na",0,"Bonding Mode",True,"",["na","master","slave"],"str"],
["*bonding_master","",0,"Bonding Master",True,"",0,"str"],
["*bonding_opts","",0,"Bonding Opts",True,"",0,"str"],
["*static",False,0,"Static",True,"Is this interface static?",0,"bool"],
["*subnet","",0,"Subnet",True,"",0,"str"],
["*dhcp_tag","",0,"DHCP Tag",True,"",0,"str"],
["*dns_name","",0,"DNS Name",True,"",0,"str"],
["*static_routes",[],0,"Static Routes",True,"",0,"list"],
["*virt_bridge","",0,"Virt Bridge",True,"",0,"str"],
["mgmt_classes",[],0,"Management Classes",True,"For external config management",0,"list"],
["template_files",{},0,"Template Files",True,"File mappings for built-in configuration management",0,"dict"],
["redhat_management_key","<<inherit>>",0,"Red Hat Management Key",True,"Registration key for RHN, Satellite, or Spacewalk",0,"str"],
["redhat_management_server","<<inherit>>",0,"Red Hat Management Server",True,"Address of Satellite or Spacewalk Server",0,"str"],
]
class System(item.Item):
TYPE_NAME = _("system")
COLLECTION_TYPE = "system"
def get_fields(self):
return FIELDS
def make_clone(self):
ds = self.to_datastruct()
cloned = System(self.config)
cloned.from_datastruct(ds)
return cloned
def delete_interface(self,name):
"""
Used to remove an interface.
"""
if self.interfaces.has_key(name) and len(self.interfaces) > 1:
del self.interfaces[name]
else:
if not self.interfaces.has_key(name):
# no interface here to delete
pass
else:
raise CX(_("At least one interface needs to be defined."))
return True
    def __get_interface(self,name):
        """
        Return the config hash for interface *name*, lazily creating it
        with empty defaults on first access -- interfaces spring into
        existence the first time any per-interface setter touches them.
        """
        if not self.interfaces.has_key(name):
            self.interfaces[name] = {
                "mac_address" : "",
                "ip_address" : "",
                "dhcp_tag" : "",
                "subnet" : "",
                "virt_bridge" : "",
                "static" : False,
                "bonding" : "",
                "bonding_master" : "",
                "bonding_opts" : "",
                "dns_name" : "",
                "static_routes" : [],
            }
        return self.interfaces[name]
def from_datastruct(self,seed_data):
# FIXME: most definitely doesn't grok interfaces yet.
return utils.from_datastruct_from_fields(self,seed_data,FIELDS)
    def get_parent(self):
        """
        Return object next highest up the tree.

        A system hangs off either a profile or an image (they are
        mutually exclusive elsewhere in this class); self.parent is only
        consulted when neither is set.
        """
        if (self.parent is None or self.parent == '') and self.profile:
            return self.config.profiles().find(name=self.profile)
        elif (self.parent is None or self.parent == '') and self.image:
            return self.config.images().find(name=self.image)
        else:
            return self.config.systems().find(name=self.parent)
    def set_name(self,name):
        """
        Set the name. If the name is a MAC or IP, and the first MAC and/or IP is not defined, go ahead
        and fill that value in.

        Raises:
            CX: on self-parentage, a non-string name, or characters
                outside [a-zA-Z0-9_-.:+].
        """
        if self.name not in ["",None] and self.parent not in ["",None] and self.name == self.parent:
            raise CX(_("self parentage is weird"))
        if not isinstance(name, basestring):
            raise CX(_("name must be a string"))
        # validate every character before accepting the name
        for x in name:
            if not x.isalnum() and not x in [ "_", "-", ".", ":", "+" ] :
                raise CX(_("invalid characters in name: %s") % x)
        # Stuff here defaults to eth0. Yes, it's ugly and hardcoded, but so was
        # the default interface behaviour that's now removed. ;)
        # --Jasper Capel
        if utils.is_mac(name):
            intf = self.__get_interface("eth0")
            if intf["mac_address"] == "":
                intf["mac_address"] = name
        elif utils.is_ip(name):
            intf = self.__get_interface("eth0")
            if intf["ip_address"] == "":
                intf["ip_address"] = name
        self.name = name
        return True
def set_redhat_management_key(self,key):
return utils.set_redhat_management_key(self,key)
def set_redhat_management_server(self,server):
return utils.set_redhat_management_server(self,server)
def set_server(self,server):
"""
If a system can't reach the boot server at the value configured in settings
because it doesn't have the same name on it's subnet this is there for an override.
"""
if server is None or server == "":
server = "<<inherit>>"
self.server = server
return True
def get_mac_address(self,interface):
"""
Get the mac address, which may be implicit in the object name or explicit with --mac-address.
Use the explicit location first.
"""
intf = self.__get_interface(interface)
if intf["mac_address"] != "":
return intf["mac_address"].strip()
else:
return None
def get_ip_address(self,interface):
"""
Get the IP address, which may be implicit in the object name or explict with --ip-address.
Use the explicit location first.
"""
intf = self.__get_interface(interface)
if intf["ip_address"] != "":
return intf["ip_address"].strip()
else:
return ""
    def is_management_supported(self,cidr_ok=True):
        """
        Can only add system PXE records if a MAC or IP address is available, else it's a koan
        only record. Actually Itanium goes beyond all this and needs the IP all of the time
        though this is enforced elsewhere (action_sync.py).

        With cidr_ok=False, an IP written in CIDR notation disqualifies
        the system instead of counting as an address.
        """
        # the reserved "default" system is always manageable
        if self.name == "default":
            return True
        for (name,x) in self.interfaces.iteritems():
            mac = x.get("mac_address",None)
            ip = x.get("ip_address",None)
            if ip is not None and not cidr_ok and ip.find("/") != -1:
                # ip is in CIDR notation
                return False
            if mac is not None or ip is not None:
                # has ip and/or mac
                return True
        return False
def set_dhcp_tag(self,dhcp_tag,interface):
intf = self.__get_interface(interface)
intf["dhcp_tag"] = dhcp_tag
return True
def set_dns_name(self,dns_name,interface):
intf = self.__get_interface(interface)
# FIXME: move duplicate supression code to the object validation
# functions to take a harder line on supression?
if dns_name != "" and not str(self.config._settings.allow_duplicate_hostnames).lower() in [ "1", "y", "yes"]:
matched = self.config.api.find_items("system", {"dns_name" : dns_name})
for x in matched:
if x.name != self.name:
raise CX("dns-name duplicated: %s" % dns_name)
intf["dns_name"] = dns_name
return True
def set_static_routes(self,routes,interface):
intf = self.__get_interface(interface)
data = utils.input_string_or_list(routes)
intf["static_routes"] = data
return True
def set_hostname(self,hostname):
if hostname is None:
hostname = ""
self.hostname = hostname
return True
def set_static(self,truthiness,interface):
intf = self.__get_interface(interface)
intf["static"] = utils.input_boolean(truthiness)
return True
    def set_ip_address(self,address,interface):
        """
        Assign a IP or hostname in DHCP when this MAC boots.
        Only works if manage_dhcp is set in /etc/cobbler/settings

        Unless the allow_duplicate_ips setting is enabled, the address
        must not already be used by another system.  An empty string
        clears the field.  Raises CX on a duplicate or malformed address.
        """
        intf = self.__get_interface(interface)
        # FIXME: move duplicate supression code to the object validation
        # functions to take a harder line on supression?
        if address != "" and not str(self.config._settings.allow_duplicate_ips).lower() in [ "1", "y", "yes"]:
            matched = self.config.api.find_items("system", {"ip_address" : address})
            for x in matched:
                if x.name != self.name:
                    raise CX("IP address duplicated: %s" % address)
        if address == "" or utils.is_ip(address):
            intf["ip_address"] = address.strip()
            return True
        raise CX(_("invalid format for IP address (%s)") % address)
def set_mac_address(self,address,interface):
if address == "random":
address = utils.get_random_mac(self.config.api)
# FIXME: move duplicate supression code to the object validation
# functions to take a harder line on supression?
if address != "" and not str(self.config._settings.allow_duplicate_macs).lower() in [ "1", "y", "yes"]:
matched = self.config.api.find_items("system", {"mac_address" : address})
for x in matched:
if x.name != self.name:
raise CX("MAC address duplicated: %s" % address)
intf = self.__get_interface(interface)
if address == "" or utils.is_mac(address):
intf["mac_address"] = address.strip()
return True
raise CX(_("invalid format for MAC address (%s)" % address))
def set_gateway(self,gateway):
if gateway is None:
gateway = ""
if utils.is_ip(gateway) or gateway == "":
self.gateway = gateway
else:
raise CX(_("invalid format for gateway IP address (%s)") % gateway)
return True
def set_name_servers(self,data):
if data == "<<inherit>>":
data = []
data = utils.input_string_or_list(data)
self.name_servers = data
return True
def set_name_servers_search(self,data):
if data == "<<inherit>>":
data = []
data = utils.input_string_or_list(data)
self.name_servers_search = data
return True
def set_subnet(self,subnet,interface):
intf = self.__get_interface(interface)
intf["subnet"] = subnet
return True
def set_virt_bridge(self,bridge,interface):
if bridge == "":
bridge = self.settings.default_virt_bridge
intf = self.__get_interface(interface)
intf["virt_bridge"] = bridge
return True
def set_bonding(self,bonding,interface):
if bonding not in ["master","slave","na",""] :
raise CX(_("bonding value must be one of: master, slave, na"))
if bonding == "na":
bonding = ""
intf = self.__get_interface(interface)
intf["bonding"] = bonding
return True
def set_bonding_master(self,bonding_master,interface):
intf = self.__get_interface(interface)
intf["bonding_master"] = bonding_master
return True
def set_bonding_opts(self,bonding_opts,interface):
intf = self.__get_interface(interface)
intf["bonding_opts"] = bonding_opts
return True
def set_profile(self,profile_name):
"""
Set the system to use a certain named profile. The profile
must have already been loaded into the Profiles collection.
"""
if profile_name in [ "delete", "None", "~", ""] or profile_name is None:
self.profile = ""
return True
self.image = "" # mutual exclusion rule
p = self.config.profiles().find(name=profile_name)
if p is not None:
self.profile = profile_name
self.depth = p.depth + 1 # subprofiles have varying depths.
return True
raise CX(_("invalid profile name: %s") % profile_name)
def set_image(self,image_name):
"""
Set the system to use a certain named image. Works like set_profile
but cannot be used at the same time. It's one or the other.
"""
if image_name in [ "delete", "None", "~", ""] or image_name is None:
self.image = ""
return True
self.profile = "" # mutual exclusion rule
img = self.config.images().find(name=image_name)
if img is not None:
self.image = image_name
self.depth = img.depth + 1
return True
raise CX(_("invalid image name (%s)") % image_name)
def set_virt_cpus(self,num):
return utils.set_virt_cpus(self,num)
def set_virt_file_size(self,num):
return utils.set_virt_file_size(self,num)
def set_virt_auto_boot(self,num):
return utils.set_virt_auto_boot(self,num)
def set_virt_ram(self,num):
return utils.set_virt_ram(self,num)
def set_virt_type(self,vtype):
return utils.set_virt_type(self,vtype)
def set_virt_path(self,path):
return utils.set_virt_path(self,path,for_system=True)
def set_netboot_enabled(self,netboot_enabled):
"""
If true, allows per-system PXE files to be generated on sync (or add). If false,
these files are not generated, thus eliminating the potential for an infinite install
loop when systems are set to PXE boot first in the boot order. In general, users
who are PXE booting first in the boot order won't create system definitions, so this
feature primarily comes into play for programmatic users of the API, who want to
initially create a system with netboot enabled and then disable it after the system installs,
as triggered by some action in kickstart %post. For this reason, this option is not
surfaced in the CLI, output, or documentation (yet).
Use of this option does not affect the ability to use PXE menus. If an admin has machines
set up to PXE only after local boot fails, this option isn't even relevant.
"""
self.netboot_enabled = utils.input_boolean(netboot_enabled)
return True
def set_kickstart(self,kickstart):
"""
Sets the kickstart. This must be a NFS, HTTP, or FTP URL.
Or filesystem path. Minor checking of the URL is performed here.
NOTE -- usage of the --kickstart parameter in the profile
is STRONGLY encouraged. This is only for exception cases
where a user already has kickstarts made for each system
and can't leverage templating. Profiles provide an important
abstraction layer -- assigning systems to defined and repeatable
roles.
"""
if kickstart is None or kickstart in [ "", "delete", "<<inherit>>" ]:
self.kickstart = "<<inherit>>"
return True
if utils.find_kickstart(kickstart):
self.kickstart = kickstart
return True
raise CX(_("kickstart not found: %s" % kickstart))
def set_power_type(self, power_type):
# FIXME: modularize this better
if power_type is None:
power_type = ""
power_type = power_type.lower()
choices = utils.get_power_types()
choices.sort()
if power_type not in choices:
raise CX("power type must be one of: %s" % ",".join(choices))
self.power_type = power_type
return True
def set_power_user(self, power_user):
if power_user is None:
power_user = ""
utils.safe_filter(power_user)
self.power_user = power_user
return True
def set_power_pass(self, power_pass):
if power_pass is None:
power_pass = ""
utils.safe_filter(power_pass)
self.power_pass = power_pass
return True
def set_power_address(self, power_address):
if power_address is None:
power_address = ""
utils.safe_filter(power_address)
self.power_address = power_address
return True
def set_power_id(self, power_id):
if power_id is None:
power_id = ""
utils.safe_filter(power_id)
self.power_id = power_id
return True
    def modify_interface(self, hash):
        """
        Used by the WUI to modify an interface more-efficiently

        *hash* maps "<field>-<interface>" keys to new values, e.g.
        {"macaddress-eth0": "AA:BB:CC:DD:EE:FF"}; field names are
        normalised by stripping "_" and "-" before dispatching to the
        matching setter.

        NOTE(review): key.split("-") unpacks exactly two parts, so an
        interface name containing a hyphen (e.g. "eth0-1") would raise
        here -- TODO confirm against callers.
        """
        for (key,value) in hash.iteritems():
            (field,interface) = key.split("-")
            field = field.replace("_","").replace("-","")
            if field == "macaddress" : self.set_mac_address(value, interface)
            if field == "ipaddress" : self.set_ip_address(value, interface)
            if field == "dnsname" : self.set_dns_name(value, interface)
            if field == "static" : self.set_static(value, interface)
            if field == "dhcptag" : self.set_dhcp_tag(value, interface)
            if field == "subnet" : self.set_subnet(value, interface)
            if field == "virtbridge" : self.set_virt_bridge(value, interface)
            if field == "bonding" : self.set_bonding(value, interface)
            if field == "bondingmaster" : self.set_bonding_master(value, interface)
            if field == "bondingopts" : self.set_bonding_opts(value, interface)
            if field == "staticroutes" : self.set_static_routes(value, interface)
        return True
    def check_if_valid(self):
        """
        Sanity check: a system must have a name and either a profile or
        an image assigned.

        Raises:
            CX: when a required field is missing.
        """
        if self.name is None or self.name == "":
            raise CX("name is required")
        if self.profile is None or self.profile == "":
            if self.image is None or self.image == "":
                raise CX("profile or image is required")
| ssalevan/cobbler | cobbler/item_system.py | Python | gpl-2.0 | 21,071 |
import ocl
import pyocl
import camvtk
import time
import datetime
import vtk
def main(filename="frame/f.png"):
    """Build an octree model of a sphere and show its marching-cubes
    surface in an interactive VTK window.

    NOTE(review): *filename* and the PNG writer below are set up but no
    screenshot is ever written -- presumably leftover scaffolding; confirm
    before removing.
    """
    print ocl.revision()
    myscreen = camvtk.VTKScreen()
    myscreen.camera.SetPosition(-15, -8, 15)
    myscreen.camera.SetFocalPoint(5,5, 0)
    # axis arrows
    camvtk.drawArrows(myscreen,center=(0,0,0))
    # screenshot writer
    w2if = vtk.vtkWindowToImageFilter()
    w2if.SetInput(myscreen.renWin)
    lwr = vtk.vtkPNGWriter()
    lwr.SetInput( w2if.GetOutput() )
    c = ocl.CylCutter(3,10) # cutter
    c.length = 3
    print "cutter length=", c.length
    cp= ocl.Point(0,0,0)
    # octree parameters: root cube half-size and maximum subdivision depth
    max_depth = 9
    root_scale = 3
    t = ocl.Octree(root_scale, max_depth, cp)
    print t
    nodes = t.get_leaf_nodes()
    t.init(1)
    #nodes=[]
    # the volume to subtract: a sphere centered at the origin
    s = ocl.SphereOCTVolume()
    s.center = ocl.Point(0,0,0)
    s.radius = 2.6345
    print "build...",
    t.build(s)
    print "done."
    print t
    # semi-transparent reference sphere so the octree surface can be compared
    sphere = camvtk.Sphere( center=(s.center.x,s.center.y,s.center.z), radius=s.radius, color=camvtk.cyan)
    sphere.SetOpacity(0.1)
    myscreen.addActor( sphere );
    nodes = t.get_surface_nodes()
    print "got ", len(nodes)," surface nodes"
    points=[]
    for n in nodes:
        #n=nodes[0]
        verts = n.vertices()
        #c = n.center
        #print " node at depth=", n.depth," center=",c
        #myscreen.addActor( camvtk.Sphere( center=(c.x,c.y,c.z), radius=0.1, color=camvtk.yellow ))
        for v in verts:
            #print v
            #myscreen.addActor( camvtk.Sphere( center=(v.x,v.y,v.z), radius=0.1 ))
            #
            points.append(v)
    #myscreen.addActor( camvtk.PointCloud( pointlist= points))
    # triangulate the octree surface with marching cubes and render it
    tris = t.mc_triangles()
    mc_surf = camvtk.STLSurf( triangleList=tris, color=camvtk.red )
    #mc_surf.SetWireframe()
    myscreen.addActor( mc_surf )
    print " render()...",
    myscreen.render()
    print "done."
    #time.sleep(0.2)
    myscreen.iren.Start()

if __name__ == "__main__":
    main()
| AlanZatarain/opencamlib | scripts/ocode/cutsim_test_4_octree2.py | Python | gpl-3.0 | 2,075 |
#!/usr/bin/env python
"""Test for depth estimation
TODO: Render Baxter's hand at the goal position in Rviz, as well as the arm position that induced this goal position
"""
import rospy
from geometry_msgs.msg import Pose, PoseArray, Point, Quaternion
from visualization_msgs.msg import Marker, MarkerArray
import visualization_msgs.msg
global marker_ests
marker_ests = MarkerArray()
goal_position = Point( x=0.785, y=0, z=-0.166)
#play the rosbag (user)
def callback(data):
    """Rebuild the global MarkerArray from an incoming PoseArray.

    Creates one semi-transparent green cube marker per pose, in the
    "base" frame, for display in rviz; the main loop publishes the
    rebuilt array.
    """
    global marker_ests
    #Publish it as a marker in rviz
    marker_ests = MarkerArray()
    marker_ests.markers = []
    print len(data.poses)
    i = 0
    for pose in data.poses:
        marker_est = Marker()
        marker_est.header.frame_id = "base"
        # unique namespace/id per marker so rviz keeps them all
        marker_est.ns = "est_pose_"+str(i)
        marker_est.id = 42+i
        marker_est.type = Marker.CUBE
        marker_est.action = Marker.ADD
        marker_est.pose = pose
        # NOTE(review): std_msgs/ColorRGBA channels are floats in [0, 1];
        # g=255 presumably just saturates to full green -- confirm intent.
        marker_est.color.r, marker_est.color.g, marker_est.color.b = (0, 255, 0)
        marker_est.color.a = 0.5
        marker_est.scale.x, marker_est.scale.y, marker_est.scale.z = (0.06, 0.06, 0.06)
        marker_ests.markers.append(marker_est)
        i+=1
# Node setup: subscribe to the tracker's goal poses and republish them
# as rviz markers at 100 Hz until shutdown.
rospy.init_node("estimate_depth_test")
#Subscribe to object_tracker/right/goal_pose
goal_sub = rospy.Subscriber("/object_tracker/right/goal_poses", PoseArray, callback)
# NOTE(review): no queue_size argument -- newer rospy versions warn here.
est_pub = rospy.Publisher("object_estimates", MarkerArray)
rate = rospy.Rate(100)
while not rospy.is_shutdown():
    est_pub.publish(marker_ests)
    rate.sleep()
| osrf/baxter_demos | tests/estimate_depth_test.py | Python | apache-2.0 | 1,522 |
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
import sys
# set profiling globally
from Cython.Compiler.Options import directive_defaults
directive_defaults['profile'] = False
# extra compile flags
extra_compile_args = ["-O0"]
#sys.argv.append("build_ext")
#sys.argv.append("--inplace")
extensions = [
Extension("Controller", ["src/Controller.pyx"], extra_compile_args=extra_compile_args),
Extension("BaseMotor", ["src/Motor/BaseMotor.pyx"], extra_compile_args=extra_compile_args),
Extension("UnipolarStepperMotor", ["src/Motor/UnipolarStepperMotor.pyx"], extra_compile_args=extra_compile_args),
Extension("BipolarStepperMotor", ["src/Motor/BipolarStepperMotor.pyx"], extra_compile_args=extra_compile_args),
Extension("LaserMotor", ["src/Motor/LaserMotor.pyx"], extra_compile_args=extra_compile_args),
Extension("A5988DriverMotor", ["src/Motor/A5988DriverMotor.pyx"], extra_compile_args=extra_compile_args),
Extension("StepDirMotor", ["src/Motor/StepDirMotor.pyx"], extra_compile_args=extra_compile_args),
Extension("Parser", ["src/Parser.pyx"], extra_compile_args=extra_compile_args),
Extension("Point3d", ["src/Point3d.pyx"], extra_compile_args=extra_compile_args),
Extension("LaserSpindle", ["src/Spindle/LaserSpindle.pyx"], extra_compile_args=extra_compile_args),
Extension("BaseSpindle", ["src/Spindle/BaseSpindle.pyx"], extra_compile_args=extra_compile_args),
Extension("ShiftRegister", ["src/ShiftRegister/ShiftRegister.pyx"], extra_compile_args=extra_compile_args),
Extension("ShiftGPIOWrapper", ["src/ShiftRegister/ShiftGPIOWrapper.pyx"], extra_compile_args=extra_compile_args),
Extension("GPIOObject", ["src/GPIOObject/GPIOObject.pyx"], extra_compile_args=extra_compile_args),
Extension("FakeGPIO", ["src/GPIOObject/FakeGPIO.pyx"], extra_compile_args=extra_compile_args),
Extension("GPIOWrapper", ["src/GPIOObject/GPIOWrapper.pyx"], extra_compile_args=extra_compile_args),
Extension("Transformer", ["src/Transformer/Transformer.pyx"], extra_compile_args=extra_compile_args),
]
# cythonize() turns every Extension listed above into a compiled module.
setup(
    name = "python-gcode",
    ext_modules = cythonize(extensions), # accepts a glob pattern
)
| gunny26/python-gcode | setup.py | Python | gpl-2.0 | 2,225 |
# Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
import json
from pyramid.view import view_config
from pyramid.httpexceptions import HTTPFound
from dace.util import getSite
from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS
from dace.objectofcollaboration.principal.util import get_current, has_role
from pontus.view import BasicView
from deform_treepy.utilities.tree_utility import tree_diff
from lac.content.processes.base_review_management.behaviors import (
SeeCinemaReview)
from lac.content.cinema_review import CinemaReview
from lac.content.smart_folder import generate_search_smart_folder
from lac.content.processes import get_states_mapping
from lac.utilities.utils import (
generate_navbars,
ObjectRemovedException,
get_site_folder)
from lac.content.interface import IFilmSchedule
from lac.views.filter import get_entities_by_title
def find_related_film_schedules(review):
    """Return published film-schedule entities whose title matches *review*'s title."""
    interfaces = [IFilmSchedule]
    title = getattr(review, 'title', '')
    return get_entities_by_title(
        interfaces, title, metadata_filter={'states': ['published']})
@view_config(
    name='seecinemareview',
    context=CinemaReview,
    renderer='pontus:templates/views_templates/grid.pt',
    )
class SeeCinemaReviewView(BasicView):
    """Detail view for a CinemaReview.

    Renders the review, any published film schedules sharing its title,
    the navigation bars, and the diff between the site's classification
    tree and the review's tree (displayed with jstree).
    """
    title = ''
    name = 'seecinemareview'
    viewid = 'seecinemareview'
    behaviors = [SeeCinemaReview]
    template = 'lac:views/cinema_review_management/templates/see_review.pt'
    # jstree assets needed by the classification-tree diff widget
    requirements = {'css_links': ['deform_treepy:static/vakata-jstree/dist/themes/default/style.min.css',
                                  'deform_treepy:static/css/treepy.css'],
                    'js_links': ['deform_treepy:static/js/treepy.js',
                                 'deform_treepy:static/vakata-jstree/dist/jstree.js']}

    def update(self):
        self.execute(None)
        try:
            navbars = generate_navbars(self, self.context, self.request)
        except ObjectRemovedException:
            # the review vanished under us: bounce back to the site root
            return HTTPFound(self.request.resource_url(getSite(), ''))
        result = {}
        user = get_current()
        # materialize the result set once; it is both tested and iterated below
        related_film_schedules = list(find_related_film_schedules(self.context))
        films_body = ''
        if related_film_schedules:
            # NOTE(review): 'fil schedules' looks like a typo for
            # 'film schedules'; kept as-is since it is a runtime value.
            films_folder = generate_search_smart_folder('fil schedules',
                                                        ('city_classification',
                                                         'venue_classification'))
            films_body = films_folder.classifications.render(
                related_film_schedules, self.request, films_folder)
        site = get_site_folder(True, self.request)
        diff_marker = "#diff"
        values = {'object': self.context,
                  'films_body': films_body,
                  'state': get_states_mapping(
                      user, self.context,
                      getattr(self.context, 'state_or_none', [None])[0]),
                  'navbar_body': navbars['navbar_body'],
                  'footer_body': navbars['footer_body'],
                  'services_body': navbars['services_body'],
                  'is_portalmanager': has_role(user=user, role=('PortalManager',)),
                  'tree_diff': json.dumps(
                      tree_diff(site.tree, self.context.tree, diff_marker)),
                  'diff_marker': diff_marker}
        body = self.content(args=values, template=self.template)['body']
        item = self.adapt_item(body, self.viewid)
        item['messages'] = navbars['messages']
        item['isactive'] = navbars['isactive']
        result.update(navbars['resources'])
        result.update(self.requirements)
        result['coordinates'] = {self.coordinates: [item]}
        return result
| ecreall/lagendacommun | lac/views/cinema_review_management/see_review.py | Python | agpl-3.0 | 3,952 |
# IMAPFS - Cloud storage via IMAP
# Copyright (C) 2013 Wes Weber
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import time
from imapfs import message
class Directory:
    """Represents a directory

    Maps child message keys to file names and tracks creation /
    modification timestamps.  Mutations are buffered in memory and only
    written back to the underlying message on flush().
    """

    def __init__(self, msg, ctime, mtime, children):
        self.message = msg
        self.ctime = ctime
        self.mtime = mtime
        self.children = children
        self.dirty = False

    def add_child(self, key, name):
        """Register (or overwrite) the child stored under *key*."""
        self.children[key] = name
        self.dirty = True

    def remove_child(self, key):
        """Drop the child stored under *key*; unknown keys are ignored."""
        if key in self.children:
            self.children.pop(key)
            self.dirty = True

    def get_child_by_name(self, name):
        """Return the key of the child called *name*, or None if absent."""
        for key, child_name in self.children.items():
            if child_name == name:
                return key
        return None

    def flush(self):
        """Write any pending changes back to the server."""
        if not self.dirty:
            return
        self.mtime = time.time()
        self.message.truncate(0)  # clear
        self.message.write("d\r\n%d\t%d\r\n" % (self.ctime, self.mtime))
        for key, child_name in self.children.items():
            self.message.write("%s\t%s\r\n" % (key, child_name))
        self.message.flush()
        self.dirty = False

    def close(self):
        """Flush pending changes and close the backing message."""
        self.flush()
        self.message.close()

    @staticmethod
    def create(conn):
        """Create a brand-new, empty directory on *conn*."""
        backing = message.Message.create(conn)
        d = Directory(backing, time.time(), time.time(), {})
        d.dirty = True
        return d

    @staticmethod
    def from_message(msg):
        """Parse a Directory out of an existing message's payload."""
        lines = str(msg.read()).split("\r\n")
        # line 0 is the "d" type marker; line 1 carries ctime/mtime
        meta = lines[1].split("\t")
        children = {}
        for line in lines[2:]:
            if not line:
                continue
            fields = line.split("\t")
            children[fields[0]] = fields[1]
        return Directory(msg, int(meta[0]), int(meta[1]), children)
| waweber/imapfs | imapfs/directory.py | Python | gpl-3.0 | 2,645 |
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.models import Site
from django.test import TestCase, modify_settings, override_settings
from .settings import FLATPAGES_TEMPLATES
class TestDataMixin:
    """Shared fixture: the pk=1 Site plus four flatpages covering every
    combination of top-level vs nested URL and public vs
    registration-required, all attached to that site."""
    @classmethod
    def setUpTestData(cls):
        # don't use the manager because we want to ensure the site exists
        # with pk=1, regardless of whether or not it already exists.
        cls.site1 = Site(pk=1, domain='example.com', name='example.com')
        cls.site1.save()
        cls.fp1 = FlatPage.objects.create(
            url='/flatpage/', title='A Flatpage', content="Isn't it flat!",
            enable_comments=False, template_name='', registration_required=False
        )
        cls.fp2 = FlatPage.objects.create(
            url='/location/flatpage/', title='A Nested Flatpage', content="Isn't it flat and deep!",
            enable_comments=False, template_name='', registration_required=False
        )
        cls.fp3 = FlatPage.objects.create(
            url='/sekrit/', title='Sekrit Flatpage', content="Isn't it sekrit!",
            enable_comments=False, template_name='', registration_required=True
        )
        cls.fp4 = FlatPage.objects.create(
            url='/location/sekrit/', title='Sekrit Nested Flatpage', content="Isn't it sekrit and deep!",
            enable_comments=False, template_name='', registration_required=True
        )
        cls.fp1.sites.add(cls.site1)
        cls.fp2.sites.add(cls.site1)
        cls.fp3.sites.add(cls.site1)
        cls.fp4.sites.add(cls.site1)
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.flatpages'})
@override_settings(
    LOGIN_URL='/accounts/login/',
    MIDDLEWARE=[
        'django.middleware.common.CommonMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        # no 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware'
    ],
    ROOT_URLCONF='flatpages_tests.urls',
    TEMPLATES=FLATPAGES_TEMPLATES,
    SITE_ID=1,
)
class FlatpageViewTests(TestDataMixin, TestCase):
    """Exercises the flatpage *view* (urlconf-mounted) with the fallback
    middleware deliberately disabled, so only /flatpage_root/... URLs
    resolve and bare flatpage URLs must 404."""
    def test_view_flatpage(self):
        "A flatpage can be served through a view"
        response = self.client.get('/flatpage_root/flatpage/')
        self.assertContains(response, "<p>Isn't it flat!</p>")
    def test_view_non_existent_flatpage(self):
        "A non-existent flatpage raises 404 when served through a view"
        response = self.client.get('/flatpage_root/no_such_flatpage/')
        self.assertEqual(response.status_code, 404)
    def test_view_authenticated_flatpage(self):
        "A flatpage served through a view can require authentication"
        response = self.client.get('/flatpage_root/sekrit/')
        self.assertRedirects(response, '/accounts/login/?next=/flatpage_root/sekrit/')
        user = User.objects.create_user('testuser', 'test@example.com', 's3krit')
        self.client.force_login(user)
        response = self.client.get('/flatpage_root/sekrit/')
        self.assertContains(response, "<p>Isn't it sekrit!</p>")
    def test_fallback_flatpage(self):
        "A fallback flatpage won't be served if the middleware is disabled"
        response = self.client.get('/flatpage/')
        self.assertEqual(response.status_code, 404)
    def test_fallback_non_existent_flatpage(self):
        "A non-existent flatpage won't be served if the fallback middleware is disabled"
        response = self.client.get('/no_such_flatpage/')
        self.assertEqual(response.status_code, 404)
    def test_view_flatpage_special_chars(self):
        "A flatpage with special chars in the URL can be served through a view"
        fp = FlatPage.objects.create(
            url="/some.very_special~chars-here/",
            title="A very special page",
            content="Isn't it special!",
            enable_comments=False,
            registration_required=False,
        )
        fp.sites.add(settings.SITE_ID)
        response = self.client.get('/flatpage_root/some.very_special~chars-here/')
        self.assertContains(response, "<p>Isn't it special!</p>")
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.flatpages'})
@override_settings(
    APPEND_SLASH=True,
    LOGIN_URL='/accounts/login/',
    MIDDLEWARE=[
        'django.middleware.common.CommonMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        # no 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware'
    ],
    ROOT_URLCONF='flatpages_tests.urls',
    TEMPLATES=FLATPAGES_TEMPLATES,
    SITE_ID=1,
)
class FlatpageViewAppendSlashTests(TestDataMixin, TestCase):
    """APPEND_SLASH interaction with the flatpage view (no fallback middleware)."""

    def test_redirect_view_flatpage(self):
        "A flatpage can be served through a view and should add a slash"
        response = self.client.get('/flatpage_root/flatpage')
        self.assertRedirects(response, '/flatpage_root/flatpage/', status_code=301)

    def test_redirect_view_non_existent_flatpage(self):
        "A non-existent flatpage raises 404 when served through a view and should not add a slash"
        self.assertEqual(
            self.client.get('/flatpage_root/no_such_flatpage').status_code, 404
        )

    def test_redirect_fallback_flatpage(self):
        "A fallback flatpage won't be served if the middleware is disabled and should not add a slash"
        self.assertEqual(self.client.get('/flatpage').status_code, 404)

    def test_redirect_fallback_non_existent_flatpage(self):
        "A non-existent flatpage won't be served if the fallback middleware is disabled and should not add a slash"
        self.assertEqual(self.client.get('/no_such_flatpage').status_code, 404)

    def test_redirect_view_flatpage_special_chars(self):
        "A flatpage with special chars in the URL can be served through a view and should add a slash"
        page = FlatPage.objects.create(
            url="/some.very_special~chars-here/",
            title="A very special page",
            content="Isn't it special!",
            enable_comments=False,
            registration_required=False,
        )
        page.sites.add(settings.SITE_ID)
        response = self.client.get('/flatpage_root/some.very_special~chars-here')
        self.assertRedirects(
            response, '/flatpage_root/some.very_special~chars-here/', status_code=301
        )
| mattseymour/django | tests/flatpages_tests/test_views.py | Python | bsd-3-clause | 6,752 |
from django import forms
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.base_user import BaseUserManager
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from pyswot.pyswot import _domain_parts, _is_stoplisted
from grandchallenge.core.forms import SaveFormInitMixin
from grandchallenge.verifications.models import Verification
from grandchallenge.verifications.resources.free_email_domains import (
FREE_EMAIL_DOMAINS,
)
from grandchallenge.verifications.tokens import (
email_verification_token_generator,
)
class VerificationForm(SaveFormInitMixin, forms.ModelForm):
    """Request account verification with a work/institutional email address.

    The form is pinned to the requesting ``user``: the hidden ``user`` field
    can only reference that user, and ``email`` must survive the free-domain,
    abuse-stoplist and uniqueness checks below.
    """

    def __init__(self, *args, user, **kwargs):
        super().__init__(*args, **kwargs)
        self.user = user
        # Lock the hidden user field to the requester so the form cannot be
        # used to create a verification for somebody else.
        self.fields["user"].queryset = get_user_model().objects.filter(
            pk=self.user.pk
        )
        self.fields["user"].initial = self.user
        self.fields["user"].widget = forms.HiddenInput()
        self.fields["email"].initial = self.user.email
        self.fields["email"].required = True
        self.fields["email"].help_text = (
            "Please provide your work, corporate or institutional email"
        )

    def clean_email(self):
        """Reject free/abusive domains and addresses already in use elsewhere."""
        email = self.cleaned_data["email"]
        email = BaseUserManager.normalize_email(email)
        # EmailField validation has already run, so an "@" is guaranteed here.
        domain = email.split("@")[1].lower()

        if domain in FREE_EMAIL_DOMAINS:
            raise ValidationError(
                f"Email hosted by {domain} cannot be used for verification, "
                "please provide your work, corporate or institutional email."
            )

        if domain in settings.DISALLOWED_EMAIL_DOMAINS or _is_stoplisted(
            _domain_parts(email)
        ):
            raise ValidationError(
                f"Email hosted by {domain} cannot be used for verification "
                "due to abuse. Please contact support to verify your "
                "account another way."
            )

        # The address must be unique across other users' accounts and across
        # all existing verification requests.
        if (
            get_user_model()
            .objects.filter(email__iexact=email)
            .exclude(pk=self.user.pk)
            .exists()
            or Verification.objects.filter(email__iexact=email).exists()
        ):
            raise ValidationError("This email is already in use")

        return email

    def clean(self):
        # Bug fix: chain to ModelForm.clean() so the base class still performs
        # its cross-field handling (it is what enables unique validation), and
        # return cleaned_data as the forms API documents.
        cleaned_data = super().clean()

        try:
            if self.user.verification:
                raise ValidationError(
                    "You have already made a verification request"
                )
        except ObjectDoesNotExist:
            # No existing verification - the request may proceed.
            pass

        return cleaned_data

    class Meta:
        model = Verification
        fields = ("user", "email")
class ConfirmEmailForm(SaveFormInitMixin, forms.Form):
    """Checks an email-confirmation token against the given user."""

    token = forms.CharField(help_text="Enter your email confirmation token")

    def __init__(self, *args, user, token, **kwargs):
        super().__init__(*args, **kwargs)
        self.user = user
        # Pre-fill the field so the user normally just submits the form.
        self.fields["token"].initial = token

    def clean_token(self):
        """Raise unless the token validates for this user."""
        token = self.cleaned_data["token"]
        token_is_valid = email_verification_token_generator.check_token(
            self.user, token
        )
        if not token_is_valid:
            raise ValidationError("Token is invalid")
        return token
| comic/comic-django | app/grandchallenge/verifications/forms.py | Python | apache-2.0 | 3,220 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: add the ``accepted_by`` / ``accepted_at`` audit columns to Checkin."""
        # Adding field 'Checkin.accepted_by'
        # Nullable FK to auth.User, so existing rows need no backfill.
        db.add_column('articletrack_checkin', 'accepted_by',
                      self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True),
                      keep_default=False)

        # Adding field 'Checkin.accepted_at'
        db.add_column('articletrack_checkin', 'accepted_at',
                      self.gf('django.db.models.fields.DateTimeField')(null=True),
                      keep_default=False)
    def backwards(self, orm):
        """Revert: drop both columns (the FK's DB column carries the ``_id`` suffix)."""
        # Deleting field 'Checkin.accepted_by'
        db.delete_column('articletrack_checkin', 'accepted_by_id')

        # Deleting field 'Checkin.accepted_at'
        db.delete_column('articletrack_checkin', 'accepted_at')
models = {
'articletrack.article': {
'Meta': {'object_name': 'Article'},
'article_title': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'articlepkg_ref': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'eissn': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '9'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue_label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'journal_title': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'journals': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'checkin_articles'", 'null': 'True', 'to': "orm['journalmanager.Journal']"}),
'pissn': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '9'})
},
'articletrack.checkin': {
'Meta': {'ordering': "['-created_at']", 'object_name': 'Checkin'},
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'accepted_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'article': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'checkins'", 'null': 'True', 'to': "orm['articletrack.Article']"}),
'attempt_ref': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'package_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'uploaded_at': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'articletrack.comment': {
'Meta': {'ordering': "['created_at']", 'object_name': 'Comment'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments_author'", 'to': "orm['auth.User']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'ticket': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['articletrack.Ticket']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'articletrack.notice': {
'Meta': {'ordering': "['-created_at']", 'object_name': 'Notice'},
'checkin': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notices'", 'to': "orm['articletrack.Checkin']"}),
'checkpoint': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'stage': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'articletrack.ticket': {
'Meta': {'ordering': "['started_at']", 'object_name': 'Ticket'},
'article': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tickets'", 'to': "orm['articletrack.Article']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tickets'", 'to': "orm['auth.User']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'journalmanager.collection': {
'Meta': {'ordering': "['name']", 'object_name': 'Collection'},
'acronym': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '16', 'blank': 'True'}),
'address': ('django.db.models.fields.TextField', [], {}),
'address_complement': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'address_number': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'collection': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'user_collection'", 'to': "orm['auth.User']", 'through': "orm['journalmanager.UserCollections']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'name_slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'})
},
'journalmanager.institution': {
'Meta': {'ordering': "['name']", 'object_name': 'Institution'},
'acronym': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '16', 'blank': 'True'}),
'address': ('django.db.models.fields.TextField', [], {}),
'address_complement': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'address_number': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'cel': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'complement': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_trashed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'db_index': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'})
},
'journalmanager.journal': {
'Meta': {'ordering': "['title']", 'object_name': 'Journal'},
'abstract_keyword_languages': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'abstract_keyword_languages'", 'symmetrical': 'False', 'to': "orm['journalmanager.Language']"}),
'acronym': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'collection': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'journals'", 'to': "orm['journalmanager.Collection']"}),
'copyrighter': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
'cover': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'enjoy_creator'", 'to': "orm['auth.User']"}),
'ctrl_vocabulary': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'current_ahead_documents': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True', 'blank': 'True'}),
'editor_address': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'editor_address_city': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'editor_address_country': ('scielo_extensions.modelfields.CountryField', [], {'max_length': '2'}),
'editor_address_state': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'editor_address_zip': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'editor_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'editor_name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'editor_phone1': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'editor_phone2': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'editorial_standard': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'editors': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'user_editors'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'eletronic_issn': ('django.db.models.fields.CharField', [], {'max_length': '9', 'db_index': 'True'}),
'final_num': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'final_vol': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'final_year': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'frequency': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index_coverage': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'init_num': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'init_vol': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'init_year': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'is_indexed_aehci': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_indexed_scie': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_indexed_ssci': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_trashed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'languages': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['journalmanager.Language']", 'symmetrical': 'False'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'medline_code': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'medline_title': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'national_code': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'max_length': '254', 'null': 'True', 'blank': 'True'}),
'other_previous_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'previous_ahead_documents': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True', 'blank': 'True'}),
'previous_title': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'prev_title'", 'null': 'True', 'to': "orm['journalmanager.Journal']"}),
'print_issn': ('django.db.models.fields.CharField', [], {'max_length': '9', 'db_index': 'True'}),
'pub_level': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'pub_status': ('django.db.models.fields.CharField', [], {'default': "'inprogress'", 'max_length': '16', 'null': 'True', 'blank': 'True'}),
'pub_status_changed_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pub_status_changed_by'", 'to': "orm['auth.User']"}),
'pub_status_reason': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'publication_city': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'publisher_country': ('scielo_extensions.modelfields.CountryField', [], {'max_length': '2'}),
'publisher_name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'publisher_state': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'scielo_issn': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'secs_code': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'short_title': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'db_index': 'True'}),
'sponsor': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'journal_sponsor'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['journalmanager.Sponsor']"}),
'study_areas': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'journals_migration_tmp'", 'null': 'True', 'to': "orm['journalmanager.StudyArea']"}),
'subject_categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'journals'", 'null': 'True', 'to': "orm['journalmanager.SubjectCategory']"}),
'subject_descriptors': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256', 'db_index': 'True'}),
'title_iso': ('django.db.models.fields.CharField', [], {'max_length': '256', 'db_index': 'True'}),
'twitter_user': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'url_journal': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'url_online_submission': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'use_license': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.UseLicense']"})
},
'journalmanager.language': {
'Meta': {'ordering': "['name']", 'object_name': 'Language'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'iso_code': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'journalmanager.sponsor': {
'Meta': {'ordering': "['name']", 'object_name': 'Sponsor', '_ormbases': ['journalmanager.Institution']},
'collections': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['journalmanager.Collection']", 'symmetrical': 'False'}),
'institution_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['journalmanager.Institution']", 'unique': 'True', 'primary_key': 'True'})
},
'journalmanager.studyarea': {
'Meta': {'object_name': 'StudyArea'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'study_area': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'journalmanager.subjectcategory': {
'Meta': {'object_name': 'SubjectCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '256', 'db_index': 'True'})
},
'journalmanager.uselicense': {
'Meta': {'ordering': "['license_code']", 'object_name': 'UseLicense'},
'disclaimer': ('django.db.models.fields.TextField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'license_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'reference_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'journalmanager.usercollections': {
'Meta': {'unique_together': "(('user', 'collection'),)", 'object_name': 'UserCollections'},
'collection': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.Collection']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_manager': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['articletrack'] | jamilatta/scielo-manager | scielomanager/articletrack/migrations/0004_auto__add_field_checkin_accepted_by__add_field_checkin_accepted_at.py | Python | bsd-2-clause | 23,300 |
# -*- encoding: utf-8 -*-
import gi
import regex
gi.require_version('Gtk', '3.0')
gi.require_version('Gdk', '3.0')
from gi.repository import Gtk, Gdk
from gcustom.replaceConfirmDialog import cReplaceConfirmDialog
class cSearchReplaceDialog(Gtk.Window):
def __init__(self, parent, treeView, subtitles_model, hist):
super(cSearchReplaceDialog, self).__init__()
# Setup Window Properties and Variables
self.parent = parent
self.set_title("Search / Replace")
self.set_modal(True)
self.set_transient_for(parent)
self.set_position(Gtk.WindowPosition.CENTER_ALWAYS)
self.treeView = treeView
self.subtitles = subtitles_model
self.findText = None
self.replaceText = None
self.set_resizable(False)
self.lastFindIdx = None
self.lastFindReplaceIdx = None
self.hist = hist
# Widgets
searchEntry = Gtk.Entry()
searchEntry.props.width_request = 200
searchButton = Gtk.Button()
searchButton.set_image(Gtk.Image.new_from_stock(Gtk.STOCK_FIND, Gtk.IconSize.BUTTON))
replaceEntry = Gtk.Entry()
replaceEntry.props.width_request = 200
replaceButton = Gtk.Button()
replaceButton.set_image(Gtk.Image.new_from_stock(Gtk.STOCK_FIND_AND_REPLACE, Gtk.IconSize.BUTTON))
self.wholeWordsSearch = Gtk.CheckButton("Whole Words")
self.caseSensitive = Gtk.CheckButton("Case Sensitive")
self.caseSensitive.set_active(True)
# Layout
tmp = Gtk.Label('Search')
tmp.props.width_request = 65
searchBox = Gtk.HBox()
searchBox.pack_start(tmp, False, False, 0)
searchBox.pack_start(searchEntry, False, False, 5)
searchBox.pack_start(searchButton, False, False, 5)
replaceBox = Gtk.HBox()
tmp = Gtk.Label('Replace')
tmp.props.width_request = 65
replaceBox.pack_start(tmp, False, False, 0)
replaceBox.pack_start(replaceEntry, False, False, 5)
replaceBox.pack_start(replaceButton, False, False, 5)
settingsBox = Gtk.HBox()
settingsBox.pack_start(self.wholeWordsSearch, False, False, 5)
settingsBox.pack_start(self.caseSensitive, False, False, 5)
mainBox = Gtk.VBox(spacing = 3)
mainBox.pack_start(searchBox, False, False, 0)
mainBox.pack_start(replaceBox, False, False, 0)
mainBox.pack_start(settingsBox, False, False, 0)
tmp = Gtk.VBox()
tmp.pack_start(mainBox, False, False, 5)
self.add(tmp)
# Connections
searchEntry.connect('key-release-event', self.on_key_release, 'search')
replaceEntry.connect('key-release-event', self.on_key_release, 'replace')
searchButton.connect('key-release-event', self.on_key_release, None)
replaceButton.connect('key-release-event', self.on_key_release, None)
self.wholeWordsSearch.connect('key-release-event', self.on_key_release, None)
self.caseSensitive.connect('key-release-event', self.on_key_release, None)
searchButton.connect('clicked', self.on_clicked, 'search')
replaceButton.connect('clicked', self.on_clicked, 'replace')
searchEntry.connect('changed', self.on_entry_changed, 'search')
replaceEntry.connect('changed', self.on_entry_changed, 'replace')
# Finally
self.show_all()
def tv_select_row(self, row):
if row is None:
return
sub = self.treeView.get_model()[row][0]
path = self.subtitles.get_sub_path(sub)
if path != None:
self.treeView.set_cursor(path)
def find_row(self, text, left_idx):
lidx = -1 if left_idx is None else left_idx
res = (None, None)
search_text = text.upper() if not self.caseSensitive.get_active() else text
for iterNum, row in enumerate(self.treeView.get_model()):
if iterNum <= lidx:
continue
subText = row[0].text.upper() if not self.caseSensitive.get_active() else row[0].text
whword = '\\b' if self.wholeWordsSearch.get_active() else ''
grp = regex.finditer(whword+search_text+whword, subText, flags = regex.U | regex.M)
lst = [(item.start(), item.end()) for item in grp]
tmpidx = iterNum if len(lst) > 0 else None
res = (tmpidx, lst)
if res[0] is not None:
break
return res
def on_entry_changed(self, widget, arg):
if arg == 'search':
self.findText = widget.get_text()
self.lastFindIdx = None
self.lastFindReplaceIdx = None
elif arg == 'replace':
self.replaceText = widget.get_text()
self.lastFindIdx = None
self.lastFindReplaceIdx = None
def on_clicked(self, widget, arg):
if arg == 'search' and self.findText != None:
nextIdx = self.find_row(self.findText, self.lastFindIdx)[0]
if nextIdx is None:
self.lastFindIdx = None
return
self.lastFindIdx = nextIdx
self.tv_select_row(nextIdx)
elif arg == 'replace' and self.replaceText != None and self.findText != None:
nextIdx, posList = self.find_row(self.findText, -1)
while nextIdx is not None:
curpos = None
if posList is not None and len(posList) > 0:
curpos = posList.pop(0)
self.lastFindIdx = nextIdx
else:
nextIdx, posList = self.find_row(self.findText, self.lastFindIdx)
if nextIdx is None:
self.lastFindIdx = None
continue
curpos = posList.pop(0)
self.lastFindReplaceIdx = nextIdx
self.lastFindIdx = nextIdx
self.tv_select_row(nextIdx)
if curpos is None:
return
dialog = cReplaceConfirmDialog(self, self.treeView.get_model()[nextIdx][0].text, self.replaceText, curpos)
dialog.run()
res = dialog.result
dialog.destroy()
if res == 'OK':
tmp_txt = self.treeView.get_model()[nextIdx][0].text
new_txt = tmp_txt[:curpos[0]] + self.replaceText + tmp_txt[curpos[1]:]
sub = self.treeView.get_model()[nextIdx][0]
self.hist.add( ('replace-text', sub, tmp_txt, new_txt) )
sub.text = new_txt
elif res == 'Cancel':
continue
elif res == 'Stop':
return
def on_key_release(self, widget, event, arg):
    """Keyboard shortcuts: Escape closes the dialog, Return triggers the action."""
    key = event.keyval
    if key == Gdk.KEY_Escape:
        self.destroy()
    if key == Gdk.KEY_Return:
        # Reuse the button handler; arg selects search vs. replace.
        self.on_clicked(None, arg)
| jtk1rk/xsubedit | gcustom/searchReplaceDialog.py | Python | gpl-3.0 | 6,940 |
def extra_eda(row):
    """King 2012 has extra 'random' systematic error added in quadrature."""
    # Extra systematic term keyed by the significance flag (King 2012 values);
    # any other flag contributes nothing.
    extra_by_flag = {3: 9.05, 2: 17.43}
    extra = extra_by_flag.get(row.sigflag, 0.0)
    return np.sqrt(row.eda ** 2.0 + extra ** 2.0)
def assign_dipole(row):
    """Assign best-fit dipole from King 2012 to column in dataset."""
    # Row-wise helper for DataFrame.apply; '#J2000' holds the sightline name.
    return dipole_alpha(row['#J2000'])
def assign_dipole_angle(row):
    """King 2012 angle from pole to position on sky via J2000."""
    # Row-wise helper for DataFrame.apply; returns degrees.
    return j2000_to_theta(row['#J2000'])
def parse_j2000(name):
    """Takes the J2000 name stored in the results and returns it in a format astropy can understand."""
    # Fixed-width fields after the leading 'J': RA hh mm ss, then Dec sign+dd mm ss.
    fields = [(1, 3), (3, 5), (5, 7), (7, 10), (10, 12), (12, None)]
    return ' '.join(name[start:stop] for start, stop in fields)
def j2000_to_theta(name):
    """Returns the angle (degrees) between the position on the sky from
    a given `name` and the position of the dipole model from 2012, King."""
    # `dipole` is defined elsewhere in this module -- presumably a SkyCoord
    # for the King 2012 pole position; confirm before relying on this note.
    c = SkyCoord(parse_j2000(name), unit=(u.hourangle, u.deg))
    # separation() returns an astropy Angle; decimal string -> plain degrees.
    return float(c.separation(dipole).to_string(decimal=True))
def dipole_alpha(name):
    """Returns the value of Delta alpha/alpha as given by the best fit 2012 King model for
    the given name (position).
    """
    # Angle between this sightline and the dipole pole, in degrees.
    theta = j2000_to_theta(name)
    cos_term = np.cos(np.deg2rad(theta))
    # Dipole-plus-monopole model, scaled to units of 1e-6 (ppm).
    return (DIP_AMPLITUDE * cos_term + DIP_MONOPOLE) * 1e6
# Load the raw King 2012 parse and add the derived columns.
full_parse = pd.read_csv("../data/full-parse.tsv", sep='\t')
full_parse['extraeda'] = full_parse.apply(extra_eda, axis=1)
full_parse['dipole_fit'] = full_parse.apply(assign_dipole, axis=1)
full_parse['dipole_angle'] = full_parse.apply(assign_dipole_angle, axis=1)

# Rename the terse upstream column names to descriptive ones
# (identity entries are kept for documentation of the full schema).
full_parse = full_parse.rename(columns={"#J2000":"J2000",
                                        'zem': 'z_emission',
                                        'zabs': 'z_absorption',
                                        'da': 'delta_alpha',
                                        'eda': 'error_delta_alpha',
                                        'extraeda': 'extra_error_delta_alpha',
                                        'dipole_fit': 'dipole_delta_alpha',
                                        'dipole_angle': 'dipole_angle',
                                        'sample': 'sample',
                                        'source': 'source',
                                        'sigflag': 'sigflag',
                                        'imrotator': 'imrotator',
                                        'transition': 'transitions',
                                        })
# Write the selected columns, in a fixed order, as the new TSV.
full_parse[['J2000',
            'z_emission',
            'z_absorption',
            'delta_alpha',
            'error_delta_alpha',
            'extra_error_delta_alpha',
            'dipole_delta_alpha',
            'dipole_angle',
            'sample',
            'source',
            'sigflag',
            'imrotator',
            'transitions',
            ]].to_csv("../data/full-parse-new.tsv", sep='\t', index=False)
#!/usr/bin/python3
#
# $Id:$
#
# We test a bit of the atrshmlog here.
#
# This is for the first starter, so only the basic things.
import sys
import atrshmlog
def _show(value):
    # Matches the original "clock id : <value> : " output format exactly.
    print('clock id : ' + str(value) + ' : ')

# Attach to the shared-memory log before touching the clock id.
r = atrshmlog.attach()

# Read the current clock id, switch it to 2 and back to 1, printing the
# value reported at every step (set_clock_id returns the previous id).
_show(atrshmlog.get_clock_id())
_show(atrshmlog.set_clock_id(2))
_show(atrshmlog.get_clock_id())
_show(atrshmlog.set_clock_id(1))
_show(atrshmlog.get_clock_id())

print (' ')

exit(0)
# end of test
| atrsoftgmbh/atrshmlog | python/src/tests/t_clock_id.py | Python | apache-2.0 | 594 |
#!/usr/bin/env python3
# The following overwrites the module name, so other functions in
# polyproject.py would also have to be imported here in future.
from .polyproject import polyproject
| praveenv253/polyproject | polyproject/__init__.py | Python | mit | 191 |
#!/bin/env python3
import sys,os
import random
import functools
# Spanish definite articles: singular and plural forms, picked at random.
p_sg = ['el', 'la']
p_pl = ['los', 'las']
# func to add prefix
def with_prefix(name):
if name[-1:] == 's':
# plural
return random.choice(p_pl) + ' ' + name
else:
return random.choice(p_sg) + ' ' + name
# Find the path: the last non-option command-line argument wins,
# defaulting to the current directory.
path = '.'
if len(sys.argv) > 1:
    for i, p in enumerate(sys.argv[1:]):
        # ignore ls options (anything starting with '-')
        if p[0] == '-':
            continue
        path = p

# handle nonexistent path: mimic ls by reporting on stderr and exiting.
if not os.path.exists(path):
    print('No such file or directory.', file=sys.stderr)
    sys.exit(1)

# is not a directory: print the single file (with its article) and stop.
if not os.path.isdir(path):
    print(with_prefix(os.path.basename(path)))
    sys.exit(1)
def compare(x, y):
dirx = os.path.isdir(os.path.join(path, x))
diry = os.path.isdir(os.path.join(path, y))
if dirx and not diry: return -1
if diry and not dirx: return 1
if x < y: return -1
if x > y: return 1
return 0
# list dir and sort using comparator (directories first, then alphabetical)
files = os.listdir(path)
files.sort(key=functools.cmp_to_key(compare))

# Print each entry with its article, marking directories with '/' and
# colorizing by type: symlink=cyan, directory=blue, executable=green.
# (The original had a dead `else: s = s` no-op branch; removed.)
for f in files:
    s = with_prefix(f)
    pa = os.path.join(path, f)
    if os.path.isdir(pa):
        s += '/'
    if os.path.islink(pa):
        s = '\033[1;36m' + s + '\033[0m'  # cyan
    elif os.path.isdir(pa):
        s = '\033[1;34m' + s + '\033[0m'  # blue
    elif os.access(pa, os.X_OK):
        s = '\033[1;32m' + s + '\033[0m'  # green
    print(s)
import time
from options.train_options import TrainOptions
from data.data_loader import CreateDataLoader
from models.models import create_model
from util.visualizer import Visualizer
# Parse training options and build the data pipeline, model and visualizer.
opt = TrainOptions().parse()
data_loader = CreateDataLoader(opt)
dataset = data_loader.load_data()
dataset_size = len(data_loader)
dataset_batch_num = len(dataset)
print('#training images = {}, training images batch num = {}'.format(dataset_size, dataset_batch_num))
model = create_model(opt)
visualizer = Visualizer(opt)
total_steps = 0
# Counts generator updates; drives the WGAN-GP warm-up schedule below.
netG_iter_count = 0

MODE = 'wgan-gp' # Valid options are dcgan, wgan, or wgan-gp
DIM = 128 # This overfits substantially; you're probably better off with 64
# critic_iters = 5 # How many critic iterations per generator iteration
ITERS = 200000 # How many generator iterations to train for
OUTPUT_DIM = 3072 # Number of pixels in CIFAR10 (3*32*32)
# Main training loop. WGAN-GP schedule: the discriminator is updated every
# batch; the generator is updated sparsely (see the netG_iter_count branch).
for epoch in range(opt.epoch_count, opt.niter + opt.niter_decay + 1):
    epoch_start_time = time.time()
    epoch_iter = 0
    for i, data in enumerate(dataset):
        iter_start_time = time.time()
        total_steps += opt.batchSize
        epoch_iter += opt.batchSize
        model.set_input(data)
        # Discriminator step with gradient penalty on every batch.
        # for j in range(critic_iters):
        # model.optimize_parameters()
        # model.optimize_netD_parameters()
        model.optimize_netD_parameters_gp()
        # model.optimize_netG_parameters()
        # Warm-up: during the first 5 generator updates (and every 500th
        # thereafter) update netG only every 100 batches; otherwise every 5.
        if netG_iter_count < 5 or netG_iter_count % 500 == 0:
            if i != 0 and i % 100 == 0:
                model.optimize_netG_parameters()
                netG_iter_count += 1
            else:
                pass
        else:
            if i != 0 and i % 5 == 0:
                model.optimize_netG_parameters()
                netG_iter_count += 1
            else:
                pass
        # Periodic visualization, error reporting and checkpointing.
        if total_steps % opt.display_freq == 0:
            visualizer.display_current_results(model.get_current_visuals(), epoch)
        if total_steps % opt.print_freq == 0:
            errors = model.get_current_errors()
            t = (time.time() - iter_start_time) / opt.batchSize
            visualizer.print_current_errors(epoch, epoch_iter, errors, t)
            if opt.display_id > 0:
                visualizer.plot_current_errors(epoch, float(epoch_iter)/dataset_size, opt, errors)
        if total_steps % opt.save_latest_freq == 0:
            print('saving the latest model (epoch %d, total_steps %d)' %
                  (epoch, total_steps))
            model.save('latest')
    # End-of-epoch checkpoint and learning-rate schedule step.
    if epoch % opt.save_epoch_freq == 0:
        print('saving the model at the end of epoch %d, iters %d' %
              (epoch, total_steps))
        model.save('latest')
        model.save(epoch)
    print('End of epoch %d / %d \t Time Taken: %d sec' %
          (epoch, opt.niter + opt.niter_decay, time.time() - epoch_start_time))
    model.update_learning_rate()
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Base class for linker-specific test cases.
The custom dynamic linker can only be tested through a custom test case
for various technical reasons:
- It's an 'invisible feature', i.e. it doesn't expose a new API or
behaviour, all it does is save RAM when loading native libraries.
- Checking that it works correctly requires several things that do not
fit the existing GTest-based and instrumentation-based tests:
- Native test code needs to be run in both the browser and renderer
process at the same time just after loading native libraries, in
a completely asynchronous way.
- Each test case requires restarting a whole new application process
with a different command-line.
- Enabling test support in the Linker code requires building a special
APK with a flag to activate special test-only support code in the
Linker code itself.
Host-driven tests have also been tried, but since they're really
sub-classes of instrumentation tests, they didn't work well either.
To build and run the linker tests, do the following:
ninja -C out/Debug chromium_linker_test_apk
build/android/test_runner.py linker
"""
# pylint: disable=R0201
import logging
import re
from devil.android import device_errors
from devil.android.sdk import intent
from pylib.base import base_test_result
# Short alias used throughout this module.
ResultType = base_test_result.ResultType

_PACKAGE_NAME = 'org.chromium.chromium_linker_test_apk'
_ACTIVITY_NAME = '.ChromiumLinkerTestActivity'
# The test APK reads its command-line flags from this on-device file.
_COMMAND_LINE_FILE = '/data/local/tmp/chromium-linker-test-command-line'

# Logcat filters used during each test. Only the 'chromium' one is really
# needed, but the logs are added to the TestResult in case of error, and
# it is handy to have others as well when troubleshooting.
_LOGCAT_FILTERS = ['*:s', 'chromium:v', 'cr.chromium:v',
                   'cr.chromium_android_linker:v', 'cr.library_loader:v',
                   'cr.linker_test:v']
#_LOGCAT_FILTERS = ['*:v'] ## DEBUG

# Regular expression used to match status lines in logcat.
_RE_BROWSER_STATUS_LINE = re.compile(r' BROWSER_LINKER_TEST: (FAIL|SUCCESS)$')
_RE_RENDERER_STATUS_LINE = re.compile(r' RENDERER_LINKER_TEST: (FAIL|SUCCESS)$')
def _StartActivityAndWaitForLinkerTestStatus(device, timeout):
  """Force-start an activity and wait up to |timeout| seconds until the full
  linker test status lines appear in the logcat, recorded through |device|.

  Args:
    device: A DeviceUtils instance.
    timeout: Timeout in seconds

  Returns:
    A (status, logs) tuple, where status is a ResultType constant, and logs
    if the final logcat output as a string.
  """

  # 1. Start recording logcat with appropriate filters.
  with device.GetLogcatMonitor(filter_specs=_LOGCAT_FILTERS) as logmon:
    # 2. Force-start activity.
    device.StartActivity(
        intent.Intent(package=_PACKAGE_NAME, activity=_ACTIVITY_NAME),
        force_stop=True)

    # 3. Wait up to |timeout| seconds until the test status is in the logcat.
    result = ResultType.PASS
    try:
      browser_match = logmon.WaitFor(_RE_BROWSER_STATUS_LINE, timeout=timeout)
      logging.debug('Found browser match: %s', browser_match.group(0))
      renderer_match = logmon.WaitFor(_RE_RENDERER_STATUS_LINE,
                                      timeout=timeout)
      logging.debug('Found renderer match: %s', renderer_match.group(0))
      # Both processes must report SUCCESS for the test to pass.
      if (browser_match.group(1) != 'SUCCESS'
          or renderer_match.group(1) != 'SUCCESS'):
        result = ResultType.FAIL
    except device_errors.CommandTimeoutError:
      result = ResultType.TIMEOUT

  # Return the full recorded logcat for inclusion in the TestResult.
  return result, '\n'.join(device.adb.Logcat(dump=True))
class LibraryLoadMap(dict):
  """A helper class to pretty-print a map of library names to load addresses."""

  def __str__(self):
    # dict.items() behaves identically here and, unlike the previous
    # iteritems(), also works under Python 3.
    entries = ['\'%s\': 0x%x' % (name, address)
               for (name, address) in self.items()]
    return '{%s}' % (', '.join(entries))

  def __repr__(self):
    return 'LibraryLoadMap(%s)' % self.__str__()
class AddressList(list):
  """A helper class to pretty-print a list of load addresses."""

  def __str__(self):
    # Hex-format every address and join them in list-literal style.
    return '[%s]' % ', '.join('0x%x' % address for address in self)

  def __repr__(self):
    return 'AddressList(%s)' % str(self)
class LinkerTestCaseBase(object):
  """Base class for linker test cases.

  Subclasses override _RunTest(); Run() handles device setup, result
  formatting and TestRunResults bookkeeping.
  """

  def __init__(self, is_modern_linker=False, is_low_memory=False):
    """Create a test case.

    Args:
      is_modern_linker: True to test ModernLinker, False to test LegacyLinker.
      is_low_memory: True to simulate a low-memory device, False otherwise.
    """
    self.is_modern_linker = is_modern_linker
    if is_modern_linker:
      test_suffix = 'ForModernLinker'
    else:
      test_suffix = 'ForLegacyLinker'
    self.is_low_memory = is_low_memory
    if is_low_memory:
      test_suffix += 'LowMemoryDevice'
    else:
      test_suffix += 'RegularDevice'
    class_name = self.__class__.__name__
    # E.g. 'LinkerSharedRelroTest.ForLegacyLinkerRegularDevice'.
    self.qualified_name = '%s.%s' % (class_name, test_suffix)
    self.tagged_name = self.qualified_name

  def _RunTest(self, _device):
    """Run the test, must be overriden.

    Args:
      _device: A DeviceUtils interface.

    Returns:
      A (status, log) tuple, where <status> is a ResultType constant, and <log>
      is the logcat output captured during the test in case of error, or None
      in case of success.
    """
    return ResultType.FAIL, 'Unimplemented _RunTest() method!'

  def Run(self, device):
    """Run the test on a given device.

    Args:
      device: Name of target device where to run the test.

    Returns:
      A base_test_result.TestRunResult() instance.
    """
    margin = 8
    print '[ %-*s ] %s' % (margin, 'RUN', self.tagged_name)
    logging.info('Running linker test: %s', self.tagged_name)

    # Create command-line file on device, selecting the linker under test.
    if self.is_modern_linker:
      command_line_flags = '--use-linker=modern'
    else:
      command_line_flags = '--use-linker=legacy'
    if self.is_low_memory:
      command_line_flags += ' --low-memory-device'
    device.WriteFile(_COMMAND_LINE_FILE, command_line_flags)

    # Run the test.
    status, logs = self._RunTest(device)

    # GTest-style result line mirroring the RUN line above.
    result_text = 'OK'
    if status == ResultType.FAIL:
      result_text = 'FAILED'
    elif status == ResultType.TIMEOUT:
      result_text = 'TIMEOUT'
    print '[ %*s ] %s' % (margin, result_text, self.tagged_name)

    results = base_test_result.TestRunResults()
    results.AddResult(
        base_test_result.BaseTestResult(
            self.tagged_name,
            status,
            log=logs))

    return results

  def __str__(self):
    return self.tagged_name

  def __repr__(self):
    return self.tagged_name
class LinkerSharedRelroTest(LinkerTestCaseBase):
  """A linker test case to check the status of shared RELRO sections.

  The core of the checks performed here are pretty simple:

    - Clear the logcat and start recording with an appropriate set of filters.
    - Create the command-line appropriate for the test-case.
    - Start the activity (always forcing a cold start).
    - Every second, look at the current content of the filtered logcat lines
      and look for instances of the following:

        BROWSER_LINKER_TEST: <status>
        RENDERER_LINKER_TEST: <status>

      where <status> can be either FAIL or SUCCESS. These lines can appear
      in any order in the logcat. Once both browser and renderer status are
      found, stop the loop. Otherwise timeout after 30 seconds.

      Note that there can be other lines beginning with BROWSER_LINKER_TEST:
      and RENDERER_LINKER_TEST:, but are not followed by a <status> code.

    - The test case passes if the <status> for both the browser and renderer
      process are SUCCESS. Otherwise its a fail.
  """

  def _RunTest(self, device):
    # Wait up to 30 seconds until the linker test status is in the logcat.
    # The helper implements the whole protocol in the class docstring.
    return _StartActivityAndWaitForLinkerTestStatus(device, timeout=30)
| CapOM/ChromiumGStreamerBackend | build/android/pylib/linker/test_case.py | Python | bsd-3-clause | 8,228 |
import pkg_resources
from google.appengine.ext import vendor

# Make the bundled third-party packages in lib/ importable on App Engine.
path = 'lib'
vendor.add(path)
pkg_resources.working_set.add_entry(path)

# https://stackoverflow.com/a/59334563
# Re-import six so the vendored copy (not the SDK-provided one) wins.
import six; reload(six)
| phil-lopreiato/the-blue-alliance | appengine_config.py | Python | mit | 198 |
#!/usr/bin/env python
# encoding: utf-8
"""
test_geomutils.py
Created by Chris Waigl on 2015-04-21.
"""
from __future__ import division, print_function, absolute_import, unicode_literals
import os
from pygaarst import geomutils as gu
def test_modapsclient_creation():
    """Placeholder smoke test -- always passes."""
    assert True
# coding: utf-8
"""
OAuth2 provider setup.
It is based on the code from the example:
https://github.com/lepture/example-oauth2-server
More details are available here:
* http://flask-oauthlib.readthedocs.org/en/latest/oauth2.html
* http://lepture.com/en/2013/create-oauth-server
"""
from flask import Blueprint, request, render_template, jsonify, session, redirect
from flask_login import current_user
import flask_login
import webargs
from werkzeug import exceptions as http_exceptions
from werkzeug import security
from app.extensions import db, api, oauth2, login_manager
from app.modules.users.models import User
from . import parameters
from .models import OAuth2Client
import logging
log = logging.getLogger('flask_oauthlib')
login_manager.login_view = "auth.login"
auth_blueprint = Blueprint('auth', __name__, url_prefix='/auth') # pylint: disable=invalid-name
def get_current_user():
    """Return the User whose id is stored in the session.

    NOTE(review): when no 'id' is in the session this falls back to the
    User with primary key 1 -- looks like a development shortcut; confirm
    it is intended before shipping.
    """
    if 'id' in session:
        uid = session['id']
        return User.query.get(uid)
    else:
        return User.query.get(1)
@auth_blueprint.route('/login', methods=['GET', 'POST'])
def login(*args, **kwargs):
    """Render the login form (GET) or authenticate the posted credentials.

    On success redirects to the ``next`` query argument (default
    'auth/protected'); on failure returns the string 'Bad login'.
    """
    if request.method == 'GET':  # Note: it is critical to not have the action parameter on the form
        return '''
               Please log in to access your account
               <form method='POST'>
                <input type='text' name='email' id='email' placeholder='email'></input>
                <input type='password' name='pw' id='pw' placeholder='password'></input>
                <input type='submit' name='submit'></input>
               </form>
               '''
    email = request.form['email']
    password = request.form['pw']
    # Security fix: previously an empty password skipped verification and
    # logged in whichever user matched the email. Always verify.
    if password:
        user = User.find_with_password(email, password)
        if user is not None:
            flask_login.login_user(user)
            # 'next' renamed to avoid shadowing the builtin.
            next_url = request.args.get("next")
            if next_url is None:
                next_url = 'auth/protected'
            return redirect(next_url)
    return 'Bad login'
@auth_blueprint.route('/logout', methods=['GET', 'POST'])
@flask_login.login_required
def logout(*args, **kwargs):
    """End the current session and offer a form that POSTs back to /auth/login."""
    flask_login.logout_user()
    return '''
           <h1>You have successfully logged out</h1>
           Would you like to log in again?
           <form method='POST' action='login'>
            <input type='text' name='email' id='email' placeholder='email'></input>
            <input type='password' name='pw' id='pw' placeholder='password'></input>
            <input type='submit' name='login'></input>
           </form>
           '''
@auth_blueprint.route('/protected')
@flask_login.login_required
def protected():
    """Simple authenticated page echoing the current user's name."""
    return ''.join(['Logged in as: ', flask_login.current_user.username])
@auth_blueprint.route('/oauth2/token', methods=['GET', 'POST'])
@oauth2.token_handler
def access_token(*args, **kwargs):
    # pylint: disable=unused-argument
    """
    This endpoint is for exchanging/refreshing an access token.

    Returns:
        response (dict): a dictionary or None as the extra credentials for
        creating the token response.
    """
    log.debug("requested token")
    # flask-oauthlib's token_handler builds the token; None adds no extras.
    return None
@auth_blueprint.route('/oauth2/revoke', methods=['POST'])
@oauth2.revoke_handler
def revoke_token():
    """
    This endpoint allows a user to revoke their access token.
    """
    # The revoke_handler decorator performs the actual revocation.
    pass
@auth_blueprint.route('/oauth2/errors', methods=['POST'])
def error_message():
    """
    OAuth2 error callback endpoint; currently only logs that it was hit.
    (Previous docstring was copy-pasted from the revoke endpoint.)
    """
    log.debug("Error")
    pass
@oauth2.usergetter
def get_user(username, password, *args, **kwargs):
    """Resource-owner password credentials grant: validate a username/password.

    Returns:
        The matching User on success, else None.
    """
    user = User.query.filter_by(username=username).first()
    print("Running user getter")
    # Guard against an unknown username: previously user.check_password
    # raised AttributeError when no row matched.
    if user is not None and user.check_password(password):
        return user
    return None
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: load a User by its primary key."""
    return User.query.get(user_id)
@auth_blueprint.route('/oauth2/authorize', methods=['GET', 'POST'])
@flask_login.login_required
@oauth2.authorize_handler
def authorize(*args, **kwargs):
    # pylint: disable=unused-argument
    """
    This endpoint asks user if he grants access to his data to the requesting
    application.

    GET renders the consent form; POST returns True iff the user confirmed.
    """
    log.debug("requested authorization")
    if not current_user.is_authenticated:
        log.debug(("NOT AUTHENTICATED"))
        return api.abort(code=http_exceptions.Unauthorized.code)

    if request.method == 'GET':
        client_id = kwargs.get('client_id')
        log.debug("render authorizer")
        oauth2_client = OAuth2Client.query.filter_by(client_id=client_id).first()
        kwargs['client'] = oauth2_client
        kwargs['user'] = current_user
        # TODO: improve template design
        return render_template('authorize.html', **kwargs)

    # POST: the authorize_handler expects a boolean grant decision.
    confirm = request.form.get('confirm', 'no')
    return confirm == 'yes'
| millen1m/flask-restplus-server-example | app/modules/auth/views.py | Python | mit | 4,794 |
# coding=utf-8
from mainTools import main
def create_results(doc):
    """Yield a single result row with the document's sentiment as a string."""
    row = {'sentiment': str(doc['sentiment'])}
    yield row
def csv_header():
    """Return the CSV column names for the sentiment output."""
    return ['sentiment']
if __name__ == '__main__':
    # Delegate to the shared mainTools driver with this module's schema.
    main('sentiment', csv_header(), create_results)
| Geneea/keboola-connection | src/sentiment.py | Python | apache-2.0 | 254 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""sloth dataView application
=============================
"""
import os
import sys
from silx.gui import qt
import sloth
from sloth.gui.deprecated.dataview import DataViewDockWidget
class SlothDataViewMainWindow(qt.QMainWindow):
    """Main window for the Sloth data-view app, hosting the data-view dock."""

    def __init__(self, parent=None):
        super(SlothDataViewMainWindow, self).__init__(parent)
        # Load the window layout from the shared Qt Designer .ui file.
        uiPath = os.path.join(sloth._resourcesPath, 'gui', 'uis',
                              'main_base.ui')
        qt.loadUi(uiPath, baseinstance=self, package='sloth.gui')
        logoPath = os.path.join(sloth._resourcesPath, 'logo',
                                'xraysloth_logo_03.svg')
        self.setWindowTitle('Sloth - dataview')
        self.setWindowIcon(qt.QIcon(logoPath))

        # DATAVIEW DOCK WIDGET, docked on the left and shown by default.
        self.dataviewDockWidget = DataViewDockWidget(parent=self)
        self.addDockWidget(qt.Qt.LeftDockWidgetArea, self.dataviewDockWidget)
        self.dataviewDockWidget.setVisible(True)
def sloth_dataview_app():
    """Create the Qt application, show the main window and run the event loop."""
    app = qt.QApplication(sys.argv)
    app.setStyle("plastique")
    appli = SlothDataViewMainWindow()
    appli.show()
    # exec_() blocks until the window closes; its status becomes the exit code.
    sys.exit(app.exec_())
if __name__ == '__main__':
    # from silx import sx
    # sx.enable_gui()
    # Launch the standalone data-view application.
    sloth_dataview_app()
| maurov/xraysloth | sloth/gui/deprecated/sloth_dataview.py | Python | bsd-3-clause | 1,274 |
# Copyright (C) 2001-2007 Python Software Foundation
# email package unit tests
# The specific tests now live in Lib/email/test
from email.test.test_email import suite
from test import support
def test_main():
    """Run the email package's bundled suite under regrtest's runner."""
    support.run_unittest(suite())

if __name__ == '__main__':
    test_main()
| MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.2/Lib/test/test_email.py | Python | mit | 290 |
# Python code for picture Watermarking, 03/03/2016
from PIL import Image, ImageDraw
import glob
import os
import sys
# Below is the path to the folder "Watermarked"
# "Watermarked" is the folder with the photographs that I want to watermark
# NOTE: placeholder path -- replace '/.../' with the real folder location.
indir = "/.../Watermarked/*.*"
# Number of photographs processed so far (used only in progress output).
counter = 0
def getSize(fl):
    """Return the size of file *fl* in bytes (via os.stat)."""
    return os.stat(fl).st_size
# I start a loop reading the photographs
for picname in glob.glob(indir):
    counter +=1
    # I open and read the photograph
    im = Image.open(picname)
    # Checking the different file sizes
    print "Size of foto #", counter, "(bytes):", getSize(picname)
    print "Size of im string #", counter, "(bytes):", sys.getsizeof(im)
    # I calculate the photograph's size in pixels
    width, height = im.size
    # A new, fully transparent RGBA layer matching the photo is created
    WaterMark = Image.new("RGBA", im.size)
    # A drawing context on that layer
    waterdraw = ImageDraw.Draw(WaterMark, "RGBA")
    # Draw the watermark text near the bottom-right corner
    waterdraw.text((width-100, height-15)," © K. Michail ")
    # Build a luminance mask capped at 150 so the mark stays translucent
    watermask = WaterMark.convert("L").point(lambda x: min(x, 150))
    # Use the mask as the layer's alpha channel
    WaterMark.putalpha(watermask)
    # Merge the watermark layer onto the photograph
    im.paste(WaterMark, None, WaterMark)
    # Saving the watermarked pictures. The smaller output size is expected:
    # re-encoding as JPEG is lossy, so some picture information is lost.
    im.save(picname,"JPEG")
    print "Size of output string #", counter, "(bytes):", sys.getsizeof(im)
| Michael3con/Photographs_WaterMarking | WaterMarking.py | Python | mpl-2.0 | 1,524 |
"""Calibration of predicted probabilities."""
# Author: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Balazs Kegl <balazs.kegl@gmail.com>
# Jan Hendrik Metzen <jhm@informatik.uni-bremen.de>
# Mathieu Blondel <mathieu@mblondel.org>
#
# License: BSD 3 clause
import warnings
from inspect import signature
from functools import partial
from math import log
import numpy as np
from joblib import Parallel
from scipy.special import expit
from scipy.special import xlogy
from scipy.optimize import fmin_bfgs
from .base import (
BaseEstimator,
ClassifierMixin,
RegressorMixin,
clone,
MetaEstimatorMixin,
)
from .preprocessing import label_binarize, LabelEncoder
from .utils import (
column_or_1d,
deprecated,
indexable,
)
from .utils.multiclass import check_classification_targets
from .utils.fixes import delayed
from .utils.validation import check_is_fitted, check_consistent_length
from .utils.validation import _check_sample_weight, _num_samples
from .utils import _safe_indexing
from .isotonic import IsotonicRegression
from .svm import LinearSVC
from .model_selection import check_cv, cross_val_predict
class CalibratedClassifierCV(ClassifierMixin, MetaEstimatorMixin, BaseEstimator):
"""Probability calibration with isotonic regression or logistic regression.
This class uses cross-validation to both estimate the parameters of a
classifier and subsequently calibrate a classifier. With default
`ensemble=True`, for each cv split it
fits a copy of the base estimator to the training subset, and calibrates it
using the testing subset. For prediction, predicted probabilities are
averaged across these individual calibrated classifiers. When
`ensemble=False`, cross-validation is used to obtain unbiased predictions,
via :func:`~sklearn.model_selection.cross_val_predict`, which are then
used for calibration. For prediction, the base estimator, trained using all
the data, is used. This is the method implemented when `probabilities=True`
for :mod:`sklearn.svm` estimators.
Already fitted classifiers can be calibrated via the parameter
`cv="prefit"`. In this case, no cross-validation is used and all provided
data is used for calibration. The user has to take care manually that data
for model fitting and calibration are disjoint.
The calibration is based on the :term:`decision_function` method of the
`base_estimator` if it exists, else on :term:`predict_proba`.
Read more in the :ref:`User Guide <calibration>`.
Parameters
----------
base_estimator : estimator instance, default=None
The classifier whose output need to be calibrated to provide more
accurate `predict_proba` outputs. The default classifier is
a :class:`~sklearn.svm.LinearSVC`.
method : {'sigmoid', 'isotonic'}, default='sigmoid'
The method to use for calibration. Can be 'sigmoid' which
corresponds to Platt's method (i.e. a logistic regression model) or
'isotonic' which is a non-parametric approach. It is not advised to
use isotonic calibration with too few calibration samples
``(<<1000)`` since it tends to overfit.
cv : int, cross-validation generator, iterable or "prefit", \
default=None
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 5-fold cross-validation,
- integer, to specify the number of folds.
- :term:`CV splitter`,
- An iterable yielding (train, test) splits as arrays of indices.
For integer/None inputs, if ``y`` is binary or multiclass,
:class:`~sklearn.model_selection.StratifiedKFold` is used. If ``y`` is
neither binary nor multiclass, :class:`~sklearn.model_selection.KFold`
is used.
Refer to the :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
If "prefit" is passed, it is assumed that `base_estimator` has been
fitted already and all data is used for calibration.
.. versionchanged:: 0.22
``cv`` default value if None changed from 3-fold to 5-fold.
n_jobs : int, default=None
Number of jobs to run in parallel.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors.
Base estimator clones are fitted in parallel across cross-validation
iterations. Therefore parallelism happens only when `cv != "prefit"`.
See :term:`Glossary <n_jobs>` for more details.
.. versionadded:: 0.24
ensemble : bool, default=True
Determines how the calibrator is fitted when `cv` is not `'prefit'`.
Ignored if `cv='prefit'`.
If `True`, the `base_estimator` is fitted using training data and
calibrated using testing data, for each `cv` fold. The final estimator
is an ensemble of `n_cv` fitted classifer and calibrator pairs, where
`n_cv` is the number of cross-validation folds. The output is the
average predicted probabilities of all pairs.
If `False`, `cv` is used to compute unbiased predictions, via
:func:`~sklearn.model_selection.cross_val_predict`, which are then
used for calibration. At prediction time, the classifier used is the
`base_estimator` trained on all the data.
Note that this method is also internally implemented in
:mod:`sklearn.svm` estimators with the `probabilities=True` parameter.
.. versionadded:: 0.24
Attributes
----------
classes_ : ndarray of shape (n_classes,)
The class labels.
n_features_in_ : int
Number of features seen during :term:`fit`. Only defined if the
underlying base_estimator exposes such an attribute when fit.
.. versionadded:: 0.24
calibrated_classifiers_ : list (len() equal to cv or 1 if `cv="prefit"` \
or `ensemble=False`)
The list of classifier and calibrator pairs.
- When `cv="prefit"`, the fitted `base_estimator` and fitted
calibrator.
- When `cv` is not "prefit" and `ensemble=True`, `n_cv` fitted
`base_estimator` and calibrator pairs. `n_cv` is the number of
cross-validation folds.
- When `cv` is not "prefit" and `ensemble=False`, the `base_estimator`,
fitted on all the data, and fitted calibrator.
.. versionchanged:: 0.24
Single calibrated classifier case when `ensemble=False`.
Examples
--------
>>> from sklearn.datasets import make_classification
>>> from sklearn.naive_bayes import GaussianNB
>>> from sklearn.calibration import CalibratedClassifierCV
>>> X, y = make_classification(n_samples=100, n_features=2,
... n_redundant=0, random_state=42)
>>> base_clf = GaussianNB()
>>> calibrated_clf = CalibratedClassifierCV(base_estimator=base_clf, cv=3)
>>> calibrated_clf.fit(X, y)
CalibratedClassifierCV(base_estimator=GaussianNB(), cv=3)
>>> len(calibrated_clf.calibrated_classifiers_)
3
>>> calibrated_clf.predict_proba(X)[:5, :]
array([[0.110..., 0.889...],
[0.072..., 0.927...],
[0.928..., 0.071...],
[0.928..., 0.071...],
[0.071..., 0.928...]])
>>> from sklearn.model_selection import train_test_split
>>> X, y = make_classification(n_samples=100, n_features=2,
... n_redundant=0, random_state=42)
>>> X_train, X_calib, y_train, y_calib = train_test_split(
... X, y, random_state=42
... )
>>> base_clf = GaussianNB()
>>> base_clf.fit(X_train, y_train)
GaussianNB()
>>> calibrated_clf = CalibratedClassifierCV(
... base_estimator=base_clf,
... cv="prefit"
... )
>>> calibrated_clf.fit(X_calib, y_calib)
CalibratedClassifierCV(base_estimator=GaussianNB(), cv='prefit')
>>> len(calibrated_clf.calibrated_classifiers_)
1
>>> calibrated_clf.predict_proba([[-0.5, 0.5]])
array([[0.936..., 0.063...]])
References
----------
.. [1] Obtaining calibrated probability estimates from decision trees
and naive Bayesian classifiers, B. Zadrozny & C. Elkan, ICML 2001
.. [2] Transforming Classifier Scores into Accurate Multiclass
Probability Estimates, B. Zadrozny & C. Elkan, (KDD 2002)
.. [3] Probabilistic Outputs for Support Vector Machines and Comparisons to
Regularized Likelihood Methods, J. Platt, (1999)
.. [4] Predicting Good Probabilities with Supervised Learning,
A. Niculescu-Mizil & R. Caruana, ICML 2005
"""
def __init__(
self,
base_estimator=None,
*,
method="sigmoid",
cv=None,
n_jobs=None,
ensemble=True,
):
self.base_estimator = base_estimator
self.method = method
self.cv = cv
self.n_jobs = n_jobs
self.ensemble = ensemble
    def fit(self, X, y, sample_weight=None):
        """Fit the calibrated model.
        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data.
        y : array-like of shape (n_samples,)
            Target values.
        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If None, then samples are equally weighted.
        Returns
        -------
        self : object
            Returns an instance of self.
        """
        check_classification_targets(y)
        X, y = indexable(X, y)
        if self.base_estimator is None:
            # we want all classifiers that don't expose a random_state
            # to be deterministic (and we don't want to expose this one).
            base_estimator = LinearSVC(random_state=0)
        else:
            base_estimator = self.base_estimator
        self.calibrated_classifiers_ = []
        if self.cv == "prefit":
            # Prefit path: the user fitted base_estimator themselves; only the
            # calibrator is trained here, on all of (X, y).
            # `classes_` should be consistent with that of base_estimator
            check_is_fitted(self.base_estimator, attributes=["classes_"])
            self.classes_ = self.base_estimator.classes_
            pred_method, method_name = _get_prediction_method(base_estimator)
            n_classes = len(self.classes_)
            predictions = _compute_predictions(pred_method, method_name, X, n_classes)
            calibrated_classifier = _fit_calibrator(
                base_estimator,
                predictions,
                y,
                self.classes_,
                self.method,
                sample_weight,
            )
            self.calibrated_classifiers_.append(calibrated_classifier)
        else:
            # Set `classes_` using all `y`
            label_encoder_ = LabelEncoder().fit(y)
            self.classes_ = label_encoder_.classes_
            n_classes = len(self.classes_)
            # sample_weight checks
            fit_parameters = signature(base_estimator.fit).parameters
            supports_sw = "sample_weight" in fit_parameters
            if sample_weight is not None:
                sample_weight = _check_sample_weight(sample_weight, X)
                if not supports_sw:
                    estimator_name = type(base_estimator).__name__
                    warnings.warn(
                        f"Since {estimator_name} does not support "
                        "sample_weights, sample weights will only be"
                        " used for the calibration itself."
                    )
            # Check that each cross-validation fold can have at least one
            # example per class
            if isinstance(self.cv, int):
                n_folds = self.cv
            elif hasattr(self.cv, "n_splits"):
                n_folds = self.cv.n_splits
            else:
                n_folds = None
            if n_folds and np.any(
                [np.sum(y == class_) < n_folds for class_ in self.classes_]
            ):
                raise ValueError(
                    f"Requesting {n_folds}-fold "
                    "cross-validation but provided less than "
                    f"{n_folds} examples for at least one class."
                )
            cv = check_cv(self.cv, y, classifier=True)
            if self.ensemble:
                # ensemble=True: fit one (classifier, calibrator) pair per CV
                # fold; predict_proba later averages over all pairs.
                parallel = Parallel(n_jobs=self.n_jobs)
                self.calibrated_classifiers_ = parallel(
                    delayed(_fit_classifier_calibrator_pair)(
                        clone(base_estimator),
                        X,
                        y,
                        train=train,
                        test=test,
                        method=self.method,
                        classes=self.classes_,
                        supports_sw=supports_sw,
                        sample_weight=sample_weight,
                    )
                    for train, test in cv.split(X, y)
                )
            else:
                # ensemble=False: fit one calibrator on the pooled
                # out-of-fold predictions from cross_val_predict, then refit
                # a single clone of the base estimator on all the data.
                this_estimator = clone(base_estimator)
                _, method_name = _get_prediction_method(this_estimator)
                pred_method = partial(
                    cross_val_predict,
                    estimator=this_estimator,
                    X=X,
                    y=y,
                    cv=cv,
                    method=method_name,
                    n_jobs=self.n_jobs,
                )
                predictions = _compute_predictions(
                    pred_method, method_name, X, n_classes
                )
                if sample_weight is not None and supports_sw:
                    this_estimator.fit(X, y, sample_weight)
                else:
                    this_estimator.fit(X, y)
                calibrated_classifier = _fit_calibrator(
                    this_estimator,
                    predictions,
                    y,
                    self.classes_,
                    self.method,
                    sample_weight,
                )
                self.calibrated_classifiers_.append(calibrated_classifier)
        # Mirror n_features_in_ from the first fitted sub-estimator, when it
        # exposes one, so this meta-estimator validates like a fitted one.
        first_clf = self.calibrated_classifiers_[0].base_estimator
        if hasattr(first_clf, "n_features_in_"):
            self.n_features_in_ = first_clf.n_features_in_
        return self
def predict_proba(self, X):
"""Calibrated probabilities of classification.
This function returns calibrated probabilities of classification
according to each class on an array of test vectors X.
Parameters
----------
X : The samples, as accepted by base_estimator.predict_proba
Returns
-------
C : ndarray of shape (n_samples, n_classes)
The predicted probas.
"""
check_is_fitted(self)
# Compute the arithmetic mean of the predictions of the calibrated
# classifiers
mean_proba = np.zeros((_num_samples(X), len(self.classes_)))
for calibrated_classifier in self.calibrated_classifiers_:
proba = calibrated_classifier.predict_proba(X)
mean_proba += proba
mean_proba /= len(self.calibrated_classifiers_)
return mean_proba
def predict(self, X):
"""Predict the target of new samples. The predicted class is the
class that has the highest probability, and can thus be different
from the prediction of the uncalibrated classifier.
Parameters
----------
X : The samples, as accepted by base_estimator.predict
Returns
-------
C : ndarray of shape (n_samples,)
The predicted class.
"""
check_is_fitted(self)
return self.classes_[np.argmax(self.predict_proba(X), axis=1)]
def _more_tags(self):
return {
"_xfail_checks": {
"check_sample_weights_invariance": (
"zero sample_weight is not equivalent to removing samples"
),
}
}
def _fit_classifier_calibrator_pair(
    estimator, X, y, train, test, supports_sw, method, classes, sample_weight=None
):
    """Train one classifier/calibrator pair for a single CV split.

    The classifier is fitted on the `train` indices; its raw predictions on
    the `test` indices (together with the corresponding labels) are then used
    to fit the calibrator.

    Parameters
    ----------
    estimator : estimator instance
        Cloned base estimator.
    X : array-like, shape (n_samples, n_features)
        Sample data.
    y : array-like, shape (n_samples,)
        Targets.
    train : ndarray, shape (n_train_indicies,)
        Indices of the training subset.
    test : ndarray, shape (n_test_indicies,)
        Indices of the testing subset.
    supports_sw : bool
        Whether or not the `estimator` supports sample weights.
    method : {'sigmoid', 'isotonic'}
        Method to use for calibration.
    classes : ndarray, shape (n_classes,)
        The target classes.
    sample_weight : array-like, default=None
        Sample weights for `X`.

    Returns
    -------
    calibrated_classifier : _CalibratedClassifier instance
    """
    X_train = _safe_indexing(X, train)
    y_train = _safe_indexing(y, train)
    X_test = _safe_indexing(X, test)
    y_test = _safe_indexing(y, test)
    # Sample weights are split only when the estimator can consume them.
    sw_train = sw_test = None
    if supports_sw and sample_weight is not None:
        sw_train = _safe_indexing(sample_weight, train)
        sw_test = _safe_indexing(sample_weight, test)
    if supports_sw:
        estimator.fit(X_train, y_train, sample_weight=sw_train)
    else:
        estimator.fit(X_train, y_train)
    pred_method, method_name = _get_prediction_method(estimator)
    predictions = _compute_predictions(pred_method, method_name, X_test, len(classes))
    return _fit_calibrator(
        estimator, predictions, y_test, classes, method, sample_weight=sw_test
    )
def _get_prediction_method(clf):
"""Return prediction method.
`decision_function` method of `clf` returned, if it
exists, otherwise `predict_proba` method returned.
Parameters
----------
clf : Estimator instance
Fitted classifier to obtain the prediction method from.
Returns
-------
prediction_method : callable
The prediction method.
method_name : str
The name of the prediction method.
"""
if hasattr(clf, "decision_function"):
method = getattr(clf, "decision_function")
return method, "decision_function"
elif hasattr(clf, "predict_proba"):
method = getattr(clf, "predict_proba")
return method, "predict_proba"
else:
raise RuntimeError(
"'base_estimator' has no 'decision_function' or 'predict_proba' method."
)
def _compute_predictions(pred_method, method_name, X, n_classes):
"""Return predictions for `X` and reshape binary outputs to shape
(n_samples, 1).
Parameters
----------
pred_method : callable
Prediction method.
method_name: str
Name of the prediction method
X : array-like or None
Data used to obtain predictions.
n_classes : int
Number of classes present.
Returns
-------
predictions : array-like, shape (X.shape[0], len(clf.classes_))
The predictions. Note if there are 2 classes, array is of shape
(X.shape[0], 1).
"""
predictions = pred_method(X=X)
if method_name == "decision_function":
if predictions.ndim == 1:
predictions = predictions[:, np.newaxis]
elif method_name == "predict_proba":
if n_classes == 2:
predictions = predictions[:, 1:]
else: # pragma: no cover
# this branch should be unreachable.
raise ValueError(f"Invalid prediction method: {method_name}")
return predictions
def _fit_calibrator(clf, predictions, y, classes, method, sample_weight=None):
    """Fit calibrator(s) and return a `_CalibratedClassifier`
    instance.
    `n_classes` (i.e. `len(clf.classes_)`) calibrators are fitted.
    However, if `n_classes` equals 2, one calibrator is fitted.
    Parameters
    ----------
    clf : estimator instance
        Fitted classifier.
    predictions : array-like, shape (n_samples, n_classes) or (n_samples, 1) \
        when binary.
        Raw predictions returned by the un-calibrated base classifier.
    y : array-like, shape (n_samples,)
        The targets.
    classes : ndarray, shape (n_classes,)
        All the prediction classes.
    method : {'sigmoid', 'isotonic'}
        The method to use for calibration.
    sample_weight : ndarray, shape (n_samples,), default=None
        Sample weights. If None, then samples are equally weighted.
    Returns
    -------
    pipeline : _CalibratedClassifier instance
    """
    # One-vs-all binarization: column j of Y is the indicator for classes[j].
    Y = label_binarize(y, classes=classes)
    label_encoder = LabelEncoder().fit(classes)
    # Map the classifier's own class ordering onto indices into `classes`.
    pos_class_indices = label_encoder.transform(clf.classes_)
    calibrators = []
    # One calibrator per prediction column (a single column in the binary
    # case, since `predictions` then holds only the positive-class scores).
    for class_idx, this_pred in zip(pos_class_indices, predictions.T):
        if method == "isotonic":
            calibrator = IsotonicRegression(out_of_bounds="clip")
        elif method == "sigmoid":
            calibrator = _SigmoidCalibration()
        else:
            raise ValueError(
                f"'method' should be one of: 'sigmoid' or 'isotonic'. Got {method}."
            )
        calibrator.fit(this_pred, Y[:, class_idx], sample_weight)
        calibrators.append(calibrator)
    pipeline = _CalibratedClassifier(clf, calibrators, method=method, classes=classes)
    return pipeline
class _CalibratedClassifier:
    """Pipeline-like chaining a fitted classifier and its fitted calibrators.
    Parameters
    ----------
    base_estimator : estimator instance
        Fitted classifier.
    calibrators : list of fitted estimator instances
        List of fitted calibrators (either 'IsotonicRegression' or
        '_SigmoidCalibration'). The number of calibrators equals the number of
        classes. However, if there are 2 classes, the list contains only one
        fitted calibrator.
    classes : array-like of shape (n_classes,)
        All the prediction classes.
    method : {'sigmoid', 'isotonic'}, default='sigmoid'
        The method to use for calibration. Can be 'sigmoid' which
        corresponds to Platt's method or 'isotonic' which is a
        non-parametric approach based on isotonic regression.
    Attributes
    ----------
    calibrators_ : list of fitted estimator instances
        Same as `calibrators`. Exposed for backward-compatibility. Use
        `calibrators` instead.
        .. deprecated:: 0.24
            `calibrators_` is deprecated from 0.24 and will be removed in
            1.1 (renaming of 0.26). Use `calibrators` instead.
    """
    def __init__(self, base_estimator, calibrators, *, classes, method="sigmoid"):
        self.base_estimator = base_estimator
        self.calibrators = calibrators
        self.classes = classes
        self.method = method
    # TODO: Remove in 1.1
    # mypy error: Decorated property not supported
    @deprecated(  # type: ignore
        "`calibrators_` is deprecated in 0.24 and will be removed in 1.1"
        "(renaming of 0.26). Use `calibrators` instead."
    )
    @property
    def calibrators_(self):
        # Backward-compatibility alias; see the deprecation note above.
        return self.calibrators
    def predict_proba(self, X):
        """Calculate calibrated probabilities.
        Calculates classification calibrated probabilities
        for each class, in a one-vs-all manner, for `X`.
        Parameters
        ----------
        X : ndarray of shape (n_samples, n_features)
            The sample data.
        Returns
        -------
        proba : array, shape (n_samples, n_classes)
            The predicted probabilities. Can be exact zeros.
        """
        n_classes = len(self.classes)
        pred_method, method_name = _get_prediction_method(self.base_estimator)
        predictions = _compute_predictions(pred_method, method_name, X, n_classes)
        # Map the base estimator's class order onto columns of `proba`.
        label_encoder = LabelEncoder().fit(self.classes)
        pos_class_indices = label_encoder.transform(self.base_estimator.classes_)
        proba = np.zeros((_num_samples(X), n_classes))
        # Each calibrator maps one column of raw predictions to a probability.
        for class_idx, this_pred, calibrator in zip(
            pos_class_indices, predictions.T, self.calibrators
        ):
            if n_classes == 2:
                # When binary, `predictions` consists only of predictions for
                # clf.classes_[1] but `pos_class_indices` = 0
                class_idx += 1
            proba[:, class_idx] = calibrator.predict(this_pred)
        # Normalize the probabilities
        if n_classes == 2:
            proba[:, 0] = 1.0 - proba[:, 1]
        else:
            denominator = np.sum(proba, axis=1)[:, np.newaxis]
            # In the edge case where for each class calibrator returns a null
            # probability for a given sample, use the uniform distribution
            # instead.
            uniform_proba = np.full_like(proba, 1 / n_classes)
            proba = np.divide(
                proba, denominator, out=uniform_proba, where=denominator != 0
            )
        # Deal with cases where the predicted probability minimally exceeds 1.0
        proba[(1.0 < proba) & (proba <= 1.0 + 1e-5)] = 1.0
        return proba
def _sigmoid_calibration(predictions, y, sample_weight=None):
    """Probability Calibration with sigmoid method (Platt 2000)
    Fits `a` and `b` such that `expit(-(a * predictions + b))` approximates
    the probability of the positive class.
    Parameters
    ----------
    predictions : ndarray of shape (n_samples,)
        The decision function or predict proba for the samples.
    y : ndarray of shape (n_samples,)
        The targets.
    sample_weight : array-like of shape (n_samples,), default=None
        Sample weights. If None, then samples are equally weighted.
    Returns
    -------
    a : float
        The slope.
    b : float
        The intercept.
    References
    ----------
    Platt, "Probabilistic Outputs for Support Vector Machines"
    """
    predictions = column_or_1d(predictions)
    y = column_or_1d(y)
    F = predictions  # F follows Platt's notations
    # Bayesian priors (see Platt end of section 2.2)
    prior0 = float(np.sum(y <= 0))
    prior1 = y.shape[0] - prior0
    # T holds the smoothed target probabilities (Platt's out-of-sample
    # correction) instead of hard 0/1 labels.
    T = np.zeros(y.shape)
    T[y > 0] = (prior1 + 1.0) / (prior1 + 2.0)
    T[y <= 0] = 1.0 / (prior0 + 2.0)
    T1 = 1.0 - T
    def objective(AB):
        # From Platt (beginning of Section 2.2)
        # Negative log-likelihood of the sigmoid model; AB = (slope, intercept).
        P = expit(-(AB[0] * F + AB[1]))
        loss = -(xlogy(T, P) + xlogy(T1, 1.0 - P))
        if sample_weight is not None:
            return (sample_weight * loss).sum()
        else:
            return loss.sum()
    def grad(AB):
        # gradient of the objective function
        P = expit(-(AB[0] * F + AB[1]))
        TEP_minus_T1P = T - P
        if sample_weight is not None:
            TEP_minus_T1P *= sample_weight
        dA = np.dot(TEP_minus_T1P, F)
        dB = np.sum(TEP_minus_T1P)
        return np.array([dA, dB])
    # Starting point recommended by Platt: zero slope, intercept from priors.
    AB0 = np.array([0.0, log((prior0 + 1.0) / (prior1 + 1.0))])
    AB_ = fmin_bfgs(objective, AB0, fprime=grad, disp=False)
    return AB_[0], AB_[1]
class _SigmoidCalibration(RegressorMixin, BaseEstimator):
    """Sigmoid regression model.
    Thin estimator wrapper around `_sigmoid_calibration` (Platt scaling).
    Attributes
    ----------
    a_ : float
        The slope.
    b_ : float
        The intercept.
    """
    def fit(self, X, y, sample_weight=None):
        """Fit the model using X, y as training data.
        Parameters
        ----------
        X : array-like of shape (n_samples,)
            Training data.
        y : array-like of shape (n_samples,)
            Training target.
        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If None, then samples are equally weighted.
        Returns
        -------
        self : object
            Returns an instance of self.
        """
        X = column_or_1d(X)
        y = column_or_1d(y)
        X, y = indexable(X, y)
        self.a_, self.b_ = _sigmoid_calibration(X, y, sample_weight)
        return self
    def predict(self, T):
        """Predict new data by linear interpolation.
        Parameters
        ----------
        T : array-like of shape (n_samples,)
            Data to predict from.
        Returns
        -------
        T_ : ndarray of shape (n_samples,)
            The predicted data.
        """
        T = column_or_1d(T)
        # Sign convention matches _sigmoid_calibration's objective.
        return expit(-(self.a_ * T + self.b_))
def calibration_curve(y_true, y_prob, *, normalize=False, n_bins=5, strategy="uniform"):
    """Compute true and predicted probabilities for a calibration curve.
    The method assumes the inputs come from a binary classifier, and
    discretize the [0, 1] interval into bins.
    Calibration curves may also be referred to as reliability diagrams.
    Read more in the :ref:`User Guide <calibration>`.
    Parameters
    ----------
    y_true : array-like of shape (n_samples,)
        True targets.
    y_prob : array-like of shape (n_samples,)
        Probabilities of the positive class.
    normalize : bool, default=False
        Whether y_prob needs to be normalized into the [0, 1] interval, i.e.
        is not a proper probability. If True, the smallest value in y_prob
        is linearly mapped onto 0 and the largest one onto 1.
    n_bins : int, default=5
        Number of bins to discretize the [0, 1] interval. A bigger number
        requires more data. Bins with no samples (i.e. without
        corresponding values in `y_prob`) will not be returned, thus the
        returned arrays may have less than `n_bins` values.
    strategy : {'uniform', 'quantile'}, default='uniform'
        Strategy used to define the widths of the bins.
        uniform
            The bins have identical widths.
        quantile
            The bins have the same number of samples and depend on `y_prob`.
    Returns
    -------
    prob_true : ndarray of shape (n_bins,) or smaller
        The proportion of samples whose class is the positive class, in each
        bin (fraction of positives).
    prob_pred : ndarray of shape (n_bins,) or smaller
        The mean predicted probability in each bin.
    References
    ----------
    Alexandru Niculescu-Mizil and Rich Caruana (2005) Predicting Good
    Probabilities With Supervised Learning, in Proceedings of the 22nd
    International Conference on Machine Learning (ICML).
    See section 4 (Qualitative Analysis of Predictions).
    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.calibration import calibration_curve
    >>> y_true = np.array([0, 0, 0, 0, 1, 1, 1, 1, 1])
    >>> y_pred = np.array([0.1, 0.2, 0.3, 0.4, 0.65, 0.7, 0.8, 0.9, 1.])
    >>> prob_true, prob_pred = calibration_curve(y_true, y_pred, n_bins=3)
    >>> prob_true
    array([0. , 0.5, 1. ])
    >>> prob_pred
    array([0.2 , 0.525, 0.85 ])
    """
    y_true = column_or_1d(y_true)
    y_prob = column_or_1d(y_prob)
    check_consistent_length(y_true, y_prob)
    if normalize:  # Normalize predicted values into interval [0, 1]
        y_prob = (y_prob - y_prob.min()) / (y_prob.max() - y_prob.min())
    elif y_prob.min() < 0 or y_prob.max() > 1:
        raise ValueError(
            "y_prob has values outside [0, 1] and normalize is set to False."
        )
    labels = np.unique(y_true)
    if len(labels) > 2:
        raise ValueError(
            "Only binary classification is supported. Provided labels %s." % labels
        )
    # Reduce the two labels to a single 0/1 indicator column.
    y_true = label_binarize(y_true, classes=labels)[:, 0]
    if strategy == "quantile":  # Determine bin edges by distribution of data
        quantiles = np.linspace(0, 1, n_bins + 1)
        bins = np.percentile(y_prob, quantiles * 100)
        # Widen the last edge so the maximum value falls inside the last bin.
        bins[-1] = bins[-1] + 1e-8
    elif strategy == "uniform":
        bins = np.linspace(0.0, 1.0 + 1e-8, n_bins + 1)
    else:
        raise ValueError(
            "Invalid entry to 'strategy' input. Strategy "
            "must be either 'quantile' or 'uniform'."
        )
    # Assign each prediction to a bin, then aggregate per bin with bincount.
    binids = np.digitize(y_prob, bins) - 1
    bin_sums = np.bincount(binids, weights=y_prob, minlength=len(bins))
    bin_true = np.bincount(binids, weights=y_true, minlength=len(bins))
    bin_total = np.bincount(binids, minlength=len(bins))
    # Drop empty bins so no division by zero occurs below.
    nonzero = bin_total != 0
    prob_true = bin_true[nonzero] / bin_total[nonzero]
    prob_pred = bin_sums[nonzero] / bin_total[nonzero]
    return prob_true, prob_pred
| huzq/scikit-learn | sklearn/calibration.py | Python | bsd-3-clause | 32,602 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.0.0.11832 (http://hl7.org/fhir/StructureDefinition/TestScript) on 2017-03-22.
# 2017, SMART Health IT.
from . import domainresource
class TestScript(domainresource.DomainResource):
    """ Describes a set of tests.
    A structured set of tests against a FHIR server implementation to determine
    compliance against the FHIR specification.
    """
    resource_type = "TestScript"
    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.
        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Generated code: attributes are declared (all defaulting to None)
        # before the super().__init__ call, which presumably populates them
        # from `jsondict` — confirm against the fhirclient base class.
        self.contact = None
        """ Contact details for the publisher.
        List of `ContactDetail` items (represented as `dict` in JSON). """
        self.copyright = None
        """ Use and/or publishing restrictions.
        Type `str`. """
        self.date = None
        """ Date this was last changed.
        Type `FHIRDate` (represented as `str` in JSON). """
        self.description = None
        """ Natural language description of the test script.
        Type `str`. """
        self.destination = None
        """ An abstract server representing a destination or receiver in a
        message exchange.
        List of `TestScriptDestination` items (represented as `dict` in JSON). """
        self.experimental = None
        """ For testing purposes, not real usage.
        Type `bool`. """
        self.fixture = None
        """ Fixture in the test script - by reference (uri).
        List of `TestScriptFixture` items (represented as `dict` in JSON). """
        self.identifier = None
        """ Additional identifier for the test script.
        Type `Identifier` (represented as `dict` in JSON). """
        self.jurisdiction = None
        """ Intended jurisdiction for test script (if applicable).
        List of `CodeableConcept` items (represented as `dict` in JSON). """
        self.metadata = None
        """ Required capability that is assumed to function correctly on the
        FHIR server being tested.
        Type `TestScriptMetadata` (represented as `dict` in JSON). """
        self.name = None
        """ Name for this test script (computer friendly).
        Type `str`. """
        self.origin = None
        """ An abstract server representing a client or sender in a message
        exchange.
        List of `TestScriptOrigin` items (represented as `dict` in JSON). """
        self.profile = None
        """ Reference of the validation profile.
        List of `FHIRReference` items referencing `Resource` (represented as `dict` in JSON). """
        self.publisher = None
        """ Name of the publisher (organization or individual).
        Type `str`. """
        self.purpose = None
        """ Why this test script is defined.
        Type `str`. """
        self.rule = None
        """ Assert rule used within the test script.
        List of `TestScriptRule` items (represented as `dict` in JSON). """
        self.ruleset = None
        """ Assert ruleset used within the test script.
        List of `TestScriptRuleset` items (represented as `dict` in JSON). """
        self.setup = None
        """ A series of required setup operations before tests are executed.
        Type `TestScriptSetup` (represented as `dict` in JSON). """
        self.status = None
        """ draft | active | retired | unknown.
        Type `str`. """
        self.teardown = None
        """ A series of required clean up steps.
        Type `TestScriptTeardown` (represented as `dict` in JSON). """
        self.test = None
        """ A test in this script.
        List of `TestScriptTest` items (represented as `dict` in JSON). """
        self.title = None
        """ Name for this test script (human friendly).
        Type `str`. """
        self.url = None
        """ Logical URI to reference this test script (globally unique).
        Type `str`. """
        self.useContext = None
        """ Context the content is intended to support.
        List of `UsageContext` items (represented as `dict` in JSON). """
        self.variable = None
        """ Placeholder for evaluated elements.
        List of `TestScriptVariable` items (represented as `dict` in JSON). """
        self.version = None
        """ Business version of the test script.
        Type `str`. """
        super(TestScript, self).__init__(jsondict=jsondict, strict=strict)
    def elementProperties(self):
        # Each tuple appears to follow fhirclient's layout:
        # (attr_name, json_name, type, is_list, "one of many" group, required)
        # — confirm against FHIRAbstractBase.elementProperties.
        # NOTE(review): names like `contactdetail`, `fhirdate`, `identifier`
        # are presumably imported elsewhere in this module — verify.
        js = super(TestScript, self).elementProperties()
        js.extend([
            ("contact", "contact", contactdetail.ContactDetail, True, None, False),
            ("copyright", "copyright", str, False, None, False),
            ("date", "date", fhirdate.FHIRDate, False, None, False),
            ("description", "description", str, False, None, False),
            ("destination", "destination", TestScriptDestination, True, None, False),
            ("experimental", "experimental", bool, False, None, False),
            ("fixture", "fixture", TestScriptFixture, True, None, False),
            ("identifier", "identifier", identifier.Identifier, False, None, False),
            ("jurisdiction", "jurisdiction", codeableconcept.CodeableConcept, True, None, False),
            ("metadata", "metadata", TestScriptMetadata, False, None, False),
            ("name", "name", str, False, None, True),
            ("origin", "origin", TestScriptOrigin, True, None, False),
            ("profile", "profile", fhirreference.FHIRReference, True, None, False),
            ("publisher", "publisher", str, False, None, False),
            ("purpose", "purpose", str, False, None, False),
            ("rule", "rule", TestScriptRule, True, None, False),
            ("ruleset", "ruleset", TestScriptRuleset, True, None, False),
            ("setup", "setup", TestScriptSetup, False, None, False),
            ("status", "status", str, False, None, True),
            ("teardown", "teardown", TestScriptTeardown, False, None, False),
            ("test", "test", TestScriptTest, True, None, False),
            ("title", "title", str, False, None, False),
            ("url", "url", str, False, None, True),
            ("useContext", "useContext", usagecontext.UsageContext, True, None, False),
            ("variable", "variable", TestScriptVariable, True, None, False),
            ("version", "version", str, False, None, False),
        ])
        return js
from . import backboneelement
class TestScriptDestination(backboneelement.BackboneElement):
    """ An abstract server representing a destination or receiver in a message
    exchange.
    An abstract server used in operations within this test script in the
    destination element.
    """
    resource_type = "TestScriptDestination"
    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.
        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        self.index = None
        """ The index of the abstract destination server starting at 1.
        Type `int`. """
        self.profile = None
        """ FHIR-Server | FHIR-SDC-FormManager | FHIR-SDC-FormReceiver | FHIR-
        SDC-FormProcessor.
        Type `Coding` (represented as `dict` in JSON). """
        super(TestScriptDestination, self).__init__(jsondict=jsondict, strict=strict)
    def elementProperties(self):
        # Tuple layout: (attr_name, json_name, type, is_list, of_many, required)
        # — per fhirclient's FHIRAbstractBase; confirm against the base class.
        js = super(TestScriptDestination, self).elementProperties()
        js.extend([
            ("index", "index", int, False, None, True),
            ("profile", "profile", coding.Coding, False, None, True),
        ])
        return js
class TestScriptFixture(backboneelement.BackboneElement):
    """ Fixture in the test script - by reference (uri).
    Fixture in the test script - by reference (uri). All fixtures are required
    for the test script to execute.
    """
    resource_type = "TestScriptFixture"
    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.
        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        self.autocreate = None
        """ Whether or not to implicitly create the fixture during setup.
        Type `bool`. """
        self.autodelete = None
        """ Whether or not to implicitly delete the fixture during teardown.
        Type `bool`. """
        self.resource = None
        """ Reference of the resource.
        Type `FHIRReference` referencing `Resource` (represented as `dict` in JSON). """
        super(TestScriptFixture, self).__init__(jsondict=jsondict, strict=strict)
    def elementProperties(self):
        # Tuple layout: (attr_name, json_name, type, is_list, of_many, required)
        # — per fhirclient's FHIRAbstractBase; confirm against the base class.
        js = super(TestScriptFixture, self).elementProperties()
        js.extend([
            ("autocreate", "autocreate", bool, False, None, False),
            ("autodelete", "autodelete", bool, False, None, False),
            ("resource", "resource", fhirreference.FHIRReference, False, None, False),
        ])
        return js
class TestScriptMetadata(backboneelement.BackboneElement):
    """ Required capability that is assumed to function correctly on the FHIR
    server being tested.
    The required capability must exist and are assumed to function correctly on
    the FHIR server being tested.
    """
    resource_type = "TestScriptMetadata"
    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.
        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        self.capability = None
        """ Capabilities that are assumed to function correctly on the FHIR
        server being tested.
        List of `TestScriptMetadataCapability` items (represented as `dict` in JSON). """
        self.link = None
        """ Links to the FHIR specification.
        List of `TestScriptMetadataLink` items (represented as `dict` in JSON). """
        super(TestScriptMetadata, self).__init__(jsondict=jsondict, strict=strict)
    def elementProperties(self):
        # Tuple layout: (attr_name, json_name, type, is_list, of_many, required)
        # — per fhirclient's FHIRAbstractBase; confirm against the base class.
        js = super(TestScriptMetadata, self).elementProperties()
        js.extend([
            ("capability", "capability", TestScriptMetadataCapability, True, None, True),
            ("link", "link", TestScriptMetadataLink, True, None, False),
        ])
        return js
class TestScriptMetadataCapability(backboneelement.BackboneElement):
    """ Capabilities that are assumed to function correctly on the FHIR server
    being tested.
    Capabilities that must exist and are assumed to function correctly on the
    FHIR server being tested.
    """
    resource_type = "TestScriptMetadataCapability"
    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.
        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        self.capabilities = None
        """ Required Capability Statement.
        Type `FHIRReference` referencing `CapabilityStatement` (represented as `dict` in JSON). """
        self.description = None
        """ The expected capabilities of the server.
        Type `str`. """
        self.destination = None
        """ Which server these requirements apply to.
        Type `int`. """
        self.link = None
        """ Links to the FHIR specification.
        List of `str` items. """
        self.origin = None
        """ Which origin server these requirements apply to.
        List of `int` items. """
        self.required = None
        """ Are the capabilities required?.
        Type `bool`. """
        self.validated = None
        """ Are the capabilities validated?.
        Type `bool`. """
        super(TestScriptMetadataCapability, self).__init__(jsondict=jsondict, strict=strict)
    def elementProperties(self):
        # Tuple layout: (attr_name, json_name, type, is_list, of_many, required)
        # — per fhirclient's FHIRAbstractBase; confirm against the base class.
        js = super(TestScriptMetadataCapability, self).elementProperties()
        js.extend([
            ("capabilities", "capabilities", fhirreference.FHIRReference, False, None, True),
            ("description", "description", str, False, None, False),
            ("destination", "destination", int, False, None, False),
            ("link", "link", str, True, None, False),
            ("origin", "origin", int, True, None, False),
            ("required", "required", bool, False, None, False),
            ("validated", "validated", bool, False, None, False),
        ])
        return js
class TestScriptMetadataLink(backboneelement.BackboneElement):
    """ Links to the FHIR specification.
    A link to the FHIR specification that this test is covering.
    """
    resource_type = "TestScriptMetadataLink"
    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.
        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        self.description = None
        """ Short description.
        Type `str`. """
        self.url = None
        """ URL to the specification.
        Type `str`. """
        super(TestScriptMetadataLink, self).__init__(jsondict=jsondict, strict=strict)
    def elementProperties(self):
        # Tuple layout: (attr_name, json_name, type, is_list, of_many, required)
        # — per fhirclient's FHIRAbstractBase; confirm against the base class.
        js = super(TestScriptMetadataLink, self).elementProperties()
        js.extend([
            ("description", "description", str, False, None, False),
            ("url", "url", str, False, None, True),
        ])
        return js
class TestScriptOrigin(backboneelement.BackboneElement):
    """ An abstract server representing a client or sender in a message exchange.
    An abstract server used in operations within this test script in the origin
    element.
    """
    resource_type = "TestScriptOrigin"
    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.
        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        self.index = None
        """ The index of the abstract origin server starting at 1.
        Type `int`. """
        self.profile = None
        """ FHIR-Client | FHIR-SDC-FormFiller.
        Type `Coding` (represented as `dict` in JSON). """
        super(TestScriptOrigin, self).__init__(jsondict=jsondict, strict=strict)
    def elementProperties(self):
        # Tuple layout: (attr_name, json_name, type, is_list, of_many, required)
        # — per fhirclient's FHIRAbstractBase; confirm against the base class.
        js = super(TestScriptOrigin, self).elementProperties()
        js.extend([
            ("index", "index", int, False, None, True),
            ("profile", "profile", coding.Coding, False, None, True),
        ])
        return js
class TestScriptRule(backboneelement.BackboneElement):
    """ Assert rule used within the test script.
    Assert rule to be used in one or more asserts within the test script.
    """
    resource_type = "TestScriptRule"
    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.
        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        self.param = None
        """ Rule parameter template.
        List of `TestScriptRuleParam` items (represented as `dict` in JSON). """
        self.resource = None
        """ Assert rule resource reference.
        Type `FHIRReference` referencing `Resource` (represented as `dict` in JSON). """
        super(TestScriptRule, self).__init__(jsondict=jsondict, strict=strict)
    def elementProperties(self):
        # Tuple layout: (attr_name, json_name, type, is_list, of_many, required)
        # — per fhirclient's FHIRAbstractBase; confirm against the base class.
        js = super(TestScriptRule, self).elementProperties()
        js.extend([
            ("param", "param", TestScriptRuleParam, True, None, False),
            ("resource", "resource", fhirreference.FHIRReference, False, None, True),
        ])
        return js
class TestScriptRuleParam(backboneelement.BackboneElement):
    """ Rule parameter template.

    Each rule template can take one or more parameters for rule evaluation.
    """

    resource_type = "TestScriptRuleParam"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Parameter name matching external assert rule parameter (str).
        self.name = None
        # Parameter value defined either explicitly or dynamically (str).
        self.value = None
        super(TestScriptRuleParam, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptRuleParam, self).elementProperties()
        props += [
            ("name", "name", str, False, None, True),
            ("value", "value", str, False, None, False),
        ]
        return props
class TestScriptRuleset(backboneelement.BackboneElement):
    """ Assert ruleset used within the test script.

    Contains one or more rules. Offers a way to group rules so assertions
    could reference the group of rules and have them all applied.
    """

    resource_type = "TestScriptRuleset"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Assert ruleset resource reference (FHIRReference to a Resource).
        self.resource = None
        # The referenced rule within the ruleset (list of TestScriptRulesetRule).
        self.rule = None
        super(TestScriptRuleset, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptRuleset, self).elementProperties()
        props += [
            ("resource", "resource", fhirreference.FHIRReference, False, None, True),
            ("rule", "rule", TestScriptRulesetRule, True, None, True),
        ]
        return props
class TestScriptRulesetRule(backboneelement.BackboneElement):
    """ The referenced rule within the ruleset.

    The referenced rule within the external ruleset template.
    """

    resource_type = "TestScriptRulesetRule"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Ruleset rule parameter template (list of TestScriptRulesetRuleParam).
        self.param = None
        # Id of referenced rule within the ruleset (str).
        self.ruleId = None
        super(TestScriptRulesetRule, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptRulesetRule, self).elementProperties()
        props += [
            ("param", "param", TestScriptRulesetRuleParam, True, None, False),
            ("ruleId", "ruleId", str, False, None, True),
        ]
        return props
class TestScriptRulesetRuleParam(backboneelement.BackboneElement):
    """ Ruleset rule parameter template.

    Each rule template can take one or more parameters for rule evaluation.
    """

    resource_type = "TestScriptRulesetRuleParam"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Parameter name matching external assert ruleset rule parameter (str).
        self.name = None
        # Parameter value defined either explicitly or dynamically (str).
        self.value = None
        super(TestScriptRulesetRuleParam, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptRulesetRuleParam, self).elementProperties()
        props += [
            ("name", "name", str, False, None, True),
            ("value", "value", str, False, None, False),
        ]
        return props
class TestScriptSetup(backboneelement.BackboneElement):
    """ A series of required setup operations before tests are executed. """

    resource_type = "TestScriptSetup"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # A setup operation or assert to perform (list of TestScriptSetupAction).
        self.action = None
        super(TestScriptSetup, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptSetup, self).elementProperties()
        props += [
            ("action", "action", TestScriptSetupAction, True, None, True),
        ]
        return props
class TestScriptSetupAction(backboneelement.BackboneElement):
    """ A setup operation or assert to perform.

    Action would contain either an operation or an assertion.
    """

    resource_type = "TestScriptSetupAction"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # The assertion to perform; JSON key is "assert" ("assert" is a Python
        # keyword, hence the _fhir suffix on the attribute name.
        self.assert_fhir = None
        # The setup operation to perform (TestScriptSetupActionOperation).
        self.operation = None
        super(TestScriptSetupAction, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptSetupAction, self).elementProperties()
        props += [
            ("assert_fhir", "assert", TestScriptSetupActionAssert, False, None, False),
            ("operation", "operation", TestScriptSetupActionOperation, False, None, False),
        ]
        return props
class TestScriptSetupActionAssert(backboneelement.BackboneElement):
    """ The assertion to perform.

    Evaluates the results of previous operations to determine if the server
    under test behaves appropriately.
    """

    resource_type = "TestScriptSetupActionAssert"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # The fluentpath expression to evaluate against the source fixture (str).
        self.compareToSourceExpression = None
        # Id of the source fixture to be evaluated (str).
        self.compareToSourceId = None
        # XPath or JSONPath expression to evaluate against the source fixture (str).
        self.compareToSourcePath = None
        # xml | json | ttl | none (str).
        self.contentType = None
        # Tracking/reporting assertion description (str).
        self.description = None
        # response | request (str).
        self.direction = None
        # The fluentpath expression to be evaluated (str).
        self.expression = None
        # HTTP header field name (str).
        self.headerField = None
        # Tracking/logging assertion label (str).
        self.label = None
        # Fixture Id of minimum content resource (str).
        self.minimumId = None
        # Perform validation on navigation links? (bool).
        self.navigationLinks = None
        # equals | notEquals | in | notIn | greaterThan | lessThan | empty |
        # notEmpty | contains | notContains | eval (str).
        self.operator = None
        # XPath or JSONPath expression (str).
        self.path = None
        # delete | get | options | patch | post | put (str).
        self.requestMethod = None
        # Request URL comparison value (str).
        self.requestURL = None
        # Resource type (str).
        self.resource = None
        # okay | created | noContent | notModified | bad | forbidden |
        # notFound | methodNotAllowed | conflict | gone | preconditionFailed |
        # unprocessable (str).
        self.response = None
        # HTTP response code to test (str).
        self.responseCode = None
        # The reference to a TestScript.rule (TestScriptSetupActionAssertRule).
        self.rule = None
        # The reference to a TestScript.ruleset (TestScriptSetupActionAssertRuleset).
        self.ruleset = None
        # Fixture Id of source expression or headerField (str).
        self.sourceId = None
        # Profile Id of validation profile reference (str).
        self.validateProfileId = None
        # The value to compare to (str).
        self.value = None
        # Will this assert produce a warning only on error? (bool).
        self.warningOnly = None
        super(TestScriptSetupActionAssert, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptSetupActionAssert, self).elementProperties()
        props += [
            ("compareToSourceExpression", "compareToSourceExpression", str, False, None, False),
            ("compareToSourceId", "compareToSourceId", str, False, None, False),
            ("compareToSourcePath", "compareToSourcePath", str, False, None, False),
            ("contentType", "contentType", str, False, None, False),
            ("description", "description", str, False, None, False),
            ("direction", "direction", str, False, None, False),
            ("expression", "expression", str, False, None, False),
            ("headerField", "headerField", str, False, None, False),
            ("label", "label", str, False, None, False),
            ("minimumId", "minimumId", str, False, None, False),
            ("navigationLinks", "navigationLinks", bool, False, None, False),
            ("operator", "operator", str, False, None, False),
            ("path", "path", str, False, None, False),
            ("requestMethod", "requestMethod", str, False, None, False),
            ("requestURL", "requestURL", str, False, None, False),
            ("resource", "resource", str, False, None, False),
            ("response", "response", str, False, None, False),
            ("responseCode", "responseCode", str, False, None, False),
            ("rule", "rule", TestScriptSetupActionAssertRule, False, None, False),
            ("ruleset", "ruleset", TestScriptSetupActionAssertRuleset, False, None, False),
            ("sourceId", "sourceId", str, False, None, False),
            ("validateProfileId", "validateProfileId", str, False, None, False),
            ("value", "value", str, False, None, False),
            ("warningOnly", "warningOnly", bool, False, None, False),
        ]
        return props
class TestScriptSetupActionAssertRule(backboneelement.BackboneElement):
    """ The reference to a TestScript.rule.

    The TestScript.rule this assert will evaluate.
    """

    resource_type = "TestScriptSetupActionAssertRule"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Rule parameter template (list of TestScriptSetupActionAssertRuleParam).
        self.param = None
        # Id of the TestScript.rule (str).
        self.ruleId = None
        super(TestScriptSetupActionAssertRule, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptSetupActionAssertRule, self).elementProperties()
        props += [
            ("param", "param", TestScriptSetupActionAssertRuleParam, True, None, False),
            ("ruleId", "ruleId", str, False, None, True),
        ]
        return props
class TestScriptSetupActionAssertRuleParam(backboneelement.BackboneElement):
    """ Rule parameter template.

    Each rule template can take one or more parameters for rule evaluation.
    """

    resource_type = "TestScriptSetupActionAssertRuleParam"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Parameter name matching external assert rule parameter (str).
        self.name = None
        # Parameter value defined either explicitly or dynamically (str).
        self.value = None
        super(TestScriptSetupActionAssertRuleParam, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptSetupActionAssertRuleParam, self).elementProperties()
        props += [
            ("name", "name", str, False, None, True),
            ("value", "value", str, False, None, True),
        ]
        return props
class TestScriptSetupActionAssertRuleset(backboneelement.BackboneElement):
    """ The reference to a TestScript.ruleset.

    The TestScript.ruleset this assert will evaluate.
    """

    resource_type = "TestScriptSetupActionAssertRuleset"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # The referenced rule within the ruleset (list of
        # TestScriptSetupActionAssertRulesetRule).
        self.rule = None
        # Id of the TestScript.ruleset (str).
        self.rulesetId = None
        super(TestScriptSetupActionAssertRuleset, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptSetupActionAssertRuleset, self).elementProperties()
        props += [
            ("rule", "rule", TestScriptSetupActionAssertRulesetRule, True, None, False),
            ("rulesetId", "rulesetId", str, False, None, True),
        ]
        return props
class TestScriptSetupActionAssertRulesetRule(backboneelement.BackboneElement):
    """ The referenced rule within the ruleset.

    The referenced rule within the external ruleset template.
    """

    resource_type = "TestScriptSetupActionAssertRulesetRule"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Rule parameter template (list of
        # TestScriptSetupActionAssertRulesetRuleParam).
        self.param = None
        # Id of referenced rule within the ruleset (str).
        self.ruleId = None
        super(TestScriptSetupActionAssertRulesetRule, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptSetupActionAssertRulesetRule, self).elementProperties()
        props += [
            ("param", "param", TestScriptSetupActionAssertRulesetRuleParam, True, None, False),
            ("ruleId", "ruleId", str, False, None, True),
        ]
        return props
class TestScriptSetupActionAssertRulesetRuleParam(backboneelement.BackboneElement):
    """ Rule parameter template.

    Each rule template can take one or more parameters for rule evaluation.
    """

    resource_type = "TestScriptSetupActionAssertRulesetRuleParam"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Parameter name matching external assert ruleset rule parameter (str).
        self.name = None
        # Parameter value defined either explicitly or dynamically (str).
        self.value = None
        super(TestScriptSetupActionAssertRulesetRuleParam, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptSetupActionAssertRulesetRuleParam, self).elementProperties()
        props += [
            ("name", "name", str, False, None, True),
            ("value", "value", str, False, None, True),
        ]
        return props
class TestScriptSetupActionOperation(backboneelement.BackboneElement):
    """ The setup operation to perform.

    The operation to perform.
    """

    resource_type = "TestScriptSetupActionOperation"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # xml | json | ttl | none (str).
        self.accept = None
        # xml | json | ttl | none (str).
        self.contentType = None
        # Tracking/reporting operation description (str).
        self.description = None
        # Server responding to the request (int).
        self.destination = None
        # Whether or not to send the request url in encoded format (bool).
        self.encodeRequestUrl = None
        # Tracking/logging operation label (str).
        self.label = None
        # Server initiating the request (int).
        self.origin = None
        # Explicitly defined path parameters (str).
        self.params = None
        # Each operation can have one or more header elements (list of
        # TestScriptSetupActionOperationRequestHeader).
        self.requestHeader = None
        # Fixture Id of mapped request (str).
        self.requestId = None
        # Resource type (str).
        self.resource = None
        # Fixture Id of mapped response (str).
        self.responseId = None
        # Fixture Id of body for PUT and POST requests (str).
        self.sourceId = None
        # Id of fixture used for extracting the [id], [type], and [vid] for
        # GET requests (str).
        self.targetId = None
        # The operation code type that will be executed (Coding).
        self.type = None
        # Request URL (str).
        self.url = None
        super(TestScriptSetupActionOperation, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptSetupActionOperation, self).elementProperties()
        props += [
            ("accept", "accept", str, False, None, False),
            ("contentType", "contentType", str, False, None, False),
            ("description", "description", str, False, None, False),
            ("destination", "destination", int, False, None, False),
            ("encodeRequestUrl", "encodeRequestUrl", bool, False, None, False),
            ("label", "label", str, False, None, False),
            ("origin", "origin", int, False, None, False),
            ("params", "params", str, False, None, False),
            ("requestHeader", "requestHeader", TestScriptSetupActionOperationRequestHeader, True, None, False),
            ("requestId", "requestId", str, False, None, False),
            ("resource", "resource", str, False, None, False),
            ("responseId", "responseId", str, False, None, False),
            ("sourceId", "sourceId", str, False, None, False),
            ("targetId", "targetId", str, False, None, False),
            ("type", "type", coding.Coding, False, None, False),
            ("url", "url", str, False, None, False),
        ]
        return props
class TestScriptSetupActionOperationRequestHeader(backboneelement.BackboneElement):
    """ Each operation can have one or more header elements.

    Header elements would be used to set HTTP headers.
    """

    resource_type = "TestScriptSetupActionOperationRequestHeader"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # HTTP header field name (str).
        self.field = None
        # HTTP header field value (str).
        self.value = None
        super(TestScriptSetupActionOperationRequestHeader, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptSetupActionOperationRequestHeader, self).elementProperties()
        props += [
            ("field", "field", str, False, None, True),
            ("value", "value", str, False, None, True),
        ]
        return props
class TestScriptTeardown(backboneelement.BackboneElement):
    """ A series of required clean up steps.

    A series of operations required to clean up after the all the tests are
    executed (successfully or otherwise).
    """

    resource_type = "TestScriptTeardown"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # One or more teardown operations to perform (list of
        # TestScriptTeardownAction).
        self.action = None
        super(TestScriptTeardown, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptTeardown, self).elementProperties()
        props += [
            ("action", "action", TestScriptTeardownAction, True, None, True),
        ]
        return props
class TestScriptTeardownAction(backboneelement.BackboneElement):
    """ One or more teardown operations to perform.

    The teardown action will only contain an operation.
    """

    resource_type = "TestScriptTeardownAction"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # The teardown operation to perform (reuses
        # TestScriptSetupActionOperation).
        self.operation = None
        super(TestScriptTeardownAction, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptTeardownAction, self).elementProperties()
        props += [
            ("operation", "operation", TestScriptSetupActionOperation, False, None, True),
        ]
        return props
class TestScriptTest(backboneelement.BackboneElement):
    """ A test in this script. """

    resource_type = "TestScriptTest"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # A test operation or assert to perform (list of TestScriptTestAction).
        self.action = None
        # Tracking/reporting short description of the test (str).
        self.description = None
        # Tracking/logging name of this test (str).
        self.name = None
        super(TestScriptTest, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptTest, self).elementProperties()
        props += [
            ("action", "action", TestScriptTestAction, True, None, True),
            ("description", "description", str, False, None, False),
            ("name", "name", str, False, None, False),
        ]
        return props
class TestScriptTestAction(backboneelement.BackboneElement):
    """ A test operation or assert to perform.

    Action would contain either an operation or an assertion.
    """

    resource_type = "TestScriptTestAction"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # The setup assertion to perform; JSON key is "assert" ("assert" is a
        # Python keyword, hence the _fhir suffix on the attribute name.
        self.assert_fhir = None
        # The setup operation to perform (TestScriptSetupActionOperation).
        self.operation = None
        super(TestScriptTestAction, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptTestAction, self).elementProperties()
        props += [
            ("assert_fhir", "assert", TestScriptSetupActionAssert, False, None, False),
            ("operation", "operation", TestScriptSetupActionOperation, False, None, False),
        ]
        return props
class TestScriptVariable(backboneelement.BackboneElement):
    """ Placeholder for evaluated elements.

    Variable is set based either on element value in response body or on
    header field value in the response headers.
    """

    resource_type = "TestScriptVariable"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Default, hard-coded, or user-defined value for this variable (str).
        self.defaultValue = None
        # Natural language description of the variable (str).
        self.description = None
        # The fluentpath expression against the fixture body (str).
        self.expression = None
        # HTTP header field name for source (str).
        self.headerField = None
        # Hint help text for default value to enter (str).
        self.hint = None
        # Descriptive name for this variable (str).
        self.name = None
        # XPath or JSONPath against the fixture body (str).
        self.path = None
        # Fixture Id of source expression or headerField within this variable (str).
        self.sourceId = None
        super(TestScriptVariable, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Property tuples: (name, json name, type, is_list, of_many, required)."""
        props = super(TestScriptVariable, self).elementProperties()
        props += [
            ("defaultValue", "defaultValue", str, False, None, False),
            ("description", "description", str, False, None, False),
            ("expression", "expression", str, False, None, False),
            ("headerField", "headerField", str, False, None, False),
            ("hint", "hint", str, False, None, False),
            ("name", "name", str, False, None, True),
            ("path", "path", str, False, None, False),
            ("sourceId", "sourceId", str, False, None, False),
        ]
        return props
import sys
try:
from . import codeableconcept
except ImportError:
codeableconcept = sys.modules[__package__ + '.codeableconcept']
try:
from . import coding
except ImportError:
coding = sys.modules[__package__ + '.coding']
try:
from . import contactdetail
except ImportError:
contactdetail = sys.modules[__package__ + '.contactdetail']
try:
from . import fhirdate
except ImportError:
fhirdate = sys.modules[__package__ + '.fhirdate']
try:
from . import fhirreference
except ImportError:
fhirreference = sys.modules[__package__ + '.fhirreference']
try:
from . import identifier
except ImportError:
identifier = sys.modules[__package__ + '.identifier']
try:
from . import usagecontext
except ImportError:
usagecontext = sys.modules[__package__ + '.usagecontext']
| all-of-us/raw-data-repository | rdr_service/lib_fhir/fhirclient_3_0_0/models/testscript.py | Python | bsd-3-clause | 50,708 |
# Copyright 2013 Red Hat, Inc.
# Copyright 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This is the Registry's Driver API.
This API relies on the registry RPC client (version >= 2). The functions below
work as a proxy for the database back-end configured in the registry service,
which means that everything returned by that back-end will be also returned by
this API.
This API exists for supporting deployments not willing to put database
credentials in glance-api. Those deployments can rely on this registry driver
that will talk to a remote registry service, which will then access the
database back-end.
"""
import functools
from glance.db import utils as db_utils
from glance.registry.client.v2 import api
def configure():
    """Configure the shared registry client used by every call in this module."""
    api.configure_registry_client()
def _get_client(func):
    """Decorator that injects a registry client as the first argument.

    The decorated function is invoked with a registry client built from the
    request context, instead of the context itself, so each API function in
    this module can talk directly to the remote registry service.
    """
    @functools.wraps(func)
    def wrapped(context, *args, **kwargs):
        registry_client = api.get_registry_client(context)
        return func(registry_client, *args, **kwargs)
    return wrapped
@_get_client
def image_create(client, values, v1_mode=False):
    """Create an image from the values dictionary.

    :param client: registry client injected by the ``_get_client`` decorator
    :param values: dict of image properties, forwarded to the registry back-end
    :param v1_mode: forwarded to the registry; presumably requests a
                    v1-compatible result (see ``image_get_all``) -- confirm
    """
    return client.image_create(values=values, v1_mode=v1_mode)
@_get_client
def image_update(client, image_id, values, purge_props=False, from_state=None,
                 v1_mode=False):
    """
    Set the given properties on an image and update it.

    All keyword arguments are forwarded unchanged to the registry back-end's
    ``image_update``.

    :param image_id: identifier of the image to update
    :param values: dict of image properties to set
    :raises: ImageNotFound if image does not exist.
    """
    return client.image_update(values=values,
                               image_id=image_id,
                               purge_props=purge_props,
                               from_state=from_state,
                               v1_mode=v1_mode)
@_get_client
def image_destroy(client, image_id):
    """Destroy the image or raise if it does not exist."""
    return client.image_destroy(image_id=image_id)
@_get_client
def image_get(client, image_id, force_show_deleted=False, v1_mode=False):
    """Fetch a single image by id via the registry service.

    :param image_id: identifier of the image to fetch
    :param force_show_deleted: if True, return the image even when it is
                               marked deleted (registry back-end semantics)
    :param v1_mode: forwarded to the registry; presumably requests a
                    v1-compatible result (see ``image_get_all``) -- confirm
    """
    return client.image_get(image_id=image_id,
                            force_show_deleted=force_show_deleted,
                            v1_mode=v1_mode)
def is_image_visible(context, image, status=None):
    """Return True if the image is visible in this context.

    Note: this is evaluated locally via ``db_utils`` (passing this module's
    ``image_member_find`` for membership lookups), not proxied to the registry.
    """
    return db_utils.is_image_visible(context, image, image_member_find, status)
@_get_client
def image_get_all(client, filters=None, marker=None, limit=None,
                  sort_key=None, sort_dir=None,
                  member_status='accepted', is_public=None,
                  admin_as_user=False, return_tag=False, v1_mode=False):
    """
    Get all images that match zero or more filters.

    :param filters: dict of filter keys and values. If a 'properties'
                    key is present, it is treated as a dict of key/value
                    filters on the image properties attribute
    :param marker: image id after which to start page
    :param limit: maximum number of images to return
    :param sort_key: image attribute by which results should be sorted
    :param sort_dir: direction in which results should be sorted (asc, desc)
    :param member_status: only return shared images that have this membership
                          status
    :param is_public: If true, return only public images. If false, return
                      only private and shared images.
    :param admin_as_user: For backwards compatibility. If true, then return to
                      an admin the equivalent set of images which it would see
                      if it were a regular user
    :param return_tag: To indicates whether image entry in result includes it
                       relevant tag entries. This could improve upper-layer
                       query performance, to prevent using separated calls
    :param v1_mode: If true, mutates the 'visibility' value of each image
                    into the v1-compatible field 'is_public'
    """
    # Fall back to the default sort order when no explicit one was given.
    sort_key = sort_key or ['created_at']
    sort_dir = sort_dir or ['desc']
    return client.image_get_all(filters=filters, marker=marker, limit=limit,
                                sort_key=sort_key, sort_dir=sort_dir,
                                member_status=member_status,
                                is_public=is_public,
                                admin_as_user=admin_as_user,
                                return_tag=return_tag,
                                v1_mode=v1_mode)
# NOTE: the ``session`` parameters below are accepted but never forwarded to
# the client (visible in each body); presumably kept for signature parity
# with the sqlalchemy driver API — confirm against glance.db.sqlalchemy.api.
@_get_client
def image_property_create(client, values, session=None):
    """Create an ImageProperty object"""
    return client.image_property_create(values=values)
@_get_client
def image_property_delete(client, prop_ref, image_ref, session=None):
    """
    Used internally by _image_property_create and image_property_update
    """
    return client.image_property_delete(prop_ref=prop_ref, image_ref=image_ref)
@_get_client
def image_member_create(client, values, session=None):
    """Create an ImageMember object"""
    return client.image_member_create(values=values)
@_get_client
def image_member_update(client, memb_id, values):
    """Update an ImageMember object"""
    return client.image_member_update(memb_id=memb_id, values=values)
@_get_client
def image_member_delete(client, memb_id, session=None):
    """Delete an ImageMember object"""
    client.image_member_delete(memb_id=memb_id)
@_get_client
def image_member_find(client, image_id=None, member=None, status=None,
                      include_deleted=False):
    """Find all members that meet the given criteria.
    Note, currently include_deleted should be true only when create a new
    image membership, as there may be a deleted image membership between
    the same image and tenant, the membership will be reused in this case.
    It should be false in other cases.
    :param image_id: identifier of image entity
    :param member: tenant to which membership has been granted
    :param status: only return memberships with this status, if given
    :param include_deleted: A boolean indicating whether the result should
                            include the deleted record of image member
    """
    return client.image_member_find(image_id=image_id,
                                    member=member,
                                    status=status,
                                    include_deleted=include_deleted)
@_get_client
def image_member_count(client, image_id):
    """Return the number of image members for this image
    :param image_id: identifier of image entity
    """
    return client.image_member_count(image_id=image_id)
@_get_client
def image_tag_set_all(client, image_id, tags):
    """Replace the full set of tags on an image."""
    client.image_tag_set_all(image_id=image_id, tags=tags)
@_get_client
def image_tag_create(client, image_id, value, session=None):
    """Create an image tag."""
    return client.image_tag_create(image_id=image_id, value=value)
@_get_client
def image_tag_delete(client, image_id, value, session=None):
    """Delete an image tag."""
    client.image_tag_delete(image_id=image_id, value=value)
@_get_client
def image_tag_get_all(client, image_id, session=None):
    """Get a list of tags for a specific image."""
    return client.image_tag_get_all(image_id=image_id)
@_get_client
def image_location_delete(client, image_id, location_id, status, session=None):
    """Delete an image location."""
    client.image_location_delete(image_id=image_id, location_id=location_id,
                                 status=status)
@_get_client
def image_location_update(client, image_id, location, session=None):
    """Update image location."""
    client.image_location_update(image_id=image_id, location=location)
@_get_client
def user_get_storage_usage(client, owner_id, image_id=None, session=None):
    """Get storage usage for an owner via the registry client."""
    return client.user_get_storage_usage(owner_id=owner_id, image_id=image_id)
# NOTE(review): unlike the image_* wrappers above, the task_* wrappers DO
# forward ``session`` to the client — confirm this asymmetry is intentional.
@_get_client
def task_get(client, task_id, session=None, force_show_deleted=False):
    """Get a single task object
    :returns: task dictionary
    """
    return client.task_get(task_id=task_id, session=session,
                           force_show_deleted=force_show_deleted)
@_get_client
def task_get_all(client, filters=None, marker=None, limit=None,
                 sort_key='created_at', sort_dir='desc', admin_as_user=False):
    """Get all tasks that match zero or more filters.
    :param filters: dict of filter keys and values.
    :param marker: task id after which to start page
    :param limit: maximum number of tasks to return
    :param sort_key: task attribute by which results should be sorted
    :param sort_dir: direction in which results should be sorted (asc, desc)
    :param admin_as_user: For backwards compatibility. If true, then return to
                          an admin the equivalent set of tasks which it would see
                          if it were a regular user
    :returns: tasks set
    """
    return client.task_get_all(filters=filters, marker=marker, limit=limit,
                               sort_key=sort_key, sort_dir=sort_dir,
                               admin_as_user=admin_as_user)
@_get_client
def task_create(client, values, session=None):
    """Create a task object"""
    return client.task_create(values=values, session=session)
@_get_client
def task_delete(client, task_id, session=None):
    """Delete a task object"""
    return client.task_delete(task_id=task_id, session=session)
@_get_client
def task_update(client, task_id, values, session=None):
    """Update a task object"""
    return client.task_update(task_id=task_id, values=values, session=session)
# Metadef
@_get_client
def metadef_namespace_get_all(
        client, marker=None, limit=None, sort_key='created_at',
        sort_dir=None, filters=None, session=None):
    """List metadata-definition namespaces."""
    return client.metadef_namespace_get_all(
        marker=marker, limit=limit,
        sort_key=sort_key, sort_dir=sort_dir, filters=filters)
@_get_client
def metadef_namespace_get(client, namespace_name, session=None):
    """Get a metadata-definition namespace by name."""
    return client.metadef_namespace_get(namespace_name=namespace_name)
@_get_client
def metadef_namespace_create(client, values, session=None):
    """Create a metadata-definition namespace."""
    return client.metadef_namespace_create(values=values)
@_get_client
def metadef_namespace_update(
        client, namespace_id, namespace_dict,
        session=None):
    """Update a metadata-definition namespace."""
    return client.metadef_namespace_update(
        namespace_id=namespace_id, namespace_dict=namespace_dict)
@_get_client
def metadef_namespace_delete(client, namespace_name, session=None):
    """Delete a metadata-definition namespace."""
    return client.metadef_namespace_delete(
        namespace_name=namespace_name)
@_get_client
def metadef_object_get_all(client, namespace_name, session=None):
    """List metadata-definition objects in a namespace."""
    return client.metadef_object_get_all(
        namespace_name=namespace_name)
@_get_client
def metadef_object_get(
        client,
        namespace_name, object_name, session=None):
    """Get a single metadata-definition object by name."""
    return client.metadef_object_get(
        namespace_name=namespace_name, object_name=object_name)
@_get_client
def metadef_object_create(
        client,
        namespace_name, object_dict, session=None):
    """Create a metadata-definition object in a namespace."""
    return client.metadef_object_create(
        namespace_name=namespace_name, object_dict=object_dict)
@_get_client
def metadef_object_update(
        client,
        namespace_name, object_id,
        object_dict, session=None):
    """Update a metadata-definition object."""
    return client.metadef_object_update(
        namespace_name=namespace_name, object_id=object_id,
        object_dict=object_dict)
@_get_client
def metadef_object_delete(
        client,
        namespace_name, object_name,
        session=None):
    """Delete a metadata-definition object."""
    return client.metadef_object_delete(
        namespace_name=namespace_name, object_name=object_name)
@_get_client
def metadef_object_delete_namespace_content(
        client,
        namespace_name, session=None):
    """Delete all metadata-definition objects in a namespace."""
    return client.metadef_object_delete_namespace_content(
        namespace_name=namespace_name)
@_get_client
def metadef_object_count(
        client,
        namespace_name, session=None):
    """Count metadata-definition objects in a namespace."""
    return client.metadef_object_count(
        namespace_name=namespace_name)
@_get_client
def metadef_property_get_all(
        client,
        namespace_name, session=None):
    """List metadata-definition properties in a namespace."""
    return client.metadef_property_get_all(
        namespace_name=namespace_name)
@_get_client
def metadef_property_get(
        client,
        namespace_name, property_name,
        session=None):
    """Get a single metadata-definition property by name."""
    return client.metadef_property_get(
        namespace_name=namespace_name, property_name=property_name)
@_get_client
def metadef_property_create(
        client,
        namespace_name, property_dict,
        session=None):
    """Create a metadata-definition property in a namespace."""
    return client.metadef_property_create(
        namespace_name=namespace_name, property_dict=property_dict)
@_get_client
def metadef_property_update(
        client,
        namespace_name, property_id,
        property_dict, session=None):
    """Update a metadata-definition property."""
    return client.metadef_property_update(
        namespace_name=namespace_name, property_id=property_id,
        property_dict=property_dict)
@_get_client
def metadef_property_delete(
        client,
        namespace_name, property_name,
        session=None):
    """Delete a metadata-definition property."""
    return client.metadef_property_delete(
        namespace_name=namespace_name, property_name=property_name)
@_get_client
def metadef_property_delete_namespace_content(
        client,
        namespace_name, session=None):
    """Delete all metadata-definition properties in a namespace."""
    return client.metadef_property_delete_namespace_content(
        namespace_name=namespace_name)
@_get_client
def metadef_property_count(
        client,
        namespace_name, session=None):
    """Count metadata-definition properties in a namespace."""
    return client.metadef_property_count(
        namespace_name=namespace_name)
@_get_client
def metadef_resource_type_create(client, values, session=None):
    """Create a metadata-definition resource type."""
    return client.metadef_resource_type_create(values=values)
@_get_client
def metadef_resource_type_get(
        client,
        resource_type_name, session=None):
    """Get a metadata-definition resource type by name."""
    return client.metadef_resource_type_get(
        resource_type_name=resource_type_name)
@_get_client
def metadef_resource_type_get_all(client, session=None):
    """List all metadata-definition resource types."""
    return client.metadef_resource_type_get_all()
@_get_client
def metadef_resource_type_delete(
        client,
        resource_type_name, session=None):
    """Delete a metadata-definition resource type."""
    return client.metadef_resource_type_delete(
        resource_type_name=resource_type_name)
@_get_client
def metadef_resource_type_association_get(
        client,
        namespace_name, resource_type_name,
        session=None):
    """Get the association between a namespace and a resource type."""
    return client.metadef_resource_type_association_get(
        namespace_name=namespace_name, resource_type_name=resource_type_name)
@_get_client
def metadef_resource_type_association_create(
        client,
        namespace_name, values, session=None):
    """Associate a resource type with a namespace."""
    return client.metadef_resource_type_association_create(
        namespace_name=namespace_name, values=values)
@_get_client
def metadef_resource_type_association_delete(
        client,
        namespace_name, resource_type_name, session=None):
    """Remove a namespace/resource-type association."""
    return client.metadef_resource_type_association_delete(
        namespace_name=namespace_name, resource_type_name=resource_type_name)
@_get_client
def metadef_resource_type_association_get_all_by_namespace(
        client,
        namespace_name, session=None):
    """List resource-type associations for a namespace."""
    return client.metadef_resource_type_association_get_all_by_namespace(
        namespace_name=namespace_name)
@_get_client
def metadef_tag_get_all(client, namespace_name, filters=None, marker=None,
                        limit=None, sort_key='created_at', sort_dir=None,
                        session=None):
    """List metadata-definition tags in a namespace."""
    # NOTE(review): this is the only metadef wrapper that forwards
    # ``session`` to the client; every sibling drops it.  Confirm the
    # asymmetry is intentional.
    return client.metadef_tag_get_all(
        namespace_name=namespace_name, filters=filters, marker=marker,
        limit=limit, sort_key=sort_key, sort_dir=sort_dir, session=session)
@_get_client
def metadef_tag_get(client, namespace_name, name, session=None):
    """Get a single metadata-definition tag by name."""
    return client.metadef_tag_get(
        namespace_name=namespace_name, name=name)
@_get_client
def metadef_tag_create(
        client, namespace_name, tag_dict, session=None):
    """Create a metadata-definition tag in a namespace."""
    return client.metadef_tag_create(
        namespace_name=namespace_name, tag_dict=tag_dict)
@_get_client
def metadef_tag_create_tags(
        client, namespace_name, tag_list, session=None):
    """Create several metadata-definition tags at once."""
    return client.metadef_tag_create_tags(
        namespace_name=namespace_name, tag_list=tag_list)
@_get_client
def metadef_tag_update(
        client, namespace_name, id, tag_dict, session=None):
    """Update a metadata-definition tag (``id`` shadows the builtin name)."""
    return client.metadef_tag_update(
        namespace_name=namespace_name, id=id, tag_dict=tag_dict)
@_get_client
def metadef_tag_delete(
        client, namespace_name, name, session=None):
    """Delete a metadata-definition tag."""
    return client.metadef_tag_delete(
        namespace_name=namespace_name, name=name)
@_get_client
def metadef_tag_delete_namespace_content(
        client, namespace_name, session=None):
    """Delete all metadata-definition tags in a namespace."""
    return client.metadef_tag_delete_namespace_content(
        namespace_name=namespace_name)
@_get_client
def metadef_tag_count(client, namespace_name, session=None):
    """Count metadata-definition tags in a namespace."""
    return client.metadef_tag_count(namespace_name=namespace_name)
| rajalokan/glance | glance/db/registry/api.py | Python | apache-2.0 | 17,561 |
#! /usr/bin/python
# -*- coding: utf-8 -*-
def multiplica(multiplo1=1, multiplo2=2):
    """Print the product of the two factors.

    Both factors now have defaults, so a bare ``multiplica()`` call (as this
    script later makes) prints 2 instead of raising TypeError.  The single-
    argument print() call form behaves identically on Python 2 and 3, unlike
    the original ``print x`` statement.
    """
    print(multiplo1 * multiplo2)
multiplica(2)
multiplica(5, 7.3)
multiplica("7", 6)  # str * int repeats the string: prints '777777'
numero = 12.5
multiplica(numero, 11)
# The original file ended with a bare multiplica() call, which raised
# TypeError because the first parameter had no default value; pass an
# explicit first factor so the demo runs to completion.
multiplica(3)
| Jzarecta/pythonjr | codigo/funcion_con_parametros.py | Python | gpl-2.0 | 220 |
# import the necessary packages
from .mxalexnet import MxAlexNet
from .mxvggnet import MxVGGNet
from .mxgooglenet import MxGoogLeNet
from .mxresnet import MxResNet
from .mxsqueezenet import MxSqueezeNet
from .mxagegendernet import MxAgeGenderNet | CyberLabs-BR/face_detect | pyimagesearch/nn/mxconv/__init__.py | Python | mit | 245 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# tester for Part.makeWireString
import FreeCAD
import Part
import PartDesign
# NOTE: this script uses Python 2 print statements throughout.
print "testWire started"
# test strings
# if string contains funky characters, it has to be declared as Unicode or it
# turns into the default encoding (usually utf8). FT2 doesn't do utf8.
#String = 'Wide WMA_' # wide glyphs for tracking
#String = 'Big'
#String = u'ecAnO' # UCS-2 w/ only ASCII
#String = u'ucs2uéçÄñØ' # UCS-2
#String = 'utf8!uéçÄñØ' # UTF-8
#String = 'abcdefghijklmnopqrstuvwxyz0123456789'
#String = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
#String = 'Big Daddy' # white space
#String = 'AVWAIXA.V' # kerning
String = 'FreeCAD' # ASCII
#FontPath = '/usr/share/fonts/truetype/msttcorefonts/'
#FontName = 'Times_New_Roman_Italic.ttf'
FontPath = '/usr/share/fonts/truetype/msttcorefonts/'
FontName = 'Arial.ttf'
#FontName = 'NOTArial.ttf' # font file not found error
#FontPath = '/usr/share/fonts/truetype/msttcorefonts/'
#FontName = 'ariali.ttf' #symlink to ttf
#FontPath = '/usr/share/fonts/truetype/'
#FontName = 'Peterbuilt.ttf' # overlapping script font
#FontPath = '/usr/share/fonts/truetype/'
#FontName = 'dyspepsia.ttf' # overlapping script font # :)
Height = 2000 # out string height FCunits
Track = 0 # intercharacter spacing
print "testWire.py input String contains ", len(String), " characters."
s = Part.makeWireString(String,FontPath,FontName,Height,Track)
print "returned from makeWireString"
print "testWire.py output contains ", len(s), " WireChars."
# Each element of s is one character, itself a list of wire contours;
# show every contour in the FreeCAD document.
for char in s:
    for contour in char:
        Part.show(contour)
print "testWire ended."
| yantrabuddhi/FreeCAD | src/Mod/Test/testmakeWireString.py | Python | lgpl-2.1 | 2,126 |
#!/usr/bin/env python3
# coding: utf-8
import BRT
from collections import namedtuple
import configparser
import os
import logging
from os.path import expanduser
import argparse
# Command-line flags: -s actually submits jobs, -q silences output,
# -v / -d raise the logging verbosity.
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--submit', help='Execute the submission', action='store_true')
parser.add_argument('-q', '--quiet', help='Just do the job. Stay quiet', action='store_true')
parser.add_argument('-v', '--verbose', help='Print more status info', action='store_true')
parser.add_argument('-d', '--debug', help='Print debugging info', action='store_true')
args = parser.parse_args()
# logging.basicConfig is a no-op once the root logger is configured, so the
# original "-v then -d" ordering silently ignored --debug whenever --verbose
# was also given.  Check the more verbose flag first (LOGLEVEL still wins).
if args.debug :
    logging.basicConfig(level=os.environ.get("LOGLEVEL", "DEBUG"))
elif args.verbose :
    logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))
log = logging.getLogger(__name__)
# VStar describes one target: name, free-text comment, exposure time (s).
VStar=namedtuple('VStar', 'name comm expos')
# Credentials live outside the repository in ~/.config/telescope.ini.
config = configparser.ConfigParser()
config.read(expanduser('~/.config/telescope.ini'))
log.info('Log in to telescope.org ...')
brt=BRT.Telescope(config['telescope.org']['user'], config['telescope.org']['password'])
BRT.astrometryAPIkey=config['astrometry.net']['apikey']
def qprint(*items, **print_kwargs):
    """Forward to print() unless the --quiet flag suppressed output."""
    if args.quiet:
        return
    print(*items, **print_kwargs)
def vprint(*items, **print_kwargs):
    """Forward to print() only in verbose (and non-quiet) mode."""
    show = args.verbose and not args.quiet
    if show:
        print(*items, **print_kwargs)
# Variable-star observing programme: target name, purpose note, exposure (s).
obslst=[
    VStar('S Ori', comm='Mira AAVSO', expos=120),
    VStar('CH Cyg', comm='Symbiotic AAVSO', expos=60),
    VStar('SS Cyg', comm='Mira', expos=180),
    VStar('EU Cyg', comm='Mira', expos=180),
    VStar('IP Cyg', comm='Mira', expos=180),
    VStar('V686 Cyg', comm='Mira', expos=180),
    #VStar('AS Lac', comm='Mira', expos=120),
    VStar('BI Her', comm='Mira', expos=180),
    VStar('DX Vul', comm='Mira', expos=180),
    VStar('DQ Vul', comm='Mira', expos=180),
    VStar('EQ Lyr', comm='Mira', expos=180),
    VStar('LX Cyg', comm='AAVSO', expos=180),
]
log.info('Getting observing queue ...')
reqlst=brt.get_user_requests(sort='completion')
# Status codes below 8 appear to mean "not yet completed" — TODO confirm
# against the BRT module.
q=[r for r in reqlst if int(r['status'])<8]
qn=[r['objectname'] for r in q]
# Programme targets that are not already somewhere in the pending queue.
missing = [vs for vs in obslst if vs.name not in qn]
if missing :
    if args.submit:
        qprint('Submitting missing jobs:')
    else:
        qprint('Dry run. Add -s to the command line to do actual submissions.')
    for vs in missing:
        qprint(f'{vs.name.split()[0]:>8} {vs.name.split()[1]} exp:{vs.expos:3.1f}s {vs.comm}', end='')
        if args.submit :
            r, i = brt.submitVarStar(vs.name, expos=vs.expos, comm=vs.comm)
            if r :
                qprint(f' => id: {i}', end='')
            else :
                qprint(f' Failure:{i}', end='')
        qprint()
else :
    qprint('No missing jobs. Nothing to do!')
log.info('Done.')
| jochym/brt | submit_batch.py | Python | gpl-2.0 | 2,720 |
def find_longest_subsequence(integers):
    """Return the longest run of consecutive integers present in *integers*.

    Example: [1, 2, 3, 4, 7, 5, 12, 13] -> [1, 2, 3, 4, 5].

    The original version referenced ``defaultdict`` without importing it
    (NameError at runtime) and never inserted anything into its ``runs``
    mapping, so it could only ever print an empty mapping.  This version
    uses the standard set-based O(n) scan: only a value whose predecessor
    is absent can start a run, so each value is visited at most twice.

    :param integers: iterable of ints (duplicates and any order allowed)
    :returns: the longest consecutive run as an ascending list
    """
    values = set(integers)
    best = []
    for v in values:
        if v - 1 in values:
            continue  # not the start of a run
        end = v
        while end + 1 in values:
            end += 1
        if end - v + 1 > len(best):
            best = list(range(v, end + 1))
    return best
# Demo invocation with a small sample list.
find_longest_subsequence([1,2,3,4,7,5,12,13])
| mre/the-coding-interview | problems/longest-subsequence/find_longest_subsequence2.py | Python | mit | 283 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-30 20:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter Workout.order and Workout_group.order to PositiveIntegerField."""
    dependencies = [
        ('hamask', '0006_auto_20170530_1415'),
    ]
    operations = [
        migrations.AlterField(
            model_name='workout',
            name='order',
            field=models.PositiveIntegerField(),
        ),
        migrations.AlterField(
            model_name='workout_group',
            name='order',
            field=models.PositiveIntegerField(),
        ),
    ]
| rawenihcam/BER-SERKR | hamask/migrations/0007_auto_20170530_1638.py | Python | mit | 605 |
#!/usr/bin/python
import os, subprocess
import textwrap, argparse
import itertools
from collections import OrderedDict
if __name__ == '__main__':
    argument_parser = argparse.ArgumentParser(
        prog='mapps_blcs.py',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=textwrap.dedent('''\
            blcs mapping.... map some synsets from one version of WN to another... maps are from http://nlp.lsi.upc.edu/tools/download-map.php
            --------------------------------
            example of use $python3 %(prog)s
            '''))
    argument_parser.add_argument('--selected', dest='selected', action='append', required=True, choices=['15','16','17','171','20','21','30'], type=str , help='selected mapp (required, without extension), it can be more than one')
    argument_parser.add_argument('--blcsfile', dest='blcsfile', required=True, type=str , help='blcs file (required)')
    argument_parser.add_argument('--filter_pos', dest='filter_pos', required=False, type=str, action='append', choices=['n','v','r','a'], help='filter pos (optional)')
    argument_parser.add_argument('--show_correspondences', dest='show_correspondences', action='store_true', help='Show complet correspondence. Format: "old_synset":"new_synset" or "no_correspondence"')
    argument_parser.add_argument('--no_matches', dest='no_matches', action='store_true', help='Show also no matches in correspondence mode.')
    args = argument_parser.parse_args()
    # Default to all four parts of speech when --filter_pos was not given.
    if args.filter_pos is None:
        args.filter_pos=['n','v','r','a']
    # create output dir if not exists
    if not os.path.exists('out'):
        os.makedirs('out')
# STARTS create mapping dir if not exists ################################################
if not os.path.exists('mappings-upc-2007'):
os.makedirs('mappings-upc-2007')
import urllib2
import StringIO
import tarfile
import gzip
outFilePath = "/tmp/mapps.tmp.tar"
response = urllib2.urlopen("http://nlp.lsi.upc.edu/tools/download-map.php")
compressedFile = StringIO.StringIO(response.read())
decompressedFile = gzip.GzipFile(fileobj=compressedFile)
with open(outFilePath, 'w') as outfile:
outfile.write(decompressedFile.read())
tar = tarfile.open(outFilePath, "r:")
tar.extractall()
tar.close()
# END create mapping dir if not exists ###################################################
    # Exactly two distinct WordNet versions are required to build a mapping.
    if len(args.selected) != 2:
        print("The program need select TWO mapps.\n")
        exit()
    if args.selected[0] == args.selected[1]:
        print("The program need DIFFERENT mapps.\n")
        exit()
    # read files ####################################################################################################################################################
    # mapps[version][synset] -> list of [target_synset, score] pairs, parsed
    # from the UPC mapping tables in both directions.  Synsets are keyed as
    # "<offset>-<pos>".
    mapps = {}
    mapps[args.selected[0]] = {}
    mapps[args.selected[1]] = {}
    for pos in [['noun','n'],['verb','v'],['adv','r'],['adj','a']]:
        sfile = open("mappings-upc-2007/mapping-"+args.selected[0]+"-"+args.selected[1]+"/wn"+args.selected[0]+"-"+args.selected[1]+"."+pos[0],'r')
        content_file = sfile.readlines()
        for line in content_file:
            fields = line.split()
            mapps[args.selected[0]][fields[0]+"-"+pos[1]] = []
            i = 1
            while i < len(fields[1:]):
                mapps[args.selected[0]][fields[0]+"-"+pos[1]].append([fields[i]+"-"+pos[1],fields[i+1]])
                i += 2
    # Same parse for the reverse-direction tables.
    for pos in [['noun','n'],['verb','v'],['adv','r'],['adj','a']]:
        sfile = open("mappings-upc-2007/mapping-"+args.selected[1]+"-"+args.selected[0]+"/wn"+args.selected[1]+"-"+args.selected[0]+"."+pos[0],'r')
        content_file = sfile.readlines()
        for line in content_file:
            fields = line.split()
            mapps[args.selected[1]][fields[0]+"-"+pos[1]] = []
            i = 1
            while i < len(fields[1:]):
                mapps[args.selected[1]][fields[0]+"-"+pos[1]].append([fields[i]+"-"+pos[1],fields[i+1]])
                i += 2
    # check coincidences between mappings ###########################################################################################################################
    # A synset pair is a "coincidence" when mapping it forward and then back
    # returns the original synset, i.e. the two direction tables agree.
    coincidences = {}
    for key_map_0,list_map_0 in mapps[args.selected[0]].items():
        for elem_map_0 in list_map_0:
            for elem_map_1 in mapps[args.selected[1]][elem_map_0[0]]:
                if key_map_0 == elem_map_1[0]:
                    coincidences[key_map_0] = elem_map_0[0]
# output to file ################################################################################################################################################
sfile = open(args.blcsfile,'r')
content_file = sfile.readlines()
for line in content_file:
if line.strip().split("-")[1] in args.filter_pos:
if args.show_correspondences:
try:
print(line.strip()+":"+coincidences[line.strip()])
except KeyError:
if args.no_matches:
print(line.strip()+":NO_CORRESPONDENCE")
else:
try:
print(coincidences[line.strip()])
except KeyError:
no_var = 0
if args.no_matches and args.show_correspondences:
print("length coincidences: "+str(len(coincidences)))
| daniparera/MCR | BLC/analisys/mapps_blcs.py | Python | gpl-2.0 | 5,450 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings unless the caller already set it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sm.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
| arashrasoulzadeh/codepackWEB | manage.py | Python | mit | 800 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for TPUClusterResolver."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.contrib.cluster_resolver.python.training.tpu_cluster_resolver import TPUClusterResolver
from tensorflow.python.platform import test
from tensorflow.python.training import server_lib
from tensorflow.python.util import compat
mock = test.mock
class MockRequestClass(object):
  """Fake Cloud TPU API request: execute() resolves a name in a dict."""

  def __init__(self, name, tpu_map):
    self._name = name
    self._tpu_map = tpu_map

  def execute(self):
    """Return the mapped resource, mirroring googleapiclient's execute()."""
    if self._name not in self._tpu_map:
      raise KeyError('Resource %s was not found' % self._name)
    return self._tpu_map[self._name]
class MockNodeClass(object):
  """Stand-in for the Cloud TPU API nodes() collection."""

  def __init__(self, tpu_map):
    self._tpu_map = tpu_map

  def get(self, name):
    """Return a request object that resolves *name* when executed."""
    pending_request = MockRequestClass(name, self._tpu_map)
    return pending_request
def mock_request_compute_metadata(cls, *args, **kwargs):
  """Canned replacement for GCE metadata-server lookups used in tests."""
  del cls, kwargs  # Unused.
  canned_responses = {
      'project/project-id': 'test-project',
      'instance/zone': 'projects/test-project/locations/us-central1-c',
      'instance/network-interfaces/0/ip': '10.128.1.2',
  }
  # Unknown metadata paths resolve to the empty string, as before.
  return canned_responses.get(args[0], '')
class TPUClusterResolverTest(test.TestCase):
  """Unit tests for TPUClusterResolver against a mocked Cloud TPU API."""
  def _verifyClusterSpecEquality(self, cluster_spec, expected_proto):
    """Verifies that the ClusterSpec generates the correct proto.
    We are testing this four different ways to ensure that the ClusterSpec
    returned by the TPUClusterResolver behaves identically to a normal
    ClusterSpec when passed into the generic ClusterSpec libraries.
    Args:
      cluster_spec: ClusterSpec returned by the TPUClusterResolver
      expected_proto: Expected protobuf
    """
    self.assertProtoEquals(expected_proto, cluster_spec.as_cluster_def())
    self.assertProtoEquals(
        expected_proto,
        server_lib.ClusterSpec(cluster_spec).as_cluster_def())
    self.assertProtoEquals(expected_proto,
                           server_lib.ClusterSpec(
                               cluster_spec.as_cluster_def()).as_cluster_def())
    self.assertProtoEquals(expected_proto,
                           server_lib.ClusterSpec(
                               cluster_spec.as_dict()).as_cluster_def())
  def mock_service_client(self, tpu_map=None):
    """Builds a mock googleapiclient whose nodes().get() reads *tpu_map*."""
    if tpu_map is None:
      tpu_map = {}
    mock_locations = mock.MagicMock()
    mock_locations.nodes.return_value = MockNodeClass(tpu_map)
    mock_project = mock.MagicMock()
    mock_project.locations.return_value = mock_locations
    mock_client = mock.MagicMock()
    mock_client.projects.return_value = mock_project
    return mock_client
  @mock.patch.object(TPUClusterResolver, '_requestComputeMetadata',
                     mock_request_compute_metadata)
  def testRetrieveProjectAndZoneFromMetadata(self):
    tpu_map = {
        'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': {
            'ipAddress': '10.1.2.3',
            'port': '8470',
            'health': 'HEALTHY'
        }
    }
    tpu_cluster_resolver = TPUClusterResolver(
        project=None,
        zone=None,
        tpu=['test-tpu-1'],
        credentials=None,
        service=self.mock_service_client(tpu_map=tpu_map))
    actual_cluster_spec = tpu_cluster_resolver.cluster_spec()
    expected_proto = """
    job {
      name: 'coordinator'
      tasks { key: 0 value: '10.128.1.2:%s' }
    }
    job {
      name: 'worker'
      tasks { key: 0 value: '10.1.2.3:8470' }
    }
    """ % tpu_cluster_resolver._coordinator_port
    self._verifyClusterSpecEquality(actual_cluster_spec, str(expected_proto))
  @mock.patch.object(TPUClusterResolver, '_requestComputeMetadata',
                     mock_request_compute_metadata)
  def testRetrieveProjectAndZoneFromMetadataNoCoordinator(self):
    tpu_map = {
        'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': {
            'ipAddress': '10.1.2.3',
            'port': '8470',
            'health': 'HEALTHY'
        }
    }
    tpu_cluster_resolver = TPUClusterResolver(
        project=None,
        zone=None,
        tpu=['test-tpu-1'],
        coordinator_name=None,
        credentials=None,
        service=self.mock_service_client(tpu_map=tpu_map))
    actual_cluster_spec = tpu_cluster_resolver.cluster_spec()
    expected_proto = """
    job { name: 'worker' tasks { key: 0 value: '10.1.2.3:8470' } }
    """
    self._verifyClusterSpecEquality(actual_cluster_spec, expected_proto)
  def testSimpleSuccessfulRetrieval(self):
    tpu_map = {
        'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': {
            'ipAddress': '10.1.2.3',
            'port': '8470',
            'health': 'HEALTHY'
        }
    }
    tpu_cluster_resolver = TPUClusterResolver(
        project='test-project',
        zone='us-central1-c',
        tpu=['test-tpu-1'],
        coordinator_address='10.128.1.5:10203',
        credentials=None,
        service=self.mock_service_client(tpu_map=tpu_map))
    actual_cluster_spec = tpu_cluster_resolver.cluster_spec()
    expected_proto = """
    job { name: 'coordinator' tasks { key: 0 value: '10.128.1.5:10203' } }
    job { name: 'worker' tasks { key: 0 value: '10.1.2.3:8470' } }
    """
    self._verifyClusterSpecEquality(actual_cluster_spec, expected_proto)
  def testNewNetworkEndpointFormat(self):
    # 'networkEndpoints' is the newer API response shape, replacing the
    # top-level 'ipAddress'/'port' fields used by the tests above.
    tpu_map = {
        'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': {
            'health': 'HEALTHY',
            'networkEndpoints': [{
                'ipAddress': '10.2.3.4',
                'port': 8470,
            }]
        }
    }
    tpu_cluster_resolver = TPUClusterResolver(
        project='test-project',
        zone='us-central1-c',
        tpu='test-tpu-1',
        coordinator_address='10.128.1.5:10203',
        credentials=None,
        service=self.mock_service_client(tpu_map=tpu_map))
    actual_cluster_spec = tpu_cluster_resolver.cluster_spec()
    expected_proto = """
    job { name: 'coordinator' tasks { key: 0 value: '10.128.1.5:10203' } }
    job { name: 'worker' tasks { key: 0 value: '10.2.3.4:8470' } }
    """
    self._verifyClusterSpecEquality(actual_cluster_spec, expected_proto)
    self.assertEqual('grpc://10.2.3.4:8470', tpu_cluster_resolver.master())
  @mock.patch.object(TPUClusterResolver, '_requestComputeMetadata',
                     mock_request_compute_metadata)
  def testPodResolution(self):
    tpu_map = {
        'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': {
            'health':
                'HEALTHY',
            'networkEndpoints': [
                {
                    'ipAddress': '10.2.3.4',
                    'port': 8470,
                },
                {
                    'ipAddress': '10.2.3.5',
                    'port': 8470,
                },
                {
                    'ipAddress': '10.2.3.6',
                    'port': 8470,
                },
                {
                    'ipAddress': '10.2.3.7',
                    'port': 8470,
                },
            ]
        }
    }
    tpu_cluster_resolver = TPUClusterResolver(
        tpu='test-tpu-1',
        credentials=None,
        service=self.mock_service_client(tpu_map=tpu_map))
    actual_cluster_spec = tpu_cluster_resolver.cluster_spec()
    expected_proto = """
    job {
      name: 'coordinator',
      tasks { key: 0 value: '10.128.1.2:%s'}
    }
    job {
      name: 'worker'
      tasks { key: 0 value: '10.2.3.4:8470' }
      tasks { key: 1 value: '10.2.3.5:8470' }
      tasks { key: 2 value: '10.2.3.6:8470' }
      tasks { key: 3 value: '10.2.3.7:8470' }
    }
    """ % tpu_cluster_resolver._coordinator_port
    self._verifyClusterSpecEquality(actual_cluster_spec, str(expected_proto))
  def testPodResolutionNoCoordinator(self):
    tpu_map = {
        'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': {
            'health':
                'HEALTHY',
            'networkEndpoints': [
                {
                    'ipAddress': '10.2.3.4',
                    'port': 8470,
                },
                {
                    'ipAddress': '10.2.3.5',
                    'port': 8470,
                },
                {
                    'ipAddress': '10.2.3.6',
                    'port': 8470,
                },
                {
                    'ipAddress': '10.2.3.7',
                    'port': 8470,
                },
            ]
        }
    }
    tpu_cluster_resolver = TPUClusterResolver(
        project='test-project',
        zone='us-central1-c',
        tpu='test-tpu-1',
        coordinator_name=None,
        credentials=None,
        service=self.mock_service_client(tpu_map=tpu_map))
    actual_cluster_spec = tpu_cluster_resolver.cluster_spec()
    expected_proto = """
    job {
      name: 'worker'
      tasks { key: 0 value: '10.2.3.4:8470' }
      tasks { key: 1 value: '10.2.3.5:8470' }
      tasks { key: 2 value: '10.2.3.6:8470' }
      tasks { key: 3 value: '10.2.3.7:8470' }
    }
    """
    self._verifyClusterSpecEquality(actual_cluster_spec, expected_proto)
  def testGetMasterNoEntries(self):
    tpu_map = {}
    with self.assertRaises(ValueError):
      TPUClusterResolver(
          project='test-project',
          zone='us-central1-c',
          tpu=[],
          coordinator_name=None,
          credentials=None,
          service=self.mock_service_client(tpu_map=tpu_map))
  # TODO(saeta): Convert to parameterized test when included in OSS TF.
  def verifyShouldResolve(self, tpu, should_resolve):
    """Shared helper for the testShouldResolve* cases below."""
    tpu_cluster_resolver = TPUClusterResolver(
        project='test-project',
        zone='us-central1-c',
        tpu=tpu,
        coordinator_name=None,
        credentials=None,
        service=self.mock_service_client(tpu_map={}))
    self.assertEqual(should_resolve, tpu_cluster_resolver._shouldResolve(),
                     "TPU: '%s'" % tpu)
  def testShouldResolveNoName(self):
    self.verifyShouldResolve('', False)
  def testShouldResolveLocal(self):
    self.verifyShouldResolve('local', False)
  def testShouldResolveGrpc(self):
    self.verifyShouldResolve('grpc://10.1.2.3:8470', False)
  def testShouldResolveBns(self):
    self.verifyShouldResolve('/bns/foo/bar', False)
  def testShouldResolveName(self):
    self.verifyShouldResolve('mytpu', True)
  def testShouldResolveList(self):
    self.verifyShouldResolve(['myothertpu'], True)
  def testShouldResolveGrpcPrefix(self):
    self.verifyShouldResolve('grpctpu', True)
  def testNoCallComputeMetadata(self):
    tpu_cluster_resolver = TPUClusterResolver(tpu='/bns/foo/bar')
    self.assertEqual(
        compat.as_bytes('/bns/foo/bar'), tpu_cluster_resolver.master())
    self.assertEqual(
        server_lib.ClusterSpec({}), tpu_cluster_resolver.cluster_spec())
  def testGkeEnvironment(self):
    # The GKE code path is driven entirely by this environment variable;
    # restore the environment at the end so other tests are unaffected.
    os.environ['KUBE_GOOGLE_CLOUD_TPU_ENDPOINTS'] = 'grpc://10.120.27.5:8470'
    self.assertTrue('KUBE_GOOGLE_CLOUD_TPU_ENDPOINTS' in os.environ)
    tpu_cluster_resolver = TPUClusterResolver()
    self.assertTrue(tpu_cluster_resolver._inGke())
    self.assertEqual(
        compat.as_bytes('grpc://10.120.27.5:8470'),
        compat.as_bytes(tpu_cluster_resolver._gkeMaster()))
    self.assertEqual(
        compat.as_bytes('grpc://10.120.27.5:8470'),
        compat.as_bytes(tpu_cluster_resolver.get_master()))
    del os.environ['KUBE_GOOGLE_CLOUD_TPU_ENDPOINTS']
if __name__ == '__main__':
  test.main()  # Run via TensorFlow's test runner.
| Xeralux/tensorflow | tensorflow/contrib/cluster_resolver/python/training/tpu_cluster_resolver_test.py | Python | apache-2.0 | 12,171 |
"""Testing facility for conkit.io.A3mIO"""
__author__ = "Felix Simkovic"
__date__ = "11 Sep 2016"
import os
import unittest
from conkit.io.a3m import A3mParser
from conkit.io._iotools import create_tmp_f
class TestA3mParser(unittest.TestCase):
def test_read_1(self):
msa = """>d1a1x__ b.63.1.1 (-) p13-MTCP1 {Human (Homo sapiens)}
PPDHLWVHQEGIYRDEYQRTWVAVVEEETSFLRARVQQIQVPLGDAARPSHLLTSQL
>gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]
HPNRLWIWEKHVYLDEFRRSWLPVVIKSNEKFQVILRQEDVTLGEAMSPSQLVPYEL
>gi|7305557|ref|NP_038800.1|:(8-103) T-cell leukemia/lymphoma 1B, 3 [Mus musculus]
PPRFLVCTRDDIYEDENGRQWVVAKVETSRSpygsrietcITVHLQHMTTIPQEPTPQQPINNNSL
>gi|11415028|ref|NP_068801.1|:(2-106) T-cell lymphoma-1; T-cell lymphoma-1A [Homo sapiens]
HPDRLWAWEKFVYLDEKQHAWLPLTIEikDRLQLRVLLRREDVVLGRPMTPTQIGPSLL
>gi|7305561|ref|NP_038804.1|:(7-103) T-cell leukemia/lymphoma 1B, 5 [Mus musculus]
----------GIYEDEHHRVWIAVNVETSHSSHgnrietcvtVHLQHMTTLPQEPTPQQPINNNSL
>gi|7305553|ref|NP_038801.1|:(5-103) T-cell leukemia/lymphoma 1B, 1 [Mus musculus]
LPVYLVSVRLGIYEDEHHRVWIVANVETshSSHGNRRRTHVTVHLWKLIPQQVIPFNplnydFL
>gi|27668591|ref|XP_234504.1|:(7-103) similar to Chain A, Crystal Structure Of Murine Tcl1
-PDRLWLWEKHVYLDEFRRSWLPIVIKSNGKFQVIMRQKDVILGDSMTPSQLVPYEL
>gi|27668589|ref|XP_234503.1|:(9-91) similar to T-cell leukemia/lymphoma 1B, 5;
-PHILTLRTHGIYEDEHHRLWVVLDLQAShlSFSNRLLIYLTVYLQqgvafplESTPPSPMNLNGL
>gi|7305559|ref|NP_038802.1|:(8-102) T-cell leukemia/lymphoma 1B, 4 [Mus musculus]
PPCFLVCTRDDIYEDEHGRQWVAAKVETSSHSPycskietcvtVHLWQMTTLFQEPSPDSLKTFNFL
>gi|7305555|ref|NP_038803.1|:(9-102) T-cell leukemia/lymphoma 1B, 2 [Mus musculus]
---------PGFYEDEHHRLWMVAKLETCSHSPycnkietcvtVHLWQMTRYPQEPAPYNPMNYNFL
"""
f_name = create_tmp_f(content=msa)
parser = A3mParser()
with open(f_name, 'r') as f_in:
sequence_file = parser.read(f_in, remove_inserts=True) # <------------
for i, sequence_entry in enumerate(sequence_file):
if i == 0:
self.assertEqual('d1a1x__ b.63.1.1 (-) p13-MTCP1 {Human (Homo sapiens)}', sequence_entry.id)
self.assertEqual('PPDHLWVHQEGIYRDEYQRTWVAVVEEETSFLRARVQQIQVPLGDAARPSHLLTSQL', sequence_entry.seq)
elif i == 1:
self.assertEqual('gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]',
sequence_entry.id)
self.assertEqual('HPNRLWIWEKHVYLDEFRRSWLPVVIKSNEKFQVILRQEDVTLGEAMSPSQLVPYEL', sequence_entry.seq)
elif i == 2:
self.assertEqual('gi|7305557|ref|NP_038800.1|:(8-103) T-cell leukemia/lymphoma 1B, 3 [Mus musculus]',
sequence_entry.id)
self.assertEqual('PPRFLVCTRDDIYEDENGRQWVVAKVETSRSITVHLQHMTTIPQEPTPQQPINNNSL', sequence_entry.seq)
elif i == 3:
self.assertEqual(
'gi|11415028|ref|NP_068801.1|:(2-106) T-cell lymphoma-1; T-cell lymphoma-1A [Homo sapiens]',
sequence_entry.id)
self.assertEqual('HPDRLWAWEKFVYLDEKQHAWLPLTIEDRLQLRVLLRREDVVLGRPMTPTQIGPSLL', sequence_entry.seq)
elif i == 4:
self.assertEqual('gi|7305561|ref|NP_038804.1|:(7-103) T-cell leukemia/lymphoma 1B, 5 [Mus musculus]',
sequence_entry.id)
self.assertEqual('----------GIYEDEHHRVWIAVNVETSHSSHVHLQHMTTLPQEPTPQQPINNNSL', sequence_entry.seq)
elif i == 5:
self.assertEqual('gi|7305553|ref|NP_038801.1|:(5-103) T-cell leukemia/lymphoma 1B, 1 [Mus musculus]',
sequence_entry.id)
self.assertEqual('LPVYLVSVRLGIYEDEHHRVWIVANVETSSHGNRRRTHVTVHLWKLIPQQVIPFNFL', sequence_entry.seq)
elif i == 6:
self.assertEqual(
'gi|27668591|ref|XP_234504.1|:(7-103) similar to Chain A, Crystal Structure Of Murine Tcl1',
sequence_entry.id)
self.assertEqual('-PDRLWLWEKHVYLDEFRRSWLPIVIKSNGKFQVIMRQKDVILGDSMTPSQLVPYEL', sequence_entry.seq)
elif i == 7:
self.assertEqual('gi|27668589|ref|XP_234503.1|:(9-91) similar to T-cell leukemia/lymphoma 1B, 5;',
sequence_entry.id)
self.assertEqual('-PHILTLRTHGIYEDEHHRLWVVLDLQASSFSNRLLIYLTVYLQESTPPSPMNLNGL', sequence_entry.seq)
elif i == 8:
self.assertEqual('gi|7305559|ref|NP_038802.1|:(8-102) T-cell leukemia/lymphoma 1B, 4 [Mus musculus]',
sequence_entry.id)
self.assertEqual('PPCFLVCTRDDIYEDEHGRQWVAAKVETSSHSPVHLWQMTTLFQEPSPDSLKTFNFL', sequence_entry.seq)
elif i == 9:
self.assertEqual('gi|7305555|ref|NP_038803.1|:(9-102) T-cell leukemia/lymphoma 1B, 2 [Mus musculus]',
sequence_entry.id)
self.assertEqual('---------PGFYEDEHHRLWMVAKLETCSHSPVHLWQMTRYPQEPAPYNPMNYNFL', sequence_entry.seq)
os.unlink(f_name)
def test_read_2(self):
msa = """>d1a1x__ b.63.1.1 (-) p13-MTCP1 {Human (Homo sapiens)}
PPDHLWVHQEGIYRDEYQRTWVAVVEEETSFLRARVQQIQVPLGDAARPSHLLTSQL
>gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]
HPNRLWIWEKHVYLDEFRRSWLPVVIKSNEKFQVILRQEDVTLGEAMSPSQLVPYEL
>gi|7305557|ref|NP_038800.1|:(8-103) T-cell leukemia/lymphoma 1B, 3 [Mus musculus]
PPRFLVCTRDDIYEDENGRQWVVAKVETSRSpygsrietcITVHLQHMTTIPQEPTPQQPINNNSL
>gi|11415028|ref|NP_068801.1|:(2-106) T-cell lymphoma-1; T-cell lymphoma-1A [Homo sapiens]
HPDRLWAWEKFVYLDEKQHAWLPLTIEikDRLQLRVLLRREDVVLGRPMTPTQIGPSLL
>gi|7305561|ref|NP_038804.1|:(7-103) T-cell leukemia/lymphoma 1B, 5 [Mus musculus]
----------GIYEDEHHRVWIAVNVETSHSSHgnrietcvtVHLQHMTTLPQEPTPQQPINNNSL
>gi|7305553|ref|NP_038801.1|:(5-103) T-cell leukemia/lymphoma 1B, 1 [Mus musculus]
LPVYLVSVRLGIYEDEHHRVWIVANVETshSSHGNRRRTHVTVHLWKLIPQQVIPFNplnydFL
>gi|27668591|ref|XP_234504.1|:(7-103) similar to Chain A, Crystal Structure Of Murine Tcl1
-PDRLWLWEKHVYLDEFRRSWLPIVIKSNGKFQVIMRQKDVILGDSMTPSQLVPYEL
>gi|27668589|ref|XP_234503.1|:(9-91) similar to T-cell leukemia/lymphoma 1B, 5;
-PHILTLRTHGIYEDEHHRLWVVLDLQAShlSFSNRLLIYLTVYLQqgvafplESTPPSPMNLNGL
>gi|7305559|ref|NP_038802.1|:(8-102) T-cell leukemia/lymphoma 1B, 4 [Mus musculus]
PPCFLVCTRDDIYEDEHGRQWVAAKVETSSHSPycskietcvtVHLWQMTTLFQEPSPDSLKTFNFL
>gi|7305555|ref|NP_038803.1|:(9-102) T-cell leukemia/lymphoma 1B, 2 [Mus musculus]
---------PGFYEDEHHRLWMVAKLETCSHSPycnkietcvtVHLWQMTRYPQEPAPYNPMNYNFL
"""
f_name = create_tmp_f(content=msa)
parser = A3mParser()
with open(f_name, 'r') as f_in:
sequence_file = parser.read(f_in, remove_inserts=False) # <------------
for i, sequence_entry in enumerate(sequence_file):
if i == 0:
self.assertEqual('d1a1x__ b.63.1.1 (-) p13-MTCP1 {Human (Homo sapiens)}', sequence_entry.id)
self.assertEqual(
'PPDHLWVHQEGIYRDEYQRTWVAVVEE--E--T--SF---------LR----------ARVQQIQVPLG-------DAARPSHLLTS-----QL',
sequence_entry.seq)
elif i == 1:
self.assertEqual('gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]',
sequence_entry.id)
self.assertEqual(
'HPNRLWIWEKHVYLDEFRRSWLPVVIK--S--N--EK---------FQ----------VILRQEDVTLG-------EAMSPSQLVPY-----EL',
sequence_entry.seq)
elif i == 2:
self.assertEqual('gi|7305557|ref|NP_038800.1|:(8-103) T-cell leukemia/lymphoma 1B, 3 [Mus musculus]',
sequence_entry.id)
self.assertEqual(
'PPRFLVCTRDDIYEDENGRQWVVAKVE--T--S--RSpygsrietcIT----------VHLQHMTTIPQ-------EPTPQQPINNN-----SL',
sequence_entry.seq)
elif i == 3:
self.assertEqual(
'gi|11415028|ref|NP_068801.1|:(2-106) T-cell lymphoma-1; T-cell lymphoma-1A [Homo sapiens]',
sequence_entry.id)
self.assertEqual(
'HPDRLWAWEKFVYLDEKQHAWLPLTIEikD--R--LQ---------LR----------VLLRREDVVLG-------RPMTPTQIGPS-----LL',
sequence_entry.seq)
elif i == 4:
self.assertEqual('gi|7305561|ref|NP_038804.1|:(7-103) T-cell leukemia/lymphoma 1B, 5 [Mus musculus]',
sequence_entry.id)
self.assertEqual(
'----------GIYEDEHHRVWIAVNVE--T--S--HS---------SHgnrietcvt-VHLQHMTTLPQ-------EPTPQQPINNN-----SL',
sequence_entry.seq)
elif i == 5:
self.assertEqual('gi|7305553|ref|NP_038801.1|:(5-103) T-cell leukemia/lymphoma 1B, 1 [Mus musculus]',
sequence_entry.id)
self.assertEqual(
'LPVYLVSVRLGIYEDEHHRVWIVANVE--TshS--SH---------GN----------RRRTHVTVHLW-------KLIPQQVIPFNplnydFL',
sequence_entry.seq)
elif i == 6:
self.assertEqual(
'gi|27668591|ref|XP_234504.1|:(7-103) similar to Chain A, Crystal Structure Of Murine Tcl1',
sequence_entry.id)
self.assertEqual(
'-PDRLWLWEKHVYLDEFRRSWLPIVIK--S--N--GK---------FQ----------VIMRQKDVILG-------DSMTPSQLVPY-----EL',
sequence_entry.seq)
elif i == 7:
self.assertEqual('gi|27668589|ref|XP_234503.1|:(9-91) similar to T-cell leukemia/lymphoma 1B, 5;',
sequence_entry.id)
self.assertEqual(
'-PHILTLRTHGIYEDEHHRLWVVLDLQ--A--ShlSF---------SN----------RLLIYLTVYLQqgvafplESTPPSPMNLN-----GL',
sequence_entry.seq)
elif i == 8:
self.assertEqual('gi|7305559|ref|NP_038802.1|:(8-102) T-cell leukemia/lymphoma 1B, 4 [Mus musculus]',
sequence_entry.id)
self.assertEqual(
'PPCFLVCTRDDIYEDEHGRQWVAAKVE--T--S--SH---------SPycskietcvtVHLWQMTTLFQ-------EPSPDSLKTFN-----FL',
sequence_entry.seq)
elif i == 9:
self.assertEqual('gi|7305555|ref|NP_038803.1|:(9-102) T-cell leukemia/lymphoma 1B, 2 [Mus musculus]',
sequence_entry.id)
self.assertEqual(
'---------PGFYEDEHHRLWMVAKLE--T--C--SH---------SPycnkietcvtVHLWQMTRYPQ-------EPAPYNPMNYN-----FL',
sequence_entry.seq)
os.unlink(f_name)
def test_read_3(self):
msa = """>d1a1x__ b.63.1.1 (-) p13-MTCP1 {Human (Homo sapiens)}
PPDHLWVHQEGIYRDEYQRTWVAVVEEETSFLRARVQQIQVPLGDAARPSHLLTSQL
>gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]
HPNRLWIWEKHVYLDEFRRSWLPVVIKSNEKFQVILRQEDVTLGEAMSPSQLVPYEL
>gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]
HPNRLWIWEKHVYLDEFRRSWLPVVIKSNEKFQVILRQEDVTLGEAMSPSQLVPYEL
>gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]
HPNRLWIWEKHVYLDEFRRSWLPVVIKSNEKFQVILRQEDVTLGEAMSPSQLVPYEL
"""
f_name = create_tmp_f(content=msa)
parser = A3mParser()
with open(f_name, 'r') as f_in:
sequence_file = parser.read(f_in, remove_inserts=False) # <------------
for i, sequence_entry in enumerate(sequence_file):
if i == 0:
self.assertEqual('d1a1x__ b.63.1.1 (-) p13-MTCP1 {Human (Homo sapiens)}', sequence_entry.id)
self.assertEqual('PPDHLWVHQEGIYRDEYQRTWVAVVEEETSFLRARVQQIQVPLGDAARPSHLLTSQL', sequence_entry.seq)
elif i == 1:
self.assertEqual('gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]',
sequence_entry.id[:80])
self.assertEqual(79, len(sequence_entry.id))
self.assertEqual('HPNRLWIWEKHVYLDEFRRSWLPVVIKSNEKFQVILRQEDVTLGEAMSPSQLVPYEL', sequence_entry.seq)
elif i == 1:
self.assertEqual('gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]',
sequence_entry.id[:80])
self.assertGreater(79, len(sequence_entry.id))
self.assertEqual('HPNRLWIWEKHVYLDEFRRSWLPVVIKSNEKFQVILRQEDVTLGEAMSPSQLVPYEL', sequence_entry.seq)
elif i == 1:
self.assertEqual('gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]',
sequence_entry.id[:80])
self.assertGreater(79, len(sequence_entry.id))
self.assertEqual('HPNRLWIWEKHVYLDEFRRSWLPVVIKSNEKFQVILRQEDVTLGEAMSPSQLVPYEL', sequence_entry.seq)
os.unlink(f_name)
def test_write_1(self):
msa = [
">d1a1x__ b.63.1.1 (-) p13-MTCP1 {Human (Homo sapiens)}",
"PPDHLWVHQEGIYRDEYQRTWVAVVEEETSFLRARVQQIQVPLGDAARPSHLLTSQL",
">gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]",
"HPNRLWIWEKHVYLDEFRRSWLPVVIKSNEKFQVILRQEDVTLGEAMSPSQLVPYEL",
">gi|7305557|ref|NP_038800.1|:(8-103) T-cell leukemia/lymphoma 1B, 3 [Mus musculus]",
"PPRFLVCTRDDIYEDENGRQWVVAKVETSRSpygsrietcITVHLQHMTTIPQEPTPQQPINNNSL",
">gi|11415028|ref|NP_068801.1|:(2-106) T-cell lymphoma-1; T-cell lymphoma-1A [Homo sapiens]",
"HPDRLWAWEKFVYLDEKQHAWLPLTIEikDRLQLRVLLRREDVVLGRPMTPTQIGPSLL",
">gi|7305561|ref|NP_038804.1|:(7-103) T-cell leukemia/lymphoma 1B, 5 [Mus musculus]",
"----------GIYEDEHHRVWIAVNVETSHSSHgnrietcvtVHLQHMTTLPQEPTPQQPINNNSL",
">gi|7305553|ref|NP_038801.1|:(5-103) T-cell leukemia/lymphoma 1B, 1 [Mus musculus]",
"LPVYLVSVRLGIYEDEHHRVWIVANVETshSSHGNRRRTHVTVHLWKLIPQQVIPFNplnydFL",
">gi|27668591|ref|XP_234504.1|:(7-103) similar to Chain A, Crystal Structure Of Murine Tcl1",
"-PDRLWLWEKHVYLDEFRRSWLPIVIKSNGKFQVIMRQKDVILGDSMTPSQLVPYEL",
">gi|27668589|ref|XP_234503.1|:(9-91) similar to T-cell leukemia/lymphoma 1B, 5;",
"-PHILTLRTHGIYEDEHHRLWVVLDLQAShlSFSNRLLIYLTVYLQqgvafplESTPPSPMNLNGL",
">gi|7305559|ref|NP_038802.1|:(8-102) T-cell leukemia/lymphoma 1B, 4 [Mus musculus]",
"PPCFLVCTRDDIYEDEHGRQWVAAKVETSSHSPycskietcvtVHLWQMTTLFQEPSPDSLKTFNFL",
">gi|7305555|ref|NP_038803.1|:(9-102) T-cell leukemia/lymphoma 1B, 2 [Mus musculus]",
"---------PGFYEDEHHRLWMVAKLETCSHSPycnkietcvtVHLWQMTRYPQEPAPYNPMNYNFL",
]
f_name_in = create_tmp_f(content='\n'.join(msa))
f_name_out = create_tmp_f()
parser = A3mParser()
with open(f_name_in, 'r') as f_in, open(f_name_out, 'w') as f_out:
sequence_file = parser.read(f_in, remove_inserts=True)
parser.write(f_out, sequence_file)
ref = [
">d1a1x__ b.63.1.1 (-) p13-MTCP1 {Human (Homo sapiens)}",
"PPDHLWVHQEGIYRDEYQRTWVAVVEEETSFLRARVQQIQVPLGDAARPSHLLTSQL",
">gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]",
"HPNRLWIWEKHVYLDEFRRSWLPVVIKSNEKFQVILRQEDVTLGEAMSPSQLVPYEL",
">gi|7305557|ref|NP_038800.1|:(8-103) T-cell leukemia/lymphoma 1B, 3 [Mus musculus]",
"PPRFLVCTRDDIYEDENGRQWVVAKVETSRSITVHLQHMTTIPQEPTPQQPINNNSL",
">gi|11415028|ref|NP_068801.1|:(2-106) T-cell lymphoma-1; T-cell lymphoma-1A [Homo sapiens]",
"HPDRLWAWEKFVYLDEKQHAWLPLTIEDRLQLRVLLRREDVVLGRPMTPTQIGPSLL",
">gi|7305561|ref|NP_038804.1|:(7-103) T-cell leukemia/lymphoma 1B, 5 [Mus musculus]",
"----------GIYEDEHHRVWIAVNVETSHSSHVHLQHMTTLPQEPTPQQPINNNSL",
">gi|7305553|ref|NP_038801.1|:(5-103) T-cell leukemia/lymphoma 1B, 1 [Mus musculus]",
"LPVYLVSVRLGIYEDEHHRVWIVANVETSSHGNRRRTHVTVHLWKLIPQQVIPFNFL",
">gi|27668591|ref|XP_234504.1|:(7-103) similar to Chain A, Crystal Structure Of Murine Tcl1",
"-PDRLWLWEKHVYLDEFRRSWLPIVIKSNGKFQVIMRQKDVILGDSMTPSQLVPYEL",
">gi|27668589|ref|XP_234503.1|:(9-91) similar to T-cell leukemia/lymphoma 1B, 5;",
"-PHILTLRTHGIYEDEHHRLWVVLDLQASSFSNRLLIYLTVYLQESTPPSPMNLNGL",
">gi|7305559|ref|NP_038802.1|:(8-102) T-cell leukemia/lymphoma 1B, 4 [Mus musculus]",
"PPCFLVCTRDDIYEDEHGRQWVAAKVETSSHSPVHLWQMTTLFQEPSPDSLKTFNFL",
">gi|7305555|ref|NP_038803.1|:(9-102) T-cell leukemia/lymphoma 1B, 2 [Mus musculus]",
"---------PGFYEDEHHRLWMVAKLETCSHSPVHLWQMTRYPQEPAPYNPMNYNFL",
]
with open(f_name_out, 'r') as f_in:
output = f_in.read().splitlines()
self.assertEqual(ref, output)
map(os.unlink, [f_name_in, f_name_out])
def test_write_2(self):
msa = [
">d1a1x__ b.63.1.1 (-) p13-MTCP1 {Human (Homo sapiens)}",
"PPDHLWVHQEGIYRDEYQRTWVAVVEEETSFLRARVQQIQVPLGDAARPSHLLTSQL",
">gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]",
"HPNRLWIWEKHVYLDEFRRSWLPVVIKSNEKFQVILRQEDVTLGEAMSPSQLVPYEL",
">gi|7305557|ref|NP_038800.1|:(8-103) T-cell leukemia/lymphoma 1B, 3 [Mus musculus]",
"PPRFLVCTRDDIYEDENGRQWVVAKVETSRSpygsrietcITVHLQHMTTIPQEPTPQQPINNNSL",
">gi|11415028|ref|NP_068801.1|:(2-106) T-cell lymphoma-1; T-cell lymphoma-1A [Homo sapiens]",
"HPDRLWAWEKFVYLDEKQHAWLPLTIEikDRLQLRVLLRREDVVLGRPMTPTQIGPSLL",
">gi|7305561|ref|NP_038804.1|:(7-103) T-cell leukemia/lymphoma 1B, 5 [Mus musculus]",
"----------GIYEDEHHRVWIAVNVETSHSSHgnrietcvtVHLQHMTTLPQEPTPQQPINNNSL",
">gi|7305553|ref|NP_038801.1|:(5-103) T-cell leukemia/lymphoma 1B, 1 [Mus musculus]",
"LPVYLVSVRLGIYEDEHHRVWIVANVETshSSHGNRRRTHVTVHLWKLIPQQVIPFNplnydFL",
">gi|27668591|ref|XP_234504.1|:(7-103) similar to Chain A, Crystal Structure Of Murine Tcl1",
"-PDRLWLWEKHVYLDEFRRSWLPIVIKSNGKFQVIMRQKDVILGDSMTPSQLVPYEL",
">gi|27668589|ref|XP_234503.1|:(9-91) similar to T-cell leukemia/lymphoma 1B, 5;",
"-PHILTLRTHGIYEDEHHRLWVVLDLQAShlSFSNRLLIYLTVYLQqgvafplESTPPSPMNLNGL",
">gi|7305559|ref|NP_038802.1|:(8-102) T-cell leukemia/lymphoma 1B, 4 [Mus musculus]",
"PPCFLVCTRDDIYEDEHGRQWVAAKVETSSHSPycskietcvtVHLWQMTTLFQEPSPDSLKTFNFL",
">gi|7305555|ref|NP_038803.1|:(9-102) T-cell leukemia/lymphoma 1B, 2 [Mus musculus]",
"---------PGFYEDEHHRLWMVAKLETCSHSPycnkietcvtVHLWQMTRYPQEPAPYNPMNYNFL",
]
f_name_in = create_tmp_f(content='\n'.join(msa))
f_name_out = create_tmp_f()
parser = A3mParser()
with open(f_name_in, 'r') as f_in, open(f_name_out, 'w') as f_out:
sequence_file = parser.read(f_in, remove_inserts=False)
parser.write(f_out, sequence_file)
ref = [
">d1a1x__ b.63.1.1 (-) p13-MTCP1 {Human (Homo sapiens)}",
"PPDHLWVHQEGIYRDEYQRTWVAVVEE--E--T--SF---------LR----------ARVQQIQVPLG-------DAARPSHLLTS-----QL",
">gi|6678257|ref|NP_033363.1|:(7-103) T-cell lymphoma breakpoint 1 [Mus musculus]",
"HPNRLWIWEKHVYLDEFRRSWLPVVIK--S--N--EK---------FQ----------VILRQEDVTLG-------EAMSPSQLVPY-----EL",
">gi|7305557|ref|NP_038800.1|:(8-103) T-cell leukemia/lymphoma 1B, 3 [Mus musculus]",
"PPRFLVCTRDDIYEDENGRQWVVAKVE--T--S--RSpygsrietcIT----------VHLQHMTTIPQ-------EPTPQQPINNN-----SL",
">gi|11415028|ref|NP_068801.1|:(2-106) T-cell lymphoma-1; T-cell lymphoma-1A [Homo sapiens]",
"HPDRLWAWEKFVYLDEKQHAWLPLTIEikD--R--LQ---------LR----------VLLRREDVVLG-------RPMTPTQIGPS-----LL",
">gi|7305561|ref|NP_038804.1|:(7-103) T-cell leukemia/lymphoma 1B, 5 [Mus musculus]",
"----------GIYEDEHHRVWIAVNVE--T--S--HS---------SHgnrietcvt-VHLQHMTTLPQ-------EPTPQQPINNN-----SL",
">gi|7305553|ref|NP_038801.1|:(5-103) T-cell leukemia/lymphoma 1B, 1 [Mus musculus]",
"LPVYLVSVRLGIYEDEHHRVWIVANVE--TshS--SH---------GN----------RRRTHVTVHLW-------KLIPQQVIPFNplnydFL",
">gi|27668591|ref|XP_234504.1|:(7-103) similar to Chain A, Crystal Structure Of Murine Tcl1",
"-PDRLWLWEKHVYLDEFRRSWLPIVIK--S--N--GK---------FQ----------VIMRQKDVILG-------DSMTPSQLVPY-----EL",
">gi|27668589|ref|XP_234503.1|:(9-91) similar to T-cell leukemia/lymphoma 1B, 5;",
"-PHILTLRTHGIYEDEHHRLWVVLDLQ--A--ShlSF---------SN----------RLLIYLTVYLQqgvafplESTPPSPMNLN-----GL",
">gi|7305559|ref|NP_038802.1|:(8-102) T-cell leukemia/lymphoma 1B, 4 [Mus musculus]",
"PPCFLVCTRDDIYEDEHGRQWVAAKVE--T--S--SH---------SPycskietcvtVHLWQMTTLFQ-------EPSPDSLKTFN-----FL",
">gi|7305555|ref|NP_038803.1|:(9-102) T-cell leukemia/lymphoma 1B, 2 [Mus musculus]",
"---------PGFYEDEHHRLWMVAKLE--T--C--SH---------SPycnkietcvtVHLWQMTRYPQ-------EPAPYNPMNYN-----FL",
]
with open(f_name_out, 'r') as f_in:
output = f_in.read().splitlines()
self.assertEqual(ref, output)
map(os.unlink, [f_name_in, f_name_out])
# Run the test suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main(verbosity=2)
| fsimkovic/conkit | conkit/io/tests/test_a3m.py | Python | bsd-3-clause | 21,377 |
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for arista acl rendering module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
from lib import arista
from lib import nacaddr
from lib import naming
from lib import policy
import mock
GOOD_HEADER = """
header {
comment:: "this is a test extended acl"
target:: arista test-filter extended
}
"""
GOOD_HEADER_2 = """
header {
comment:: "this is a test acl"
target:: arista test-filter
}
"""
GOOD_TERM = """
term good-term {
protocol:: tcp
option:: tcp-established
action:: accept
}
"""
GOOD_TERM_1 = """
term good-term-1 {
protocol:: tcp
option:: tcp-established
policer:: batman
action:: accept
}
"""
GOOD_TERM_2 = """
term good-term-2 {
source-address:: SOME_HOST
destination-port:: SSH
protocol:: tcp
action:: accept
}
"""
GOOD_TERM_3 = """
term good-term-3 {
source-address:: SOME_HOST2
destination-port:: GOPENFLOW
protocol:: tcp
action:: accept
}
"""
SUPPORTED_TOKENS = {
'action',
'address',
'comment',
'destination_address',
'destination_address_exclude',
'destination_port',
'dscp_match',
'expiration',
'icmp_code',
'icmp_type',
'stateless_reply',
'logging',
'name',
'option',
'owner',
'platform',
'platform_exclude',
'protocol',
'source_address',
'source_address_exclude',
'source_port',
'translated',
'verbatim',
}
SUPPORTED_SUB_TOKENS = {
'action': {'accept', 'deny', 'reject', 'next',
'reject-with-tcp-rst'},
'icmp_type': {
'alternate-address',
'certification-path-advertisement',
'certification-path-solicitation',
'conversion-error',
'destination-unreachable',
'echo-reply',
'echo-request',
'mobile-redirect',
'home-agent-address-discovery-reply',
'home-agent-address-discovery-request',
'icmp-node-information-query',
'icmp-node-information-response',
'information-request',
'inverse-neighbor-discovery-advertisement',
'inverse-neighbor-discovery-solicitation',
'mask-reply',
'mask-request',
'information-reply',
'mobile-prefix-advertisement',
'mobile-prefix-solicitation',
'multicast-listener-done',
'multicast-listener-query',
'multicast-listener-report',
'multicast-router-advertisement',
'multicast-router-solicitation',
'multicast-router-termination',
'neighbor-advertisement',
'neighbor-solicit',
'packet-too-big',
'parameter-problem',
'redirect',
'redirect-message',
'router-advertisement',
'router-renumbering',
'router-solicit',
'router-solicitation',
'source-quench',
'time-exceeded',
'timestamp-reply',
'timestamp-request',
'unreachable',
'version-2-multicast-listener-report',
},
'option': {'established',
'tcp-established'}
}
# Print a info message when a term is set to expire in that many weeks.
# This is normally passed from command line.
EXP_INFO = 2
class AristaTest(unittest.TestCase):
  """Tests for the arista ACL rendering module.

  Uses the deprecated-alias-free assertion names: ``assertEquals`` and
  ``failUnless`` are aliases that were deprecated in Python 2.7/3.2 and
  removed in Python 3.12.
  """

  def setUp(self):
    # A spec'd mock so unexpected Naming calls fail loudly.
    self.naming = mock.create_autospec(naming.Naming)

  def testExtendedEosSyntax(self):
    # Extended access-lists should not use the "extended" argument to ip
    # access-list.
    acl = arista.Arista(
        policy.ParsePolicy(GOOD_HEADER + GOOD_TERM, self.naming), EXP_INFO)
    self.assertTrue('ip access-list test-filter' in str(acl))

  def testBuildTokens(self):
    pol1 = arista.Arista(policy.ParsePolicy(GOOD_HEADER + GOOD_TERM,
                                            self.naming), EXP_INFO)
    st, sst = pol1._BuildTokens()
    self.assertEqual(st, SUPPORTED_TOKENS)
    self.assertEqual(sst, SUPPORTED_SUB_TOKENS)

  def testBuildWarningTokens(self):
    # A term with an unsupported-but-warned keyword still yields the same
    # token sets.
    pol1 = arista.Arista(policy.ParsePolicy(GOOD_HEADER + GOOD_TERM_1,
                                            self.naming), EXP_INFO)
    st, sst = pol1._BuildTokens()
    self.assertEqual(st, SUPPORTED_TOKENS)
    self.assertEqual(sst, SUPPORTED_SUB_TOKENS)

  def testStandardTermHost(self):
    self.naming.GetNetAddr.return_value = [nacaddr.IP('10.1.1.0/24')]
    self.naming.GetServiceByProto.return_value = ['22', '6537']

    pol = policy.ParsePolicy(GOOD_HEADER_2 + GOOD_TERM_2 + GOOD_TERM_3,
                             self.naming)
    acl = arista.Arista(pol, EXP_INFO)
    expected = 'ip access-list test-filter'
    self.assertTrue(expected in str(acl), '[%s]' % str(acl))
    expected = ' permit tcp 10.1.1.0/24 any eq ssh'
    self.assertTrue(expected in str(acl), str(acl))
    expected = ' permit tcp 10.1.1.0/24 any eq 6537'
    self.assertTrue(expected in str(acl), str(acl))

    self.naming.GetNetAddr.assert_has_calls([mock.call('SOME_HOST'),
                                             mock.call('SOME_HOST2')])
    self.naming.GetServiceByProto.assert_has_calls(
        [mock.call('SSH', 'tcp'), mock.call('GOPENFLOW', 'tcp')])
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
  unittest.main()
| rarcotvmw/capirca | tests/lib/arista_test.py | Python | apache-2.0 | 5,777 |
# Memoise calls to f(x): each distinct input value is evaluated only once.
# NOTE(review): f() is not defined in this file -- it is presumably supplied
# by the surrounding exercise before this script runs; confirm.
result = {}
for i in range(int(input())):
    x = int(input())
    # Dict membership tests keys directly; no need for ``.keys()``.
    if x not in result:
        result[x] = f(x)
    print(result[x])
| TheNovel/stepik-programming-on-python | week3/3.02 dict/step07 func.py | Python | mit | 140 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the USBStor Windows Registry plugin."""
import unittest
from plaso.lib import definitions
from plaso.parsers.winreg_plugins import usbstor
from tests.parsers.winreg_plugins import test_lib
class USBStorPlugin(test_lib.RegistryPluginTestCase):
  """Tests for the USBStor Windows Registry plugin."""

  def testFilters(self):
    """Tests the FILTERS class attribute."""
    plugin = usbstor.USBStorPlugin()

    # The plugin must match the USBSTOR enumeration key...
    key_path = 'HKEY_LOCAL_MACHINE\\System\\ControlSet001\\Enum\\USBSTOR'
    self._AssertFiltersOnKeyPath(plugin, key_path)

    # ...and nothing else.
    self._AssertNotFiltersOnKeyPath(plugin, 'HKEY_LOCAL_MACHINE\\Bogus')

  def testProcess(self):
    """Tests the Process function."""
    test_file_entry = self._GetTestFileEntry(['SYSTEM'])
    key_path = 'HKEY_LOCAL_MACHINE\\System\\ControlSet001\\Enum\\USBSTOR'

    win_registry = self._GetWinRegistryFromFileEntry(test_file_entry)
    registry_key = win_registry.GetKeyByPath(key_path)

    plugin = usbstor.USBStorPlugin()
    storage_writer = self._ParseKeyWithPlugin(
        registry_key, plugin, file_entry=test_file_entry)

    # The SYSTEM test fixture contains five USBSTOR device events.
    self.assertEqual(storage_writer.number_of_events, 5)
    self.assertEqual(storage_writer.number_of_extraction_warnings, 0)
    self.assertEqual(storage_writer.number_of_recovery_warnings, 0)

    events = list(storage_writer.GetEvents())

    expected_event_values = {
        'date_time': '2012-04-07 10:31:37.6408714',
        'data_type': 'windows:registry:usbstor',
        'device_type': 'Disk',
        'display_name': 'HP v100w USB Device',
        'key_path': key_path,
        # This should just be the plugin name, as we're invoking it directly,
        # and not through the parser.
        'parser': plugin.plugin_name,
        'product': 'Prod_v100w',
        'revision': 'Rev_1024',
        'serial': 'AA951D0000007252&0',
        'subkey_name': 'Disk&Ven_HP&Prod_v100w&Rev_1024',
        'timestamp_desc': definitions.TIME_DESCRIPTION_WRITTEN,
        'vendor': 'Ven_HP'}

    self.CheckEventValues(storage_writer, events[0], expected_event_values)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
  unittest.main()
| kiddinn/plaso | tests/parsers/winreg_plugins/usbstor.py | Python | apache-2.0 | 2,151 |
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
"""
-------
main.py
-------
Main methods (views + routes) implemented in the API.
.. moduleauthor:: Fabio Madeira
:module_version: 1.0
:created_on: 28-02-2015
"""
import webapp2
import logging
import os
import jinja2
import urllib
from tools import *
from google.appengine.ext.webapp import template
# Jinja2 environment rooted at ./templates, with autoescaping enabled.
JINJA_ENVIRONMENT = jinja2.Environment(
    loader=jinja2.FileSystemLoader('templates'),
    extensions=['jinja2.ext.autoescape'],
    autoescape=True)
# Defaults used when the HTML forms are submitted empty.
default_search = '"PLoS One"[jour]'
default_feeds = 10
default_rssguid= "1h9kEWSfxImUd3q0TuDX7eLhEJoM4-k3pB8scCPrUmcSn3lkLl"
class MainPage(webapp2.RequestHandler):

    def get(self, search_output="", rssguid_output="", twitter_output=""):
        """Renders a simple api doc with the implemented methods."""
        template_values = {
            'baseurl': "",
            'default_search': default_search,
            'default_feeds': str(default_feeds),
            'default_rssguid': default_rssguid,
            'search_output': search_output,
            'rssguid_output': rssguid_output,
            'twitter_output': twitter_output,
        }
        path = os.path.join(os.path.dirname(__file__), 'api.html')
        self.response.write(template.render(path, template_values))
class Search(webapp2.RequestHandler):

    def post(self):
        """Redirect the search form to the pubmed search endpoint."""
        raw_query = self.request.get("search", default_search)
        target = '/search/pubmed/string=%s' % urllib.quote_plus(raw_query)
        return webapp2.redirect(target)
class Rss(webapp2.RequestHandler):

    def post(self):
        """Redirect the RSS form to the feed-generation endpoint."""
        params = (self.request.get("search", default_search),
                  self.request.get("feeds", default_feeds))
        return webapp2.redirect('/rss/pubmed/string=%s&feeds=%s' % params)
class Twitter(webapp2.RequestHandler):

    def post(self):
        """Redirect the twitter form to the bot endpoint."""
        guid = self.request.get("rssguid", default_rssguid)
        return webapp2.redirect('/twitter_bot&rss_guid=%s' % guid)
class SearchPubmed(webapp2.RequestHandler):

    def get(self, string):
        """Return output from Pubmed - based on eutils API."""
        # Guard clause: an empty query is a server error (as before).
        if not string:
            self.abort(500)
        return webapp2.redirect('/search_output=%s' % string)
class RssPubmed(webapp2.RequestHandler):
    """Generate a rss feed from Pubmed - based on the main page search."""

    def get(self, string, feeds=50):
        # Guard clause: an empty query is a server error (as before).
        if not string:
            self.abort(500)
        rss_guid = generate_rss_from_pubmed(string, feeds=feeds)
        return webapp2.redirect('/rssguid_output=%s' % rss_guid)
class RssBot(webapp2.RequestHandler):
    """
    Consumes a feed and checks if there are new entries in db.
    If so, gets a shortened url and tweets the new status.
    """
    def get(self, rss_guid=None):
        """Run the twitter bot for *rss_guid* and render the tweeted items."""
        try:
            tweets = twitter_bot(rss_guid=rss_guid)
            template_values = {}
            template_values['baseurl'] = ""
            template_values['twitter_output'] = tweets
            path = os.path.join(os.path.dirname(__file__), 'papers.html')
            self.response.write(template.render(path, template_values))
        except Exception:
            # The original bare ``except:`` also swallowed SystemExit and
            # KeyboardInterrupt; narrow the catch and log the failure so it
            # is diagnosable instead of silently returning a 500.
            logging.exception('twitter bot failed for rss_guid=%s', rss_guid)
            self.abort(500)
def handle_404(request, response, exception):
    """Log the originating exception and render a plain 404 page."""
    logging.exception(exception)
    body, status = 'Sorry, nothing at this URL!', 404
    response.write(body)
    response.set_status(status)
def handle_500(request, response, exception):
    """Log the originating exception and render a plain 500 page."""
    logging.exception(exception)
    body, status = 'A server error occurred!', 500
    response.write(body)
    response.set_status(status)
# Enable debug mode only on the App Engine development server.
debug = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# URL routing table; several alias patterns map to the same handler so the
# endpoints can be called with or without the named query prefixes.
app = webapp2.WSGIApplication(routes=[
    webapp2.Route(r'/', handler='main.MainPage', name='home'),
    webapp2.Route(r'/search_output=<search_output:[^/]+>', handler='main.MainPage', name='search_output'),
    webapp2.Route(r'/rssguid_output=<rssguid_output:[^/]+>', handler='main.MainPage', name='rssguid_output'),
    webapp2.Route(r'/search', handler='main.Search'),
    webapp2.Route(r'/rss', handler='main.Rss'),
    webapp2.Route(r'/twitter', handler='main.Twitter'),
    webapp2.Route(r'/search/pubmed/string=<string:[^/]+>', handler='main.SearchPubmed', name='string'),
    webapp2.Route(r'/search/pubmed/<string:[^/]+>', handler='main.SearchPubmed', name='string'),
    webapp2.Route(r'/rss/pubmed/string=<string:[^/]+>&feeds=<feeds:[^/]+>', handler='main.RssPubmed', name='string'),
    webapp2.Route(r'/rss/pubmed/<string:[^/]+>&<feeds:[^/]+>', handler='main.RssPubmed', name='string'),
    webapp2.Route(r'/rss/pubmed/string=<string:[^/]+>', handler='main.RssPubmed', name='string'),
    webapp2.Route(r'/rss/pubmed/<string:[^/]+>', handler='main.RssPubmed', name='string'),
    webapp2.Route(r'/twitter_bot&rss_guid=<rss_guid:[^/]+>', handler='main.RssBot', name='rss_guid'),
    webapp2.Route(r'/twitter_bot&<rss_guid:[^/]+>', handler='main.RssBot', name='rss_guid'),
    webapp2.Route(r'/twitter_bot', handler='main.RssBot', name='rss_guid'),
], debug=debug)
# Custom error pages (defined above).
app.error_handlers[404] = handle_404
app.error_handlers[500] = handle_500
| biomadeira/gae_pubmed2rss | main.py | Python | mit | 5,320 |
# Need to import the plotting package:
import matplotlib.pyplot as plt
from pylab import *
import numpy as np

# Parse 'data.dat' (comma-separated "x,y" rows). A with-statement guarantees
# the file handle is closed even if a row fails to parse (the original only
# closed it on the success path).
x1 = []
y1 = []
with open('data.dat', 'r') as f2:
    for line in f2:
        p = line.split(',')
        x1.append(float(p[0]))
        y1.append(float(p[1]))

xv = np.array(x1)
yv = np.array(y1)

# now, plot the data:
plt.plot(xv, yv)

# Axis labels and title (typo "behaivor" fixed):
xlabel('Iterations')
ylabel('Value')
title('GA behavior')
grid(True)
plt.show()
| VictorRodriguez/personal | GP_tutorial/homework3/file_plot.py | Python | apache-2.0 | 697 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code for backpropagation using the tape utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import operator
import sys
import six
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.eager import context
from tensorflow.python.eager import execute
from tensorflow.python.eager import imperative_grad
from tensorflow.python.eager import tape
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops.unconnected_gradients import UnconnectedGradients
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util import tf_inspect
from tensorflow.python.util.lazy_loader import LazyLoader
from tensorflow.python.util.tf_export import tf_export
# Note that we need to lazy load the following two modules to avoid creating
# circular dependencies.
# TODO(b/119775953): fix the circular dependencies.
pfor_ops = LazyLoader(
"pfor_ops", globals(),
"tensorflow.python.ops.parallel_for.control_flow_ops")
function = LazyLoader("function", globals(),
"tensorflow.python.eager.function")
# Memoizes (op_type, attr_name) -> attribute type lookups into the C layer.
_op_attr_type_cache = {}


def op_attr_type(op_type, attr_name):
  """Returns the (cached) attribute type of `attr_name` on `op_type` ops."""
  key = (op_type, attr_name)
  cached = _op_attr_type_cache.get(key)
  if cached is None:
    # Cache miss: ask the C API, then remember the answer.
    handle = context.context()._handle  # pylint: disable=protected-access
    cached = pywrap_tensorflow.TFE_OpNameGetAttrType(handle, op_type,
                                                     attr_name)
    _op_attr_type_cache[key] = cached
  return cached
def make_attr(attr_type, value):
  """Converts `value` to the Python object matching `attr_type`.

  Type and shape attrs (scalar or list-valued) are converted to `DType` /
  `TensorShapeProto`; every other attr type is passed through unchanged.
  """
  if attr_type == pywrap_tensorflow.TF_ATTR_TYPE:
    return dtypes.as_dtype(value)
  if attr_type == [pywrap_tensorflow.TF_ATTR_TYPE]:
    return [dtypes.as_dtype(elem) for elem in value]
  if attr_type == pywrap_tensorflow.TF_ATTR_SHAPE:
    return tensor_shape.as_shape(value).as_proto()
  if attr_type == [pywrap_tensorflow.TF_ATTR_SHAPE]:
    return [tensor_shape.as_shape(elem).as_proto() for elem in value]
  return value
class _MockOp(object):
  """Stand-in for a tf.Operation when invoking registered gradient functions."""

  def __init__(self, attrs, inputs, outputs, typ):
    self.attrs = attrs
    self.inputs = inputs
    self.outputs = outputs
    self.type = typ

  def get_attr(self, attr):
    """Looks up `attr` in the flat (name, value, name, value, ...) tuple."""
    attr_type = op_attr_type(self.type, attr)
    # self.attrs alternates names and values; walk the pairs.
    for name, value in zip(self.attrs[0::2], self.attrs[1::2]):
      if name == attr:
        return make_attr(attr_type, value)
    raise KeyError(attr)

  def _get_control_flow_context(self):
    raise NotImplementedError(
        "tf.GradientTape.gradients() does not support graph control flow "
        "operations like tf.cond or tf.while at this time. Use tf.gradients() "
        "instead. If you need this feature, please file a feature request at "
        "https://github.com/tensorflow/tensorflow/issues/new"
    )
def _gradient_function(op_name, attr_tuple, num_inputs, inputs, outputs,
                       out_grads):
  """Calls the registered gradient function of the op.

  Args:
    op_name: the name of the op to be differentiated.
    attr_tuple: the attrs, as a flat (name, value, ...) tuple.
    num_inputs: the number of inputs to the op.
    inputs: inputs to the original operation.
    outputs: outputs of the original operation.
    out_grads: gradients of the operation wrt its outputs.

  Returns:
    The gradients with respect to the inputs of the function, as a list.
  """
  grad_fn = ops._gradient_registry.lookup(op_name)  # pylint: disable=protected-access
  if grad_fn is None:
    # No registered gradient: every input gradient is None.
    return [None] * num_inputs
  mock_op = _MockOp(attr_tuple, inputs, outputs, op_name)
  return grad_fn(mock_op, *out_grads)


# Make the C eager runtime call back into the Python gradient registry.
pywrap_tensorflow.TFE_Py_RegisterGradientFunction(_gradient_function)
def _record_gradient(op_name, inputs, attrs, results, name):
  """Thin wrapper over the C fast path that records an op on the active tape."""
  return pywrap_tensorflow.TFE_Py_RecordGradient(op_name, inputs, attrs,
                                                 results, name)


# Route the eager execute() path's gradient recording through the C
# implementation above.
execute.record_gradient = _record_gradient
def implicit_val_and_grad(f):
  """Returns a function computing f's value and gradient w.r.t. variables.

  The wrapped function, when called with the same arguments as `f`, runs `f`
  under a fresh tape and returns a pair `(value, grads_and_vars)` where
  `grads_and_vars` is a list of (gradient, variable) pairs for every
  trainable TFE variable accessed while computing `f`. Useful when the exact
  set of variables to differentiate against is not known ahead of time.

  Example:

  ```python
  dense_layer = tf.layers.Dense(1)
  def loss(x, y):
    return tf.reduce_sum(tf.square(dense_layer(x) - y))
  val_grad_fn = tfe.implicit_value_and_gradients(loss)
  value, grads_and_vars = val_grad_fn(x, y)
  tf.train.GradientDescentOptimizer(0.1).apply_gradients(grads_and_vars)
  ```

  Args:
    f: function to be differentiated. If `f` returns a scalar, this scalar
      will be differentiated. If `f` returns a tensor or list of tensors, by
      default a scalar will be computed by adding all their values to produce
      a single scalar.

  Returns:
    A function which, when called, returns a tuple pair: the value to which
    `f` evaluates, and a list of (gradient, variable) pairs.

  Raises:
    ValueError: if `f` returns None.
  """
  # TODO(cais): Remove calls to tf.constant() once the gradients functions
  # accept lists and np.ndarrays.
  def grad_fn(*args, **kwds):
    """Runs f under a tape and differentiates w.r.t. touched variables."""
    accumulator = tape.push_new_tape()
    try:
      result = f(*args, **kwds)
      if result is None:
        raise ValueError("Cannot differentiate a function that returns None; "
                         "did you forget to return a value from {}?".format(
                             f.__name__))
    finally:
      # Always deactivate the tape, even if f raised.
      tape.pop_tape(accumulator)
    # Note: variables are returned in construction order. This ensures unique
    # order across executions.
    variables = accumulator.watched_variables()
    if not variables:
      raise ValueError("No trainable variables were accessed while the "
                       "function was being computed.")
    handles = [v.handle for v in variables]
    grads = imperative_grad.imperative_grad(accumulator,
                                            nest.flatten(result), handles)
    return result, list(zip(grads, variables))
  return grad_fn
def implicit_grad(f):
  """Returns a function which differentiates f with respect to variables.

  The wrapped function returns only the gradient part of
  `implicit_val_and_grad(f)`: a list of (gradient, variable) pairs covering
  all trainable TFE variables accessed by `f`. Useful when the exact set of
  variables to differentiate against is not known ahead of time.

  Example:

  ```python
  dense_layer = tf.layers.Dense(1)
  def loss(x, y):
    return tf.reduce_sum(tf.square(dense_layer(x) - y))
  grad_fn = tfe.implicit_gradients(loss)
  grads_and_vars = grad_fn(x, y)
  tf.train.GradientDescentOptimizer(0.1).apply_gradients(grads_and_vars)
  ```

  Args:
    f: function to be differentiated. If `f` returns a scalar, this scalar
      will be differentiated. If `f` returns a tensor or list of tensors, by
      default a scalar will be computed by adding all their values to produce
      a single scalar.

  Returns:
    A function which, when called, returns a list of (gradient, variable)
    pairs.
  """
  # TODO(cais): Remove calls to tf.constant() once the gradients functions
  # accept lists and np.ndarrays.
  def grad_fn(*args, **kwds):
    """Drops the value from implicit_val_and_grad's (value, grads) pair."""
    _, grads_and_vars = implicit_val_and_grad(f)(*args, **kwds)
    return grads_and_vars
  return grad_fn
def _get_arg_spec(f, params, param_args):
  """The positions of the parameters of f to be differentiated in param_args."""
  try:
    args = tf_inspect.getfullargspec(f).args
  except TypeError as e:
    # Callable objects (rather than plain functions) cannot be inspected by
    # argument name; fall back to positional selection only.
    if params is None:
      return range(len(param_args))
    if all(isinstance(x, int) for x in params):
      return params
    raise ValueError("Either callable provided is not a function or could not "
                     "inspect its arguments by name: %s. Original error: %s"
                     % (f, e))
  if params is None:
    # Differentiate w.r.t. everything; prefer the declared arg count when
    # the signature exposes one.
    return range(len(param_args)) if not args else range(len(args))
  if all(isinstance(x, six.string_types) for x in params):
    # Map parameter names to their positions in the signature.
    return [args.index(n) for n in params]
  if all(isinstance(x, int) for x in params):
    return params
  raise ValueError(
      "params must be all strings or all integers; got %s." % params)
def gradients_function(f, params=None):
  """Returns a function which differentiates f with respect to params.

  Example:

  ```python
  # f(x, y) = (x ^ 3) * y - x * (y ^ 2)
  def f(x, y):
    return x * x * x * y - x * y * y
  grad_fn = tfe.gradients_function(f)
  x_grad, y_grad = grad_fn(2.0, 3.0)
  assert x_grad.numpy() == 3 * (2 ** 2) * 3 - 3 ** 2   # df / dx
  assert y_grad.numpy() == (2 ** 3) - 2 * 2 * 3        # df / dy
  # Higher-order gradients compose:
  gradgrad_fn = tfe.gradients_function(lambda x, y: grad_fn(x, y)[0])
  assert gradgrad_fn(2.0, 3.0)[0].numpy() == 6 * 2 * 3  # d^2 f / (dx)^2
  # `params` restricts differentiation to a subset of the inputs:
  ygrad_fn = tfe.gradients_function(f, params=[1])
  (y_grad,) = ygrad_fn(2.0, 3.0)
  ```

  Note that only tensors with real or complex dtypes are differentiable.

  Args:
    f: function to be differentiated. If `f` returns a scalar, this scalar
      will be differentiated. If `f` returns a tensor or list of tensors, by
      default a scalar will be computed by adding all their values to produce
      a single scalar. If desired, the tensors can be elementwise multiplied
      by the tensors passed as the `dy` keyword argument to the returned
      gradient function.
    params: list of parameter names of f or list of integers indexing the
      parameters with respect to which we'll differentiate. Passing None
      differentiates with respect to all parameters.

  Returns:
    function which, when called, returns the gradient of `f` with respect to
    all of `params`. The function takes an extra optional keyword argument
    `dy`. Setting it allows computation of vector jacobian products for
    vectors other than the vector of ones.

  Raises:
    ValueError: if the params are not all strings or all integers.
  """
  def decorated(*args, **kwds):
    """Computes the gradient of the decorated function."""
    value_and_grad = val_and_grad_function(f, params=params)
    _, grad = value_and_grad(*args, **kwds)
    return grad

  return decorated
def _ensure_unique_tensor_objects(parameter_positions, args):
  """Make each of the parameter_positions in args a unique ops.Tensor object.

  Ensures each differentiated parameter is treated independently; e.g. for
  `g = gradients_function(lambda x, y: x * y)`, `g(one, one)` should return
  `[1., 1.]` even when both arguments are the same Tensor object.

  Args:
    parameter_positions: List of indices into args defining the arguments to
      differentiate against.
    args: A list of arguments to the function to be differentiated.

  Returns:
    args, possibly edited in-place.
  """
  seen_ids = set()
  for position, tensor in enumerate(args):
    if position not in parameter_positions:
      continue
    tensor_id = ops.tensor_id(tensor)
    if tensor_id in seen_ids:
      # Duplicate object: replace it with a fresh identity so the tape sees
      # a distinct tensor.
      args[position] = gen_array_ops.identity(args[position])
    else:
      seen_ids.add(tensor_id)
  return args
def val_and_grad_function(f, params=None):
  """Returns a function that computes f and its derivative w.r.t. params.

  Example:

  ```python
  # f(x, y) = (x ^ 3) * y - x * (y ^ 2)
  def f(x, y):
    return x * x * x * y - x * y * y
  val_grads_fn = tfe.value_and_gradients_function(f)
  f_val, (x_grad, y_grad) = val_grads_fn(2.0, 3.0)
  assert f_val.numpy() == (2 ** 3) * 3 - 2 * (3 ** 2)
  assert x_grad.numpy() == 3 * (2 ** 2) * 3 - 3 ** 2
  assert y_grad.numpy() == (2 ** 3) - 2 * 2 * 3
  # `params` restricts differentiation to a subset of the inputs:
  val_ygrad_fn = tfe.value_and_gradients_function(f, params=[1])
  f_val, (y_grad,) = val_ygrad_fn(2.0, 3.0)
  ```

  Args:
    f: function to be differentiated. If `f` returns a scalar, this scalar
      will be differentiated. If `f` returns a tensor or list of tensors, by
      default a scalar will be computed by adding all their values to produce
      a single scalar. If desired, the tensors can be elementwise multiplied
      by the tensors passed as the `dy` keyword argument to the returned
      gradient function.
    params: list of parameter names of f or list of integers indexing the
      parameters with respect to which we'll differentiate. Passing `None`
      differentiates with respect to all parameters.

  Returns:
    function which, when called, returns the value of f and the gradient
    of f with respect to all of `params`. The function takes an extra
    optional keyword argument "dy". Setting it allows computation of vector
    jacobian products for vectors other than the vector of ones.

  Raises:
    ValueError: if the params are not all strings or all integers.
  """
  def decorated(*args, **kwds):
    """Computes the value and gradient of the decorated function."""
    dy = kwds.pop("dy", None)
    if kwds:
      raise ValueError("Functions to be differentiated cannot "
                       "receive keyword arguments.")
    # make_vjp gives us the value and a vector-jacobian-product closure.
    value, vjp = make_vjp(f, params)(*args, **kwds)
    return value, vjp(dy=dy)

  return decorated
def make_vjp(f, params=None, persistent=True):
  """Returns a function that computes f and its vjp w.r.t. params.

  The term "vjp" here is an abbreviation for vector-jacobian product.

  Args:
    f: the function to be differentiated.
    params: the parameters (numbers or names) to differentiate with respect to.
      A value of None will differentiate with respect to all parameters.
    persistent: Boolean controlling whether the VJP function can be re-used.
      Must be True or False.

  Returns:
    A function, which when called, returns a tuple (value, vjp), where:
    - value is the result of calling f.
    - vjp is a function, which takes a vector as an argument and
      returns the product of that vector with the Jacobian of f.
      Providing no argument to vjp is equivalent to providing a
      vector of ones.

    For example,
    ```python
    def f(x):
      return x * x

    wrapped_fn = tfe.make_vjp(f)
    result, vjp = wrapped_fn(tf.constant(3.0))
    # result is 9.0
    vjp()  # the vjp function returns 6.0
    ```

  Raises:
    ValueError: if `f` returns None.
  """
  def decorated(*args, **kwds):
    """Computes the value and gradient of the decorated function."""
    parameter_positions = _get_arg_spec(f, params, args)
    assert not kwds, "The gradient function can't take keyword arguments."
    this_tape = tape.push_new_tape(persistent=persistent)
    try:
      sources = []
      # Convert only the differentiated positions to tensors; other args are
      # passed through untouched.
      args = [
          ops.convert_to_tensor(args[i])
          if i in parameter_positions else args[i]
          for i in range(len(args))
      ]
      args = _ensure_unique_tensor_objects(parameter_positions, args)
      for i in parameter_positions:
        sources.append(args[i])
        tape.watch(this_tape, args[i])
      result = f(*args)
      if result is None:
        raise ValueError("Cannot differentiate a function that returns None; "
                         "did you forget to return a value from {}?".format(
                             f.__name__))
      # Flatten through identity ops so the tape records an edge for every
      # output, then restore the original structure.
      flat_result = nest.flatten(result)
      flat_result = [gen_array_ops.identity(x) for x in flat_result]
      result = nest.pack_sequence_as(result, flat_result)
    finally:
      # Deactivate the tape even if f raised.
      tape.pop_tape(this_tape)
    def vjp(dy=None):
      # Closes over this_tape/result/sources; re-usable iff persistent=True.
      if dy is not None:
        dy = [ops.convert_to_tensor(x) for x in nest.flatten(dy)]
      return imperative_grad.imperative_grad(
          this_tape, nest.flatten(result), sources, output_gradients=dy)
    return result, vjp
  return decorated
def _aggregate_grads(gradients):
  """Aggregate gradients from multiple sources.

  Args:
    gradients: A list of 'Tensor' or 'IndexedSlices' gradients.

  Returns:
    If 'gradients' only has 'Tensor', returns an aggregated 'Tensor'.
    Otherwise returns an aggregated 'IndexedSlices'.
  """
  assert gradients, "No gradients to aggregate"
  if len(gradients) == 1:
    return gradients[0]
  if all(isinstance(g, ops.Tensor) for g in gradients):
    # Fast path: plain dense add.
    return gen_math_ops.add_n(gradients)
  assert all(isinstance(g, (ops.Tensor, ops.IndexedSlices))
             for g in gradients)
  # Mixed case: densify every Tensor into an IndexedSlices covering all of
  # its rows, then concatenate all slices.
  indexed_slices_list = []
  for grad in gradients:
    # TODO(xpan): Support nested IndexedSlices and core IndexedSlices
    if isinstance(grad, ops.IndexedSlices):
      indexed_slices_list.append(grad)
    else:
      indexed_slices_list.append(
          ops.IndexedSlices(grad,
                            math_ops.range(grad.shape[0]),
                            constant_op.constant(grad.shape.as_list())))
  # Dense shapes from all gradients should be the same.
  dense_shape = indexed_slices_list[0].dense_shape
  # For simplicity now, always cast to int64.
  indices = array_ops.concat(
      [math_ops.cast(s.indices, dtypes.int64) for s in indexed_slices_list], 0)
  values = array_ops.concat([s.values for s in indexed_slices_list], 0)
  return ops.IndexedSlices(values, indices, dense_shape)
def _num_elements(grad):
  """The number of elements in the `grad` tensor (0 if shape is unknown)."""
  if isinstance(grad, ops.Tensor):
    shape_tuple = grad._shape_tuple()  # pylint: disable=protected-access
    # Unknown or partially-known static shapes count as zero elements.
    if shape_tuple is None or None in shape_tuple:
      return 0
    size = 1
    for dim in shape_tuple:
      size *= dim
    return size
  if isinstance(grad, ops.IndexedSlices):
    size = 1
    for dim in grad.values._shape_tuple():  # pylint: disable=protected-access
      size *= dim
    return size
  raise ValueError("`grad` not a Tensor or IndexedSlices.")
def _fast_fill(value, shape, dtype):
  """Builds a `shape`-sized tensor of `value` via fill of a scalar constant."""
  dims = constant_op.constant(shape, dtype=dtypes.int32)
  scalar = constant_op.constant(value, dtype=dtype)
  return array_ops.fill(dims, scalar)
def _zeros(shape, dtype):
  """Helper to return (possibly cached) zero tensors in eager mode."""
  if dtype in (dtypes.variant, dtypes.string, dtypes.resource):
    # TODO(apassos): need to save enough information about variant tensors to
    # do a zeros
    return None
  ctx = context.context()
  if not ctx.executing_eagerly():
    return array_ops.zeros(shape, dtype)
  # Cache per (shape, dtype, device) so repeated backprops reuse the tensor.
  cache_key = shape, dtype, ctx.device_name
  cached = ctx.zeros_cache().get(cache_key)
  if cached is None:
    value = False if dtypes.as_dtype(dtype).is_bool else 0
    cached = _fast_fill(value, shape, dtype)
    ctx.zeros_cache().put(cache_key, cached)
  return cached
def _ones(shape, dtype):
  """Helper returning a ones tensor, or None for string dtypes."""
  if dtypes.as_dtype(dtype) == dtypes.string:
    return None
  if not context.context().executing_eagerly():
    return array_ops.ones(shape, dtype)
  value = True if dtypes.as_dtype(dtype).is_bool else 1
  if shape == ():  # pylint: disable=g-explicit-bool-comparison
    # Scalars skip the fill op entirely.
    return constant_op.constant(value, dtype=dtype)
  return _fast_fill(value, shape, dtype)
# The default gradient "vector space": tells the gradient machinery how to
# count elements, aggregate multiple incoming gradients, and materialize
# zeros/ones for ordinary tensors.
_default_vspace = imperative_grad.VSpace(
    num_elements_fn=_num_elements,
    aggregate_fn=_aggregate_grads,
    zeros_fn=_zeros,
    ones_fn=_ones,
    graph_shape_fn=gen_array_ops.shape)
# Hand the vspace to the C gradient implementation.
pywrap_tensorflow.TFE_Py_RegisterVSpace(_default_vspace)
def _handle_or_self(x):
  """If x is ResourceVariable, return its handle, else x."""
  if resource_variable_ops.is_resource_variable(x):
    return x.handle
  return x
@tf_export("GradientTape")
class GradientTape(object):
"""Record operations for automatic differentiation.
Operations are recorded if they are executed within this context manager and
at least one of their inputs is being "watched".
Trainable variables (created by `tf.Variable` or `tf.get_variable`, where
`trainable=True` is default in both cases) are automatically watched. Tensors
can be manually watched by invoking the `watch` method on this context
manager.
For example, consider the function `y = x * x`. The gradient at `x = 3.0` can
be computed as:
```python
x = tf.constant(3.0)
with tf.GradientTape() as g:
g.watch(x)
y = x * x
dy_dx = g.gradient(y, x) # Will compute to 6.0
```
GradientTapes can be nested to compute higher-order derivatives. For example,
```python
x = tf.constant(3.0)
with tf.GradientTape() as g:
g.watch(x)
with tf.GradientTape() as gg:
gg.watch(x)
y = x * x
dy_dx = gg.gradient(y, x) # Will compute to 6.0
d2y_dx2 = g.gradient(dy_dx, x) # Will compute to 2.0
```
By default, the resources held by a GradientTape are released as soon as
GradientTape.gradient() method is called. To compute multiple gradients over
the same computation, create a persistent gradient tape. This allows multiple
calls to the gradient() method as resources are released when the tape object
is garbage collected. For example:
```python
x = tf.constant(3.0)
with tf.GradientTape(persistent=True) as g:
g.watch(x)
y = x * x
z = y * y
dz_dx = g.gradient(z, x) # 108.0 (4*x^3 at x = 3)
dy_dx = g.gradient(y, x) # 6.0
del g # Drop the reference to the tape
```
By default GradientTape will automatically watch any trainable variables that
are accessed inside the context. If you want fine grained control over which
variables are watched you can disable automatic tracking by passing
`watch_accessed_variables=False` to the tape constructor:
```python
with tf.GradientTape(watch_accessed_variables=False) as tape:
tape.watch(variable_a)
y = variable_a ** 2 # Gradients will be available for `variable_a`.
    z = variable_b ** 3  # No gradients will be available since `variable_b` is
# not being watched.
```
Note that when using models you should ensure that your variables exist when
using `watch_accessed_variables=False`. Otherwise it's quite easy to make your
first iteration not have any gradients:
```python
a = tf.keras.layers.Dense(32)
b = tf.keras.layers.Dense(32)
with tf.GradientTape(watch_accessed_variables=False) as tape:
tape.watch(a.variables) # Since `a.build` has not been called at this point
# `a.variables` will return an empty list and the
# tape will not be watching anything.
result = b(a(inputs))
tape.gradient(result, a.variables) # The result of this computation will be
# a list of `None`s since a's variables
# are not being watched.
```
Note that only tensors with real or complex dtypes are differentiable.
"""
def __init__(self, persistent=False, watch_accessed_variables=True):
"""Creates a new GradientTape.
Args:
persistent: Boolean controlling whether a persistent gradient tape
is created. False by default, which means at most one call can
be made to the gradient() method on this object.
watch_accessed_variables: Boolean controlling whether the tape will
automatically `watch` any (trainable) variables accessed while the tape
is active. Defaults to True meaning gradients can be requested from any
result computed in the tape derived from reading a trainable `Variable`.
If False users must explicitly `watch` any `Variable`s they want to
request gradients from.
"""
self._tape = None
self._persistent = persistent
self._watch_accessed_variables = watch_accessed_variables
self._recording = False
self._created_eagerly = context.executing_eagerly()
if self._created_eagerly:
context.context().start_step()
  def __enter__(self):
    """Enters a context inside which operations are recorded on this tape."""
    self._push_tape()
    return self
  def __exit__(self, typ, value, traceback):
    """Exits the recording context, no further operations are traced."""
    # The tape may already be inactive, e.g. after gradient() was called on a
    # non-persistent tape inside the context.
    if self._recording:
      self._pop_tape()
  def _push_tape(self):
    """Activates recording, creating the underlying tape on first use."""
    if self._recording:
      raise ValueError("Tape is already recording.")
    if self._tape is None:
      # First activation: build a fresh tape.
      self._tape = tape.push_new_tape(
          persistent=self._persistent,
          watch_accessed_variables=self._watch_accessed_variables)
    else:
      # Re-activation (e.g. after stop_recording): reuse the existing tape.
      tape.push_tape(self._tape)
    self._recording = True
  def _pop_tape(self):
    """Deactivates recording; the tape object itself is kept for gradient()."""
    if not self._recording:
      raise ValueError("Tape is not recording.")
    tape.pop_tape(self._tape)
    self._recording = False
  def __del__(self):
    # Balance the start_step() from __init__. Swallow AttributeError/TypeError
    # which can occur during interpreter shutdown when module state is
    # already partially torn down.
    if self._created_eagerly:
      try:
        context.context().end_step()
      except AttributeError:
        pass
      except TypeError:
        pass
  def watch(self, tensor):
    """Ensures that `tensor` is being traced by this tape.

    Args:
      tensor: a Tensor or list of Tensors (variables are accepted too).
    """
    for t in nest.flatten(tensor):
      if hasattr(t, "handle"):
        # There are many variable-like objects, all of them currently have
        # `handle` attribute that points to a tensor. If this changes,
        # internals of watch_variable need to change as well.
        tape.watch_variable(self._tape, t)
      else:
        tape.watch(self._tape, t)
  @tf_contextlib.contextmanager
  def stop_recording(self):
    """Temporarily stops recording operations on this tape.

    Operations executed while this context manager is active will not be
    recorded on the tape. This is useful for reducing the memory used by
    tracing all computations.

    For example:

    ```
    with tf.GradientTape(persistent=True) as t:
      loss = compute_loss(model)
      with t.stop_recording():
        # The gradient computation below is not traced, saving memory.
        grads = t.gradient(loss, model.variables)
    ```

    Yields:
      None

    Raises:
      RuntimeError: if the tape is not currently recording.
    """
    if self._tape is None:
      raise RuntimeError(
          "Trying to stop recording a tape which is not recording.")
    self._pop_tape()
    try:
      yield
    finally:
      # Reactivate recording even if the body raised.
      self._push_tape()
  def reset(self):
    """Clears all information stored in this tape.

    Equivalent to exiting and reentering the tape context manager with a new
    tape. For example, the two following code blocks are equivalent:

    ```
    with tf.GradientTape() as t:
      loss = loss_fn()
    with tf.GradientTape() as t:
      loss += other_loss_fn()
    t.gradient(loss, ...)  # Only differentiates other_loss_fn, not loss_fn

    # The following is equivalent to the above
    with tf.GradientTape() as t:
      loss = loss_fn()
      t.reset()
      loss += other_loss_fn()
    t.gradient(loss, ...)  # Only differentiates other_loss_fn, not loss_fn
    ```

    This is useful if you don't want to exit the context manager for the
    tape, or can't because the desired reset point is inside a control flow
    construct:

    ```
    with tf.GradientTape() as t:
      loss = ...
      if loss > k:
        t.reset()
    ```
    """
    # Dropping self._tape makes _push_tape build a brand-new (empty) tape.
    self._pop_tape()
    self._tape = None
    self._push_tape()
  def watched_variables(self):
    """Returns variables watched by this tape in order of construction."""
    return self._tape.watched_variables()
def gradient(self,
target,
sources,
output_gradients=None,
unconnected_gradients=UnconnectedGradients.NONE):
"""Computes the gradient using operations recorded in context of this tape.
Args:
target: Tensor (or list of tensors) to be differentiated.
sources: a list or nested structure of Tensors or Variables. `target`
will be differentiated against elements in `sources`.
output_gradients: a list of gradients, one for each element of
target. Defaults to None.
unconnected_gradients: a value which can either hold 'none' or 'zero' and
alters the value which will be returned if the target and sources are
unconnected. The possible values and effects are detailed in
'UnconnectedGradients' and it defaults to 'none'.
Returns:
a list or nested structure of Tensors (or IndexedSlices, or None),
one for each element in `sources`. Returned structure is the same as
the structure of `sources`.
Raises:
RuntimeError: if called inside the context of the tape, or if called more
than once on a non-persistent tape.
ValueError: if the target is a variable or if unconnected gradients is
called with an unknown value.
"""
if self._tape is None:
raise RuntimeError("GradientTape.gradient can only be called once on "
"non-persistent tapes.")
if self._recording:
if not self._persistent:
self._pop_tape()
else:
logging.log_first_n(logging.WARN,
"Calling GradientTape.gradient on a persistent "
"tape inside it's context is significantly less "
"efficient than calling it outside the context (it "
"causes the gradient ops to be recorded on the "
"tape, leading to increased CPU and memory usage). "
"Only call GradientTape.gradient inside the "
"context if you actually want to trace the "
"gradient in order to compute higher order "
"derrivatives.", 1)
flat_targets = []
for t in nest.flatten(target):
if resource_variable_ops.is_resource_variable(t):
with self:
t = ops.convert_to_tensor(t)
flat_targets.append(t)
flat_sources = nest.flatten(sources)
flat_sources = [_handle_or_self(x) for x in flat_sources]
if output_gradients is not None:
output_gradients = [None if x is None else ops.convert_to_tensor(x)
for x in nest.flatten(output_gradients)]
flat_grad = imperative_grad.imperative_grad(
self._tape,
flat_targets,
flat_sources,
output_gradients=output_gradients,
unconnected_gradients=unconnected_gradients)
if not self._persistent:
self._tape = None
grad = nest.pack_sequence_as(sources, flat_grad)
return grad
  def jacobian(self,
               target,
               sources,
               unconnected_gradients=UnconnectedGradients.NONE,
               parallel_iterations=None,
               experimental_use_pfor=True):
    """Computes the jacobian using operations recorded in context of this tape.

    See http://en.wikipedia.org/wiki/jacobian_matrix_and_determinant for the
    definition of a Jacobian.

    Example usage:

    ```
    with tf.GradientTape() as g:
      x = tf.constant([1.0, 2.0])
      g.watch(x)
      y = x * x
    jacobian = g.jacobian(y, x)
    # jacobian value is [[2., 0.], [0., 4.]]
    ```

    Args:
      target: Tensor to be differentiated.
      sources: a list or nested structure of Tensors or Variables. `target`
        will be differentiated against elements in `sources`.
      unconnected_gradients: a value which can either hold 'none' or 'zero' and
        alters the value which will be returned if the target and sources are
        unconnected. The possible values and effects are detailed in
        'UnconnectedGradients' and it defaults to 'none'.
      parallel_iterations: A knob to control how many iterations are dispatched
        in parallel. This knob can be used to control the total memory usage.
      experimental_use_pfor: If true, vectorizes the jacobian computation. Else
        falls back to a sequential while_loop. Vectorization can sometimes fail
        or lead to excessive memory usage. This option can be used to disable
        vectorization in such cases.

    Returns:
      a list or nested structure of Tensors (or IndexedSlices, or None),
      one for each element in `sources`. Returned structure is the same as
      the structure of `sources`.

    Raises:
      RuntimeError: If called on a non-persistent tape with eager execution
        enabled and without enabling experimental_use_pfor.
      ValueError: If vectorization of jacobian computation fails.
    """
    flat_sources = nest.flatten(sources)
    target_static_shape = target.shape
    target_shape = array_ops.shape(target)
    # Note that we push and pop the tape here and below. This is needed since we
    # need gradients through the enclosed operations.
    self._push_tape()
    target = array_ops.reshape(target, [-1])
    self._pop_tape()
    def loop_fn(i):
      # Per-element gradient of the flattened target; reactivate the tape so
      # the gather itself is differentiable.
      self._push_tape()
      y = array_ops.gather(target, i)
      self._pop_tape()
      return self.gradient(y, flat_sources,
                           unconnected_gradients=unconnected_gradients)
    try:
      # Static size when known, dynamic tensor otherwise.
      target_size = int(target.shape[0])
    except TypeError:
      target_size = array_ops.shape(target)[0]
    if experimental_use_pfor:
      try:
        output = pfor_ops.pfor(loop_fn, target_size,
                               parallel_iterations=parallel_iterations)
      except ValueError as err:
        # Re-raise with guidance, preserving the original traceback.
        six.reraise(
            ValueError,
            ValueError(
                str(err) + "\nEncountered an exception while vectorizing the "
                "jacobian computation. Vectorization can be disabled by setting"
                " experimental_use_pfor to False."),
            sys.exc_info()[2])
    else:
      if context.executing_eagerly() and not self._persistent:
        raise RuntimeError(
            "GradientTape must be created with persistent=True"
            " to compute the jacobian with eager execution enabled and with "
            " experimental_use_pfor set to False.")
      output = pfor_ops.for_loop(
          loop_fn, [target.dtype] * len(flat_sources), target_size,
          parallel_iterations=parallel_iterations)
    for i, out in enumerate(output):
      if out is not None:
        # Restore the target's original shape in front of each source's shape.
        new_shape = array_ops.concat(
            [target_shape, array_ops.shape(out)[1:]], axis=0)
        out = array_ops.reshape(out, new_shape)
        if context.executing_eagerly():
          out.set_shape(target_static_shape.concatenate(flat_sources[i].shape))
      output[i] = out
    return nest.pack_sequence_as(sources, output)
  def batch_jacobian(self,
                     target,
                     source,
                     unconnected_gradients=UnconnectedGradients.NONE,
                     parallel_iterations=None,
                     experimental_use_pfor=True):
    """Computes and stacks per-example jacobians.

    See http://en.wikipedia.org/wiki/jacobian_matrix_and_determinant for the
    definition of a Jacobian. This function is essentially an efficient
    implementation of the following:

    `tf.stack([self.jacobian(y[i], x[i]) for i in range(x.shape[0])])`.

    Note that compared to `GradientTape.jacobian` which computes gradient of
    each output value w.r.t each input value, this function is useful when
    `target[i,...]` is independent of `source[j,...]` for `j != i`. This
    independence assumption allows more efficient computation as compared to
    `GradientTape.jacobian`. The output, as well as intermediate activations,
    are lower dimensional and avoid a bunch of redundant zeros which would
    result in the jacobian computation given the independence assumption.

    Example usage:

    ```python
    with tf.GradientTape() as g:
      x = tf.constant([[1, 2], [3, 4]], dtype=tf.float32)
      g.watch(x)
      y = x * x
    batch_jacobian = g.batch_jacobian(y, x)
    # batch_jacobian is [[[2, 0], [0, 4]], [[6, 0], [0, 8]]]
    ```

    Args:
      target: A tensor with rank 2 or higher and with shape [b, y1, ..., y_n].
        `target[i,...]` should only depend on `source[i,...]`.
      source: A tensor with rank 2 or higher and with shape [b, x1, ..., x_m].
      unconnected_gradients: a value which can either hold 'none' or 'zero' and
        alters the value which will be returned if the target and sources are
        unconnected. The possible values and effects are detailed in
        'UnconnectedGradients' and it defaults to 'none'.
      parallel_iterations: A knob to control how many iterations are dispatched
        in parallel. This knob can be used to control the total memory usage.
      experimental_use_pfor: If true, uses pfor for computing the Jacobian. Else
        uses a tf.while_loop.

    Returns:
      A tensor `t` with shape [b, y_1, ..., y_n, x1, ..., x_m] where `t[i, ...]`
      is the jacobian of `target[i, ...]` w.r.t. `source[i, ...]`, i.e. stacked
      per-example jacobians.

    Raises:
      RuntimeError: If called on a non-persistent tape with eager execution
        enabled and without enabling experimental_use_pfor.
      ValueError: If vectorization of jacobian computation fails or if first
        dimension of `target` and `source` do not match.
    """
    target_shape = target.shape
    # Extract the static batch dimension if the rank is known; otherwise use
    # an unknown Dimension so the compatibility check below is permissive.
    if target_shape.rank is None:
      dim = Dimension(None)
    else:
      dim = target_shape.dims[0]
    if not (target_shape.with_rank_at_least(2) and
            source.shape.with_rank_at_least(2) and
            dim.is_compatible_with(source.shape[0])):
      raise ValueError(
          "Need first dimension of target shape (%s) and "
          "source shape (%s) to match." % (target.shape, source.shape))
    # Prefer static shape information when fully defined, to avoid inserting
    # extra shape-computation ops into the graph.
    if target_shape.is_fully_defined():
      batch_size = int(target_shape[0])
      target_row_size = target_shape.num_elements() // batch_size
    else:
      target_shape = array_ops.shape(target)
      batch_size = target_shape[0]
      target_row_size = array_ops.size(target) // batch_size
    source_shape = array_ops.shape(source)
    # Flatten target to 2-D.
    # Note that we push and pop the tape here and below. This is needed since we
    # need gradients through the enclosed operations.
    self._push_tape()
    with ops.control_dependencies(
        [check_ops.assert_equal(batch_size, source_shape[0])]):
      target = array_ops.reshape(target, [batch_size, target_row_size])
    self._pop_tape()
    def loop_fn(i):
      # Gradient of the i-th column of the flattened target w.r.t. source.
      self._push_tape()
      y = array_ops.gather(target, i, axis=1)
      self._pop_tape()
      return self.gradient(y, source,
                           unconnected_gradients=unconnected_gradients)
    if experimental_use_pfor:
      try:
        output = pfor_ops.pfor(loop_fn, target_row_size,
                               parallel_iterations=parallel_iterations)
      except ValueError as err:
        # Re-raise with actionable guidance, preserving the original traceback.
        six.reraise(
            ValueError,
            ValueError(
                str(err) + "\nEncountered an exception while vectorizing the "
                "batch_jacobian computation. Vectorization can be disabled by "
                "setting experimental_use_pfor to False."),
            sys.exc_info()[2])
    else:
      # The while_loop fallback replays gradient computation per iteration,
      # which requires a persistent tape under eager execution.
      if context.executing_eagerly() and not self._persistent:
        raise RuntimeError(
            "GradientTape must be created with persistent=True"
            " to compute the batch_jacobian with eager execution enabled and "
            " with experimental_use_pfor set to False.")
      output = pfor_ops.for_loop(loop_fn, target.dtype, target_row_size,
                                 parallel_iterations=parallel_iterations)
    if output is None:
      return None
    # output is [target_row_size, batch_size, ...]; move the batch dimension
    # first, then restore the original target/source trailing dimensions.
    output = array_ops.reshape(output,
                               [target_row_size, batch_size, -1])
    output = array_ops.transpose(output, [1, 0, 2])
    new_shape = array_ops.concat([target_shape, source_shape[1:]], axis=0)
    return array_ops.reshape(output, new_shape)
| apark263/tensorflow | tensorflow/python/eager/backprop.py | Python | apache-2.0 | 42,517 |
from polyphony import module, pure
from polyphony import testbench
from polyphony import is_worker_running
from polyphony.io import Port
from polyphony.typing import int8
from polyphony.timing import clksleep, wait_value
# Child hardware module: its worker continuously reads an int8 value from
# port `i`, multiplies it by the constructor constant `param`, and writes
# the product to port `o`.
@module
class Submodule:
    @pure
    def __init__(self, param):
        # 8-bit input/output ports (direction given by the string argument).
        self.i = Port(int8, 'in')
        self.o = Port(int8, 'out')
        self.param = param
        # Register sub_worker as this module's worker process.
        self.append_worker(self.sub_worker)
    def sub_worker(self):
        # Loop for the lifetime of the simulation/hardware worker.
        while is_worker_running():
            v = self.i.rd() * self.param
            self.o.wr(v)
# Top-level module nesting two Submodule instances with different scale
# factors (x2 and x3). Its worker feeds both submodules, waits for the
# results to propagate, and reports whether both outputs are as expected.
@module
class Nesting03:
    @pure
    def __init__(self):
        self.sub1 = Submodule(2)
        self.sub2 = Submodule(3)
        self.append_worker(self.worker)
        self.start = Port(bool, 'in', init=False)
        # protocol='valid' -- presumably the reader blocks until a value has
        # been written; confirm against the polyphony.io.Port documentation.
        self.result = Port(bool, 'out', init=False, protocol='valid')
    def worker(self):
        # Block until the testbench raises `start`.
        wait_value(True, self.start)
        self.sub1.i.wr(10)
        self.sub2.i.wr(20)
        # Allow the submodule workers time to consume inputs and produce
        # outputs before sampling them.
        clksleep(10)
        result1 = self.sub1.o.rd() == 20
        result2 = self.sub2.o.rd() == 60
        self.result.wr(result1 and result2)
@testbench
def test(m):
    # Start the design's worker, then read the (blocking) result port and
    # expect both nested submodules to have produced correct outputs.
    m.start.wr(True)
    assert True == m.result.rd()
# Instantiate the design under test and run the testbench against it.
m = Nesting03()
test(m)
| ktok07b6/polyphony | tests/pure/nesting03.py | Python | mit | 1,211 |
__author__ = "Can Ozbek"
import pandas as pd
import numpy as np
import pylab
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix
import ml_aux_functions as ml_aux
#Read the files
df = pd.read_pickle("/Users/ahmetcanozbek/Desktop/660Stuff/msd.pkl")
df_train = pd.read_pickle("/Users/ahmetcanozbek/Desktop/660Stuff/msd_train.pkl") # 80%
df_test = pd.read_pickle("/Users/ahmetcanozbek/Desktop/660Stuff/msd_test.pkl") # 20%
df_train_t1 = pd.read_pickle("/Users/ahmetcanozbek/Desktop/660Stuff/msd_train_t1.pkl")
df_train_t2 = pd.read_pickle("/Users/ahmetcanozbek/Desktop/660Stuff/msd_train_t2.pkl")
df_train_t3 = pd.read_pickle("/Users/ahmetcanozbek/Desktop/660Stuff/msd_train_t3.pkl")
df_train_t4 = pd.read_pickle("/Users/ahmetcanozbek/Desktop/660Stuff/msd_train_t4.pkl")
df_train_t5 = pd.read_pickle("/Users/ahmetcanozbek/Desktop/660Stuff/msd_train_t5.pkl")
print "Reading Done."
| nishantnath/MusicPredictiveAnalysis_EE660_USCFall2015 | Code/Machine_Learning_Algos/10k_Tests/ml_classification_adaboost_onemillion.py | Python | mit | 908 |
from __future__ import unicode_literals
from django.core.files.uploadedfile import SimpleUploadedFile
from django.utils import six
from djblets.features.testing import override_feature_check
from djblets.webapi.errors import (INVALID_ATTRIBUTE, INVALID_FORM_DATA,
PERMISSION_DENIED)
from djblets.webapi.testing.decorators import webapi_test_template
from reviewboard.diffviewer.features import dvcs_feature
from reviewboard.diffviewer.models import DiffSet
from reviewboard.reviews.models import DefaultReviewer
from reviewboard.webapi.errors import DIFF_TOO_BIG
from reviewboard.webapi.resources import resources
from reviewboard.webapi.tests.base import BaseWebAPITestCase
from reviewboard.webapi.tests.mimetypes import (diff_item_mimetype,
diff_list_mimetype)
from reviewboard.webapi.tests.mixins import (BasicTestsMetaclass,
ReviewRequestChildItemMixin,
ReviewRequestChildListMixin)
from reviewboard.webapi.tests.mixins_extra_data import (ExtraDataItemMixin,
ExtraDataListMixin)
from reviewboard.webapi.tests.urls import (get_diff_item_url,
get_diff_list_url)
@six.add_metaclass(BasicTestsMetaclass)
class ResourceListTests(ExtraDataListMixin, ReviewRequestChildListMixin,
                        BaseWebAPITestCase):
    """Testing the DiffResource list APIs."""
    # Fixtures and attributes consumed by BasicTestsMetaclass to generate
    # the standard suite of list-resource tests.
    fixtures = ['test_users', 'test_scmtools']
    sample_api_url = 'review-requests/<id>/diffs/'
    resource = resources.diff
    def setup_review_request_child_test(self, review_request):
        # Hook for ReviewRequestChildListMixin: URL + expected mimetype.
        return get_diff_list_url(review_request), diff_list_mimetype
    def compare_item(self, item_rsp, diffset):
        # Field-by-field comparison of a serialized diff payload against
        # its DiffSet model instance.
        self.assertEqual(item_rsp['id'], diffset.pk)
        self.assertEqual(item_rsp['name'], diffset.name)
        self.assertEqual(item_rsp['revision'], diffset.revision)
        self.assertEqual(item_rsp['basedir'], diffset.basedir)
        self.assertEqual(item_rsp['base_commit_id'], diffset.base_commit_id)
        self.assertEqual(item_rsp['extra_data'], diffset.extra_data)
    #
    # HTTP GET tests
    #
    def setup_basic_get_test(self, user, with_local_site, local_site_name,
                             populate_items):
        review_request = self.create_review_request(
            create_repository=True,
            with_local_site=with_local_site,
            submitter=user,
            publish=True)
        if populate_items:
            items = [self.create_diffset(review_request)]
        else:
            items = []
        return (get_diff_list_url(review_request, local_site_name),
                diff_list_mimetype,
                items)
    #
    # HTTP POST tests
    #
    def setup_basic_post_test(self, user, with_local_site, local_site_name,
                              post_valid_data):
        repository = self.create_repository(tool_name='Test')
        review_request = self.create_review_request(
            with_local_site=with_local_site,
            repository=repository,
            submitter=user)
        diff = SimpleUploadedFile('diff', self.DEFAULT_GIT_README_DIFF,
                                  content_type='text/x-patch')
        if post_valid_data:
            post_data = {
                'path': diff,
                'basedir': '/trunk',
                'base_commit_id': '1234',
            }
        else:
            post_data = {}
        return (get_diff_list_url(review_request, local_site_name),
                diff_item_mimetype,
                post_data,
                [review_request])
    def check_post_result(self, user, rsp, review_request):
        # A successful POST must create a draft pointing at the new DiffSet.
        self.assertIn('diff', rsp)
        item_rsp = rsp['diff']
        draft = review_request.get_draft()
        self.assertIsNotNone(draft)
        diffset = DiffSet.objects.get(pk=item_rsp['id'])
        self.assertEqual(diffset, draft.diffset)
        self.compare_item(item_rsp, diffset)
    def test_post_with_missing_data(self):
        """Testing the POST review-requests/<id>/diffs/ API
        with Invalid Form Data
        """
        repository = self.create_repository(tool_name='Test')
        review_request = self.create_review_request(
            repository=repository,
            submitter=self.user)
        rsp = self.api_post(get_diff_list_url(review_request),
                            expected_status=400)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], INVALID_FORM_DATA.code)
        self.assertIn('path', rsp['fields'])
        # Now test with a valid path and an invalid basedir.
        # This is necessary because basedir is "optional" as defined by
        # the resource, but may be required by the form that processes the
        # diff.
        review_request = self.create_review_request(
            repository=repository,
            submitter=self.user)
        diff = SimpleUploadedFile('diff', self.DEFAULT_GIT_README_DIFF,
                                  content_type='text/x-patch')
        rsp = self.api_post(
            get_diff_list_url(review_request),
            {'path': diff},
            expected_status=400)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], INVALID_FORM_DATA.code)
        self.assertIn('basedir', rsp['fields'])
    def test_post_too_big(self):
        """Testing the POST review-requests/<id>/diffs/ API
        with diff exceeding max size
        """
        repository = self.create_repository()
        review_request = self.create_review_request(
            repository=repository,
            submitter=self.user)
        diff = SimpleUploadedFile('diff', self.DEFAULT_GIT_README_DIFF,
                                  content_type='text/x-patch')
        # Force an artificially tiny size limit so any real diff exceeds it.
        with self.siteconfig_settings({'diffviewer_max_diff_size': 2},
                                      reload_settings=False):
            rsp = self.api_post(
                get_diff_list_url(review_request),
                {
                    'path': diff,
                    'basedir': "/trunk",
                },
                expected_status=400)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], DIFF_TOO_BIG.code)
        self.assertIn('reason', rsp)
        self.assertIn('max_size', rsp)
        self.assertEqual(rsp['max_size'], 2)
    def test_post_not_owner(self):
        """Testing the POST review-requests/<id>/diffs/ API
        without owner
        """
        repository = self.create_repository(tool_name='Test')
        # No submitter=self.user: the requesting user does not own this
        # review request, so the POST must be rejected.
        review_request = self.create_review_request(repository=repository)
        diff = SimpleUploadedFile('diff', self.DEFAULT_GIT_README_DIFF,
                                  content_type='text/x-patch')
        rsp = self.api_post(
            get_diff_list_url(review_request),
            {
                'path': diff,
                'basedir': '/trunk',
            },
            expected_status=403)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], PERMISSION_DENIED.code)
    def test_post_no_repository(self):
        """Testing the POST review-requests/<id>/diffs API
        with a ReviewRequest that has no repository
        """
        review_request = self.create_review_request(submitter=self.user)
        diff = SimpleUploadedFile('diff', self.DEFAULT_GIT_README_DIFF,
                                  content_type='text/x-patch')
        rsp = self.api_post(
            get_diff_list_url(review_request),
            {
                'path': diff,
                'basedir': '/trunk',
            },
            expected_status=400)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], INVALID_ATTRIBUTE.code)
    @webapi_test_template
    def test_post_with_history(self):
        """Testing the POST <URL> API with a diff and a review request created
        with history support
        """
        review_request = self.create_review_request(submitter=self.user,
                                                    create_repository=True,
                                                    create_with_history=True)
        diff = SimpleUploadedFile('diff',
                                  self.DEFAULT_GIT_FILEDIFF_DATA_DIFF,
                                  content_type='text/x-patch')
        with override_feature_check(dvcs_feature.feature_id, enabled=True):
            # Whole-diff uploads are rejected for multi-commit review
            # requests; commits must be uploaded individually instead.
            rsp = self.api_post(
                get_diff_list_url(review_request),
                {
                    'path': diff,
                },
                expected_status=400)
            self.assertEqual(rsp['stat'], 'fail')
            self.assertEqual(rsp['err']['code'], INVALID_FORM_DATA.code)
            self.assertEqual(
                rsp['reason'],
                'This review request was created with support for multiple '
                'commits.\n\n'
                'Create an empty diff revision and upload commits to that '
                'instead.')
    @webapi_test_template
    def test_post_empty_with_history(self):
        """Testing the POST <URL> API creates an empty DiffSet for a review
        request created with history support with the DVCS feature enabled
        """
        review_request = self.create_review_request(submitter=self.user,
                                                    create_repository=True,
                                                    create_with_history=True)
        with override_feature_check(dvcs_feature.feature_id, enabled=True):
            rsp = self.api_post(get_diff_list_url(review_request), {},
                                expected_mimetype=diff_item_mimetype)
            self.assertEqual(rsp['stat'], 'ok')
            item_rsp = rsp['diff']
            diff = DiffSet.objects.get(pk=item_rsp['id'])
            self.compare_item(item_rsp, diff)
            # An empty diff revision: no FileDiffs, revision starts at 1.
            self.assertEqual(diff.files.count(), 0)
            self.assertEqual(diff.revision, 1)
    @webapi_test_template
    def test_post_empty_dvcs_disabled(self):
        """Testing the POST <URL> API without a diff with the DVCS feature
        disabled
        """
        review_request = self.create_review_request(submitter=self.user,
                                                    create_repository=True,
                                                    create_with_history=False)
        with override_feature_check(dvcs_feature.feature_id, enabled=False):
            # Without DVCS support, 'path' is strictly required.
            rsp = self.api_post(get_diff_list_url(review_request), {},
                                expected_status=400)
            self.assertEqual(rsp['stat'], 'fail')
            self.assertEqual(rsp['err']['code'], INVALID_FORM_DATA.code)
            self.assertEqual(rsp['fields'], {
                'path': ['This field is required.'],
            })
    @webapi_test_template
    def test_post_adds_default_reviewers(self):
        """Testing the POST <URL> API adds default reviewers"""
        review_request = self.create_review_request(submitter=self.user,
                                                    create_repository=True)
        # Create the state needed for the default reviewer.
        group = self.create_review_group(name='group1')
        default_reviewer = DefaultReviewer.objects.create(
            name='default1',
            file_regex='.')
        default_reviewer.groups.add(group)
        default_reviewer.repository.add(review_request.repository)
        # Post the diff.
        diff = SimpleUploadedFile('diff', self.DEFAULT_GIT_README_DIFF,
                                  content_type='text/x-patch')
        rsp = self.api_post(
            get_diff_list_url(review_request),
            {'path': diff},
            expected_mimetype=diff_item_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        draft = review_request.get_draft()
        self.assertEqual(list(draft.target_groups.all()), [group])
    @webapi_test_template
    def test_post_adds_default_reviewers_first_time_only(self):
        """Testing the POST <URL> API doesn't add default reviewers a second
        time
        """
        review_request = self.create_review_request(submitter=self.user,
                                                    create_repository=True)
        # Create the initial diffset. This should prevent a default
        # reviewer from being applied, since we're not publishing the first
        # diff on a review request.
        self.create_diffset(review_request=review_request)
        # Create the state needed for the default reviewer.
        group = self.create_review_group(name='group1')
        default_reviewer = DefaultReviewer.objects.create(
            name='default1',
            file_regex='.')
        default_reviewer.groups.add(group)
        default_reviewer.repository.add(review_request.repository)
        # Post the diff.
        diff = SimpleUploadedFile('diff', self.DEFAULT_GIT_README_DIFF,
                                  content_type='text/x-patch')
        rsp = self.api_post(
            get_diff_list_url(review_request),
            {'path': diff},
            expected_mimetype=diff_item_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        draft = review_request.get_draft()
        # Not the first diff, so no default reviewers should be applied.
        self.assertEqual(list(draft.target_groups.all()), [])
@six.add_metaclass(BasicTestsMetaclass)
class ResourceItemTests(ExtraDataItemMixin, ReviewRequestChildItemMixin,
                        BaseWebAPITestCase):
    """Testing the DiffResource item APIs."""
    # Fixtures and attributes consumed by BasicTestsMetaclass to generate
    # the standard suite of item-resource tests.
    fixtures = ['test_users', 'test_scmtools']
    sample_api_url = 'review-requests/<id>/diffs/<revision>/'
    resource = resources.diff
    def setup_review_request_child_test(self, review_request):
        # Ensure the review request has a repository before attaching a
        # diffset to it.
        if not review_request.repository:
            review_request.repository = self.create_repository()
            review_request.save()
        diffset = self.create_diffset(review_request)
        return (get_diff_item_url(review_request, diffset.revision),
                diff_item_mimetype)
    def setup_http_not_allowed_item_test(self, user):
        review_request = self.create_review_request(create_repository=True,
                                                    publish=True)
        return get_diff_item_url(review_request, 1)
    def compare_item(self, item_rsp, diffset):
        # Field-by-field comparison of a serialized diff payload against
        # its DiffSet model instance.
        self.assertEqual(item_rsp['id'], diffset.pk)
        self.assertEqual(item_rsp['name'], diffset.name)
        self.assertEqual(item_rsp['revision'], diffset.revision)
        self.assertEqual(item_rsp['basedir'], diffset.basedir)
        self.assertEqual(item_rsp['base_commit_id'], diffset.base_commit_id)
        self.assertEqual(item_rsp['extra_data'], diffset.extra_data)
    #
    # HTTP GET tests
    #
    def setup_basic_get_test(self, user, with_local_site, local_site_name):
        review_request = self.create_review_request(
            create_repository=True,
            with_local_site=with_local_site,
            submitter=user)
        diffset = self.create_diffset(review_request)
        return (get_diff_item_url(review_request, diffset.revision,
                                  local_site_name),
                diff_item_mimetype,
                diffset)
    def test_get_not_modified(self):
        """Testing the GET review-requests/<id>/diffs/<revision>/ API
        with Not Modified response
        """
        review_request = self.create_review_request(create_repository=True,
                                                    publish=True)
        diffset = self.create_diffset(review_request)
        self._testHttpCaching(
            get_diff_item_url(review_request, diffset.revision),
            check_etags=True)
    @webapi_test_template
    def test_get_with_patch_and_commit_history(self):
        """Testing the GET <API> API with Accept: x-patch and commit history
        contains only cumulative diff
        """
        review_request = self.create_review_request(create_repository=True,
                                                    publish=True)
        diffset = self.create_diffset(review_request=review_request)
        # Build a small commit history: r1 and r2 touch ABC and README,
        # r4 amends README again (r3 is intentionally absent from the
        # series to exercise non-contiguous parent ids).
        self.create_diffcommit(
            diffset=diffset,
            commit_id='r1',
            parent_id='r0',
            diff_contents=(
                b'diff --git a/ABC b/ABC\n'
                b'index 94bdd3e..197009f 100644\n'
                b'--- ABC\n'
                b'+++ ABC\n'
                b'@@ -1,1 +1,1 @@\n'
                b'-line!\n'
                b'+line..\n'
            ))
        self.create_diffcommit(
            diffset=diffset,
            commit_id='r2',
            parent_id='r1',
            diff_contents=(
                b'diff --git a/README b/README\n'
                b'index 94bdd3e..197009f 100644\n'
                b'--- README\n'
                b'+++ README\n'
                b'@@ -1,1 +1,1 @@\n'
                b'-Hello, world!\n'
                b'+Hi, world!\n'
            ))
        self.create_diffcommit(
            diffset=diffset,
            commit_id='r4',
            parent_id='r3',
            diff_contents=(
                b'diff --git a/README b/README\n'
                b'index 197009f..87abad9 100644\n'
                b'--- README\n'
                b'+++ README\n'
                b'@@ -1,1 +1,1 @@\n'
                b'-Hi, world!\n'
                b'+Yo, world.\n'
            ))
        # The squashed result of all commits above; this is what the
        # text/x-patch representation must serve.
        cumulative_diff = (
            b'diff --git a/ABC b/ABC\n'
            b'index 94bdd3e..197009f 100644\n'
            b'--- ABC\n'
            b'+++ ABC\n'
            b'@@ -1,1 +1,1 @@\n'
            b'-line!\n'
            b'+line..\n'
            b'diff --git a/README b/README\n'
            b'index 94bdd3e..87abad9 100644\n'
            b'--- README\n'
            b'+++ README\n'
            b'@@ -1,1 +1,1 @@\n'
            b'-Hello, world!\n'
            b'+Yo, world.\n'
        )
        diffset.finalize_commit_series(
            cumulative_diff=cumulative_diff,
            validation_info=None,
            validate=False,
            save=True)
        with override_feature_check(dvcs_feature.feature_id, enabled=True):
            rsp = self.api_get(get_diff_item_url(review_request,
                                                 diffset.revision),
                               HTTP_ACCEPT='text/x-patch',
                               expected_json=False,
                               expected_mimetype='text/x-patch')
        self.assertEqual(rsp, cumulative_diff)
    @webapi_test_template
    def test_get_links_fields_dvcs_enabled(self):
        """Testing the GET <URL> API does includes DVCS-specific fields and
        links when the DVCS feature is enabled
        """
        review_request = self.create_review_request(create_repository=True,
                                                    publish=True)
        diffset = self.create_diffset(review_request)
        with override_feature_check(dvcs_feature.feature_id, enabled=True):
            rsp = self.api_get(get_diff_item_url(review_request,
                                                 diffset.revision),
                               expected_mimetype=diff_item_mimetype)
            self.assertEqual(rsp['stat'], 'ok')
            self.assertIn('diff', rsp)
            item_rsp = rsp['diff']
            self.assertIn('links', item_rsp)
            self.assertIn('commits', item_rsp['links'])
            self.assertIn('commit_count', item_rsp)
    @webapi_test_template
    def test_get_links_fields_dvcs_disabled(self):
        """Testing the GET <URL> API does not includes DVCS-specific fields and
        links when the DVCS feature is enabled
        """
        review_request = self.create_review_request(create_repository=True,
                                                    publish=True)
        diffset = self.create_diffset(review_request)
        with override_feature_check(dvcs_feature.feature_id, enabled=False):
            rsp = self.api_get(get_diff_item_url(review_request,
                                                 diffset.revision),
                               expected_mimetype=diff_item_mimetype)
            self.assertEqual(rsp['stat'], 'ok')
            self.assertIn('diff', rsp)
            item_rsp = rsp['diff']
            self.assertIn('links', item_rsp)
            self.assertNotIn('commits', item_rsp['links'])
            self.assertNotIn('commit_count', item_rsp)
    #
    # HTTP PUT tests
    #
    def setup_basic_put_test(self, user, with_local_site, local_site_name,
                             put_valid_data):
        review_request = self.create_review_request(
            create_repository=True,
            with_local_site=with_local_site,
            submitter=user)
        diffset = self.create_diffset(review_request)
        return (get_diff_item_url(review_request, diffset.revision,
                                  local_site_name),
                diff_item_mimetype,
                {},
                diffset,
                [])
    def check_put_result(self, user, item_rsp, diffset):
        # Re-fetch the model to pick up any changes made by the PUT.
        diffset = DiffSet.objects.get(pk=diffset.pk)
        self.compare_item(item_rsp, diffset)
| chipx86/reviewboard | reviewboard/webapi/tests/test_diff.py | Python | mit | 21,247 |
# -*- coding: utf-8 -*-
# Copyright 2015-2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import mock
from odoo.addons.connector_carepoint.models import account_invoice_line
from ..common import SetUpCarepointBase
# Dotted path of the module under test; used as the mock.patch target prefix.
model = 'odoo.addons.connector_carepoint.models.account_invoice_line'
class EndTestException(Exception):
    """Sentinel raised via mock side effects to abort a call chain at a
    known point, so tests can verify everything up to that point ran."""
class AccountInvoiceLineTestBase(SetUpCarepointBase):
    """Shared fixtures for the carepoint.account.invoice.line tests."""
    def setUp(self):
        super(AccountInvoiceLineTestBase, self).setUp()
        # Model under test and a mocked connector environment bound to it.
        self.model = 'carepoint.account.invoice.line'
        self.mock_env = self.get_carepoint_helper(
            self.model
        )
    @property
    def record(self):
        """ Model record fixture """
        # Minimal remote CarePoint dispense record used across the tests.
        return {
            'rxdisp_id': 12345,
            'primary_pay_date': '2016-01-23 01:23:45',
            't_patient_pay_sub': '10.23',
        }
class TestAccountInvoiceLineUnit(AccountInvoiceLineTestBase):
    """Tests for AccountInvoiceLineUnit._import_invoice_lines_for_procurement.

    Each test mocks ``unit_for`` and aborts with EndTestException at a
    chosen step to verify the call sequence up to that step.
    """
    def setUp(self):
        super(TestAccountInvoiceLineUnit, self).setUp()
        self.Unit = account_invoice_line.AccountInvoiceLineUnit
        self.unit = self.Unit(self.mock_env)
    def test_import_invoice_lines_for_procurement_unit_for_adapter(self):
        """ It should get unit for adapter """
        with mock.patch.object(self.unit, 'unit_for') as mk:
            mk.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit._import_invoice_lines_for_procurement(True)
            mk.assert_called_once_with(
                account_invoice_line.CarepointCRUDAdapter
            )
    def test_import_invoice_lines_for_procurement_unit_for_importer(self):
        """ It should get unit for importer """
        with mock.patch.object(self.unit, 'unit_for') as mk:
            # First unit_for call (adapter) succeeds; second (importer) aborts.
            mk.side_effect = [None, EndTestException]
            with self.assertRaises(EndTestException):
                self.unit._import_invoice_lines_for_procurement(True)
            mk.assert_called_with(
                account_invoice_line.AccountInvoiceLineImporter
            )
    def test_import_invoice_lines_for_procurement_search(self):
        """ It should search adapter for unit """
        expect = 'expect'
        with mock.patch.object(self.unit, 'unit_for') as mk:
            mk().search.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit._import_invoice_lines_for_procurement(expect)
            mk().search.assert_called_once_with(
                rxdisp_id=expect,
            )
    def test_import_invoice_lines_for_procurement_imports(self):
        """ It should run importer on records """
        with mock.patch.object(self.unit, 'unit_for') as mk:
            expect = mock.MagicMock()
            adapter = mock.MagicMock()
            # Adapter returns one record id; the importer must be run on it.
            adapter.search.return_value = [True]
            mk.side_effect = [adapter, expect]
            expect.run.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit._import_invoice_lines_for_procurement(True)
            expect.run.assert_called_once_with(
                adapter.search()[0]
            )
class TestAccountInvoiceLineImportMapper(AccountInvoiceLineTestBase):
    """Tests for the AccountInvoiceLineImportMapper mapping methods.

    Binders and the Odoo environment are mocked; EndTestException is used
    to abort at a chosen step and verify the calls made up to that step.
    """
    def setUp(self):
        super(TestAccountInvoiceLineImportMapper, self).setUp()
        self.Unit = account_invoice_line.AccountInvoiceLineImportMapper
        self.unit = self.Unit(self.mock_env)
    def test_carepoint_id(self):
        """ It should return correct attribute """
        expect = {'carepoint_id': self.record['rxdisp_id']}
        res = self.unit.carepoint_id(self.record)
        self.assertDictEqual(expect, res)
    def test_invoice_id_get_binder(self):
        """ It should get binder for record type """
        with mock.patch.object(self.unit, 'binder_for'):
            self.unit.binder_for.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit.invoice_id(self.record)
            self.unit.binder_for.assert_called_once_with(
                'carepoint.procurement.order'
            )
    def test_invoice_id_to_odoo(self):
        """ It should get Odoo record for binding """
        with mock.patch.object(self.unit, 'binder_for'):
            self.unit.binder_for().to_odoo.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit.invoice_id(self.record)
            self.unit.binder_for().to_odoo.assert_called_once_with(
                self.record['rxdisp_id'], browse=True,
            )
    def test_invoice_id_search(self):
        """ It should search for invoice from origin """
        with mock.patch.object(self.unit, 'binder_for'):
            with mock.patch.object(self.unit.session, 'env') as env:
                env['account.invoice'].search.side_effect = EndTestException
                proc_id = self.unit.binder_for().to_odoo()
                with self.assertRaises(EndTestException):
                    self.unit.invoice_id(self.record)
                # The invoice is looked up by the sale order name as origin.
                env['account.invoice'].search.assert_called_once_with(
                    [('origin', '=', proc_id.sale_line_id.order_id.name)],
                    limit=1,
                )
    def test_invoice_id_existing_invoice(self):
        """ It should return existing matches invoice """
        expect = mock.MagicMock()
        with mock.patch.object(self.unit, 'binder_for'):
            with mock.patch.object(self.unit.session, 'env') as env:
                env['account.invoice'].search.return_value = [expect]
                res = self.unit.invoice_id(self.record)
                expect = {
                    'invoice_id': expect.id,
                }
                self.assertDictEqual(res, expect)
    def test_invoice_id_new_invoice_prepare_invoice(self):
        """ It should prepare invoice from sale order if not existing """
        with mock.patch.object(self.unit, 'binder_for') as mk:
            with mock.patch.object(self.unit.session, 'env') as env:
                # No existing invoice -> a new one must be prepared.
                env['account.invoice'].search.return_value = []
                prep = mk().to_odoo().sale_line_id.order_id._prepare_invoice
                prep.side_effect = EndTestException
                with self.assertRaises(EndTestException):
                    self.unit.invoice_id(self.record)
    def test_invoice_id_new_invoice_updates_invoice_date(self):
        """ It should inject remote date into invoice vals """
        with mock.patch.object(self.unit, 'binder_for') as mk:
            with mock.patch.object(self.unit.session, 'env') as env:
                env['account.invoice'].search.return_value = []
                prep = mk().to_odoo().sale_line_id.order_id._prepare_invoice
                self.unit.invoice_id(self.record)
                prep().update.assert_called_once_with({
                    'date_invoice': self.record['primary_pay_date'],
                })
    def test_invoice_id_new_invoice_create(self):
        """ It should create invoice with proper vals """
        with mock.patch.object(self.unit, 'binder_for') as mk:
            with mock.patch.object(self.unit.session, 'env') as env:
                env['account.invoice'].search.return_value = []
                prep = mk().to_odoo().sale_line_id.order_id._prepare_invoice
                self.unit.invoice_id(self.record)
                env['account.invoice'].create.assert_called_once_with(prep())
    def test_invoice_id_new_invoice_create_return(self):
        """ It should return result of create in values """
        with mock.patch.object(self.unit, 'binder_for'):
            with mock.patch.object(self.unit.session, 'env') as env:
                env['account.invoice'].search.return_value = []
                res = self.unit.invoice_id(self.record)
                expect = {'invoice_id': env['account.invoice'].create().id}
                self.assertDictEqual(expect, res)
    def test_sale_line_ids_get_binder(self):
        """ It should get binder for record type """
        with mock.patch.object(self.unit, 'binder_for'):
            self.unit.binder_for.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit.sale_line_ids(self.record)
            self.unit.binder_for.assert_called_once_with(
                'carepoint.procurement.order'
            )
    def test_sale_line_ids_to_odoo(self):
        """ It should get Odoo record for binding """
        with mock.patch.object(self.unit, 'binder_for'):
            self.unit.binder_for().to_odoo.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit.sale_line_ids(self.record)
            self.unit.binder_for().to_odoo.assert_called_once_with(
                self.record['rxdisp_id'], browse=True,
            )
    def test_sale_line_ids_return(self):
        """ It should return proper values dict """
        with mock.patch.object(self.unit, 'binder_for') as mk:
            res = self.unit.sale_line_ids(self.record)
            # (6, 0, ids) is the Odoo one2many/many2many "replace" command.
            expect = {
                'sale_line_ids': [(6, 0, [mk().to_odoo().sale_line_id.id])]
            }
            self.assertDictEqual(expect, res)
    def test_invoice_line_data_get_binder(self):
        """ It should get binder for record type """
        with mock.patch.object(self.unit, 'binder_for'):
            self.unit.binder_for.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit.invoice_line_data(self.record)
            self.unit.binder_for.assert_called_once_with(
                'carepoint.procurement.order'
            )
    def test_invoice_line_data_to_odoo(self):
        """ It should get Odoo record for binding """
        with mock.patch.object(self.unit, 'binder_for'):
            self.unit.binder_for().to_odoo.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit.invoice_line_data(self.record)
            self.unit.binder_for().to_odoo.assert_called_once_with(
                self.record['rxdisp_id'], browse=True,
            )
    def test_invoice_line_data_sets_price_unit(self):
        """ It should set the price_unit on sale line to circumvent lack
        of price data in the remote sales records
        """
        qty = 20
        with mock.patch.object(self.unit, 'binder_for'):
            line_id = self.unit.binder_for().to_odoo().sale_line_id
            line_id.product_uom_qty = qty
            self.unit.invoice_line_data(self.record)
            # Unit price is back-computed from the patient-pay subtotal.
            self.assertEqual(
                float(self.record['t_patient_pay_sub']) / qty,
                line_id.price_unit
            )
    def test_invoice_line_data_prepares_invoice_line(self):
        """ It should prepare invoice line based on sale line """
        qty = 20
        with mock.patch.object(self.unit, 'binder_for'):
            line_id = self.unit.binder_for().to_odoo().sale_line_id
            line_id.product_uom_qty = qty
            self.unit.invoice_line_data(self.record)
            line_id._prepare_invoice_line.assert_called_once_with(qty)
    def test_invoice_line_data_return(self):
        """ It should prepare invoice line based on sale line """
        qty = 20
        with mock.patch.object(self.unit, 'binder_for'):
            line_id = self.unit.binder_for().to_odoo().sale_line_id
            line_id.product_uom_qty = qty
            res = self.unit.invoice_line_data(self.record)
            self.assertEqual(line_id._prepare_invoice_line(), res)
class TestAccountInvoiceLineImporter(AccountInvoiceLineTestBase):
    """ Tests for ``AccountInvoiceLineImporter``.

    Fix: the original class defined ``test_after_import_invoice_validate``
    twice; the second definition silently shadowed the first, so the
    invoice-validation test never ran.  The second has been renamed to
    ``test_after_import_invoice_pay_and_reconcile`` to match its docstring.
    """

    def setUp(self):
        super(TestAccountInvoiceLineImporter, self).setUp()
        self.Unit = account_invoice_line.AccountInvoiceLineImporter
        self.unit = self.Unit(self.mock_env)
        self.unit.carepoint_record = self.record

    def test_import_dependencies(self):
        """ It should import all dependencies """
        with mock.patch.object(self.unit, '_import_dependency') as mk:
            self.unit._import_dependencies()
            mk.assert_has_calls([
                mock.call(
                    self.record['rxdisp_id'],
                    'carepoint.procurement.order',
                ),
            ])

    def test_after_import_get_binder_procurement(self):
        """ It should get binder for record type """
        with mock.patch.object(self.unit, 'binder_for'):
            self.unit.binder_for.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit._after_import(self.record)
            self.unit.binder_for.assert_called_once_with(
                'carepoint.procurement.order'
            )

    def test_after_import_to_odoo_procurement(self):
        """ It should get Odoo record for binding """
        with mock.patch.object(self.unit, 'binder_for'):
            self.unit.binder_for().to_odoo.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit._after_import(self.record)
            self.unit.binder_for().to_odoo.assert_called_once_with(
                self.record['rxdisp_id'], browse=True,
            )

    def test_after_import_get_binder_sale(self):
        """ It should get binder for record type """
        with mock.patch.object(self.unit, 'binder_for'):
            self.unit.binder_for.side_effect = [mock.MagicMock(),
                                                EndTestException]
            with self.assertRaises(EndTestException):
                self.unit._after_import(self.record)
            self.unit.binder_for.assert_called_with(
                'carepoint.sale.order'
            )

    def test_after_import_to_backend_sale(self):
        """ It should get backend record for binding """
        proc = mock.MagicMock()
        with mock.patch.object(self.unit, 'binder_for'):
            self.unit.binder_for().to_odoo.return_value = proc
            self.unit.binder_for().to_backend.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit._after_import(self.record)
            self.unit.binder_for().to_backend.assert_called_with(
                proc.sale_line_id.order_id.id,
            )

    def test_after_import_gets_proc_unit(self):
        """ It should get unit for model """
        with mock.patch.multiple(
            self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT
        ):
            self.unit.unit_for.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit._after_import(self.record)
            self.unit.unit_for.assert_called_with(
                account_invoice_line.ProcurementOrderUnit,
                model='carepoint.procurement.order',
            )

    def test_after_import_gets_order_line_cnt(self):
        """ It should get count of order lines for sale """
        with mock.patch.multiple(
            self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT
        ):
            self.unit.unit_for()._get_order_line_count.side_effect = \
                EndTestException
            with self.assertRaises(EndTestException):
                self.unit._after_import(self.record)
            self.unit.unit_for()._get_order_line_count.assert_called_with(
                self.unit.binder_for().to_backend()
            )

    def test_after_import_gets_ref_for_cp_state(self):
        """ It should get reference for carepoint state record """
        with mock.patch.multiple(
            self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT,
            session=mock.DEFAULT, _get_binding=mock.DEFAULT,
        ):
            invoice_id = self.unit._get_binding().invoice_id
            self.unit.unit_for()._get_order_line_count.return_value = 1
            invoice_id.invoice_line_ids = [True]
            self.unit.env.ref.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit._after_import(self.record)
            self.unit.env.ref.assert_called_with(
                'connector_carepoint.state_%d' % (
                    self.unit.binder_for().to_odoo().sale_line_id.
                    order_id.carepoint_order_state_cn
                )
            )

    def test_after_import_invoice_write_new_state(self):
        """ It should write to invoice new states provided by remote system """
        with mock.patch.multiple(
            self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT,
            session=mock.DEFAULT, _get_binding=mock.DEFAULT,
        ):
            invoice_id = self.unit._get_binding().invoice_id
            self.unit.unit_for()._get_order_line_count.return_value = 1
            invoice_id.invoice_line_ids = [True]
            invoice_id.write.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit._after_import(self.record)
            invoice_id.write.assert_called_once_with({
                'state': self.unit.env.ref().invoice_state,
            })

    def test_after_import_invoice_create_moves(self):
        """ It should create accounting moves for newly paid invoices """
        with mock.patch.multiple(
            self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT,
            session=mock.DEFAULT, _get_binding=mock.DEFAULT,
        ):
            invoice_id = self.unit._get_binding().invoice_id
            self.unit.unit_for()._get_order_line_count.return_value = 1
            invoice_id.invoice_line_ids = [True]
            self.unit.env.ref().invoice_state = 'paid'
            invoice_id.action_move_create.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit._after_import(self.record)

    def test_after_import_invoice_validate(self):
        """ It should validate newly paid invoices """
        with mock.patch.multiple(
            self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT,
            session=mock.DEFAULT, _get_binding=mock.DEFAULT,
        ):
            invoice_id = self.unit._get_binding().invoice_id
            self.unit.unit_for()._get_order_line_count.return_value = 1
            invoice_id.invoice_line_ids = [True]
            self.unit.env.ref().invoice_state = 'paid'
            invoice_id.invoice_validate.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit._after_import(self.record)

    def test_after_import_invoice_pay_and_reconcile(self):
        """ It should pay and reconcile invoices when residual on invoice """
        # Renamed from a duplicate ``test_after_import_invoice_validate``.
        with mock.patch.multiple(
            self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT,
            session=mock.DEFAULT, _get_binding=mock.DEFAULT,
        ):
            invoice_id = self.unit._get_binding().invoice_id
            invoice_id.residual = 1
            self.unit.unit_for()._get_order_line_count.return_value = 1
            invoice_id.invoice_line_ids = [True]
            self.unit.env.ref().invoice_state = 'paid'
            invoice_id.pay_and_reconcile.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.unit._after_import(self.record)
            invoice_id.pay_and_reconcile.assert_called_once_with(
                self.unit.backend_record.default_payment_journal,
                date=invoice_id.date_invoice,
            )
| laslabs/odoo-connector-carepoint | connector_carepoint/tests/models/test_account_invoice_line.py | Python | agpl-3.0 | 19,528 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import re
import os
from random import randint
from collections import OrderedDict
from datetime import datetime
from six.moves.urllib.parse import urlparse
import json
from operator import itemgetter, attrgetter
import pkg_resources
from tg import tmpl_context as c, app_globals as g, response
from tg import request
from paste.deploy.converters import asbool, aslist
from tg import expose, redirect, flash, validate, config, jsonify
from tg.decorators import with_trailing_slash, without_trailing_slash
from webob import exc
from bson import ObjectId
from ming.orm.ormsession import ThreadLocalORMSession
from ming.odm import session
import PIL
from allura.app import Application, DefaultAdminController, SitemapEntry
from allura.lib import helpers as h
from allura import version
from allura import model as M
from allura.lib.security import has_access, require_access, is_site_admin
from allura.lib.widgets import form_fields as ffw
from allura.lib import exceptions as forge_exc
from allura.lib import plugin
from allura.controllers import BaseController
from allura.lib.decorators import require_post
from allura.tasks import export_tasks
from allura.lib.widgets.project_list import ProjectScreenshots
from . import widgets as aw
import six
# Module-level logger, named after this module per stdlib convention.
log = logging.getLogger(__name__)
class W:
    # Widget registry: shared, stateless widget instances created once at
    # import time and assigned to ``c`` per-request by the controllers below
    # (TurboGears/EasyWidgets convention).
    label_edit = ffw.LabelEdit()
    group_card = aw.GroupCard()
    permission_card = aw.PermissionCard()
    new_group_settings = aw.NewGroupSettings()
    screenshot_admin = aw.ScreenshotAdmin()
    screenshot_list = ProjectScreenshots(draggable=True)
    metadata_admin = aw.MetadataAdmin()
    audit = aw.AuditLog()
    page_list = ffw.PageList()
class AdminApp(Application):
    '''This is the admin app. It is pretty much required for
    a functioning allura project.
    '''
    __version__ = version.__version__
    _installable_tools = None
    max_instances = 0  # never offered as an installable tool instance
    tool_label = 'admin'
    icons = {
        24: 'images/admin_24.png',
        32: 'images/admin_32.png',
        48: 'images/admin_48.png'
    }
    exportable = True
    has_notifications = False
    def __init__(self, project, config):
        # Wire up the web controllers (HTML + REST) for this project's
        # admin pages.
        Application.__init__(self, project, config)
        self.root = ProjectAdminController()
        self.api_root = ProjectAdminRestController()
        self.admin = AdminAppAdminController(self)
        self.templates = pkg_resources.resource_filename(
            'allura.ext.admin', 'templates')
        self.sitemap = [SitemapEntry('Admin', '.')]
    def is_visible_to(self, user):
        '''Whether the user can view the app.'''
        return has_access(c.project, 'create')(user=user)
    @staticmethod
    def installable_tools_for(project):
        # Return [{'name': entry-point name, 'app': App class}, ...] for
        # tools that can still be installed into ``project``, sorted by
        # app status then ordinal, filtered by the project's allowed
        # tool statuses.
        tools = []
        for name, App in g.entry_points['tool'].items():
            # temporary AppConfig only used for the _installable check
            cfg = M.AppConfig(project_id=project._id, tool_name=name)
            if App._installable(name, project.neighborhood, project.app_configs):
                tools.append(dict(name=name, app=App))
            # prevent from saving temporary config to db
            session(cfg).expunge(cfg)
        tools.sort(key=lambda t: (t['app'].status_int(), t['app'].ordinal or 0))
        return [t for t in tools
                if t['app'].status in project.allowed_tool_status]
    @staticmethod
    def exportable_tools_for(project):
        # App configs whose installed app supports bulk export,
        # sorted by mount point.
        tools = []
        for tool in project.app_configs:
            if project.app_instance(tool).exportable:
                tools.append(tool)
        return sorted(tools, key=lambda t: t.options.mount_point)
    def main_menu(self):
        '''Apps should provide their entries to be added to the main nav
        :return: a list of :class:`SitemapEntries <allura.app.SitemapEntry>`
        '''
        return [SitemapEntry('Admin', '.')]
    @h.exceptionless([], log)
    def sidebar_menu(self):
        # Build the admin sidebar.  Neighborhood admin projects get
        # neighborhood-level links; regular projects get project links.
        links = []
        admin_url = c.project.url() + 'admin/'
        if c.project.is_nbhd_project:
            links.append(SitemapEntry('Add Project', c.project.url()
                                      + 'add_project', ui_icon=g.icons['add']))
            nbhd_admin_url = c.project.neighborhood.url() + '_admin/'
            links = links + [
                SitemapEntry('Neighborhood'),
                SitemapEntry('Overview', nbhd_admin_url + 'overview'),
                SitemapEntry('Awards', nbhd_admin_url + 'accolades')]
        else:
            links += [
                SitemapEntry('Welcome', admin_url),
                SitemapEntry('Metadata', admin_url + 'overview', className="admin-nav-metadata"),
            ]
            if c.project.neighborhood.name != "Users":
                links += [
                    SitemapEntry('Screenshots', admin_url + 'screenshots'),
                    SitemapEntry('Categorization', admin_url + 'trove')
                ]
        if plugin.ProjectRegistrationProvider.get().registration_date(c.project) < datetime(2016, 6, 1):
            # only show transitional Tools page to older projects that may be used to it
            # no point is showing it to new projects
            links.append(SitemapEntry('Tools', admin_url + 'tools_moved'))
        if asbool(config.get('bulk_export_enabled', True)):
            links.append(SitemapEntry('Export', admin_url + 'export'))
        if c.project.is_root and has_access(c.project, 'admin')():
            links.append(
                SitemapEntry('User Permissions', admin_url + 'groups/', className="admin-nav-user-perms"))
        if not c.project.is_root and has_access(c.project, 'admin')():
            links.append(
                SitemapEntry('Permissions', admin_url + 'permissions/'))
        if len(c.project.neighborhood_invitations):
            links.append(
                SitemapEntry('Invitation(s)', admin_url + 'invitations'))
        links.append(SitemapEntry('Audit Trail', admin_url + 'audit/'))
        if c.project.is_nbhd_project:
            links.append(SitemapEntry('Statistics', nbhd_admin_url + 'stats/'))
            links.append(None)
            links.append(SitemapEntry('Help', nbhd_admin_url + 'help/'))
        # let 'admin' entry-point extensions adjust the finished menu
        for ep_name in sorted(g.entry_points['admin'].keys()):
            admin_extension = g.entry_points['admin'][ep_name]
            admin_extension().update_project_sidebar_menu(links)
        return links
    def admin_menu(self):
        # The admin tool has no per-tool admin menu of its own.
        return []
    def install(self, project):
        # Nothing to install; the admin app has no tool-specific artifacts.
        pass
    def bulk_export(self, f, export_path='', with_attachments=False):
        # Export the project document itself as JSON.
        json.dump(self.project, f, cls=jsonify.JSONEncoder, indent=2)
class AdminExtensionLookup:
    """Route ``.../ext/<name>`` to a controller contributed by an
    'admin' entry-point extension; 404 when no extension claims it."""

    @expose()
    def _lookup(self, name, *remainder):
        for entry_point_name in sorted(g.entry_points['admin'].keys()):
            extension_cls = g.entry_points['admin'][entry_point_name]
            controller_cls = \
                extension_cls().project_admin_controllers.get(name)
            if controller_cls:
                return controller_cls(), remainder
        raise exc.HTTPNotFound(name)
class ProjectAdminController(BaseController):
    # HTML controller for a project's /admin/ pages: metadata, screenshots,
    # trove categories, tool mounts, export, permissions, etc.
    def _check_security(self):
        # Every admin page requires project-level 'admin' access.
        require_access(c.project, 'admin')
    def __init__(self):
        # NOTE(review): ``self.groups`` (instance attribute) shadows the
        # ``groups`` method defined below; the method appears unreachable
        # via normal attribute lookup — confirm which one serves /groups.
        self.permissions = PermissionsController()
        self.groups = GroupsController()
        self.audit = AuditController()
        self.ext = AdminExtensionLookup()
    @with_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_admin.html')
    def index(self, **kw):
        # Admin landing ("Welcome") page.
        return dict()
    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_invitations.html')
    def invitations(self):
        # List pending neighborhood invitations for this project.
        return dict()
    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_overview.html')
    def overview(self, **kw):
        # Metadata edit form; also the error handler for ``update`` below.
        c.metadata_admin = W.metadata_admin
        # need this because features field expects data in specific format
        metadata_admin_value = h.fixed_attrs_proxy(
            c.project,
            features=[{'feature': f} for f in c.project.features])
        allow_project_delete = asbool(config.get('allow_project_delete', True))
        return dict(allow_project_delete=allow_project_delete,
                    metadata_admin_value=metadata_admin_value,
                    )
    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_screenshots.html')
    def screenshots(self, **kw):
        # Screenshot management page (upload / sort / delete).
        c.screenshot_admin = W.screenshot_admin
        c.screenshot_list = W.screenshot_list
        return dict()
    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_trove.html')
    def trove(self):
        # Categorization page: top-level trove categories, ordered per
        # config, plus per-category recommended entries from config pairs
        # of the form "trove_id=label".
        c.label_edit = W.label_edit
        base_troves_by_name = {t.shortname: t
                               for t in M.TroveCategory.query.find(dict(trove_parent_id=0))}
        first_troves = aslist(config.get('trovecategories.admin.order', 'topic,license,os'), ',')
        base_troves = [
            base_troves_by_name.pop(t) for t in first_troves
        ] + sorted(list(base_troves_by_name.values()), key=attrgetter('fullname'))
        trove_recommendations = {}
        for trove in base_troves:
            config_name = f'trovecategories.admin.recommended.{trove.shortname}'
            recommendation_pairs = aslist(config.get(config_name, []), ',')
            trove_recommendations[trove.shortname] = OrderedDict()
            for pair in recommendation_pairs:
                trove_id, label = pair.split('=')
                trove_recommendations[trove.shortname][trove_id] = label
        return dict(base_troves=base_troves,
                    trove_recommendations=trove_recommendations)
    @expose('jinja:allura.ext.admin:templates/project_tools_moved.html')
    def tools_moved(self, **kw):
        # Transitional page shown to pre-2016 projects (see sidebar_menu).
        return {}
    @expose()
    @require_post()
    def update_labels(self, labels=None, **kw):
        # Replace the project's labels with a comma-separated list.
        require_access(c.project, 'admin')
        c.project.labels = labels.split(',')
        M.AuditLog.log('updated labels')
        redirect('trove')
    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_install_tool.html')
    def install_tool(self, tool_name=None, **kw):
        # Render the install dialog for a tool; 'subproject' is a
        # pseudo-tool with hard-coded defaults and no install options.
        if tool_name == 'subproject':
            tool = {
                'tool_label': 'Sub Project',
                'default_mount_label': 'SubProject',
                'default_mount_point': 'subproject'
            }
            options = []
        else:
            tool = g.entry_points['tool'][tool_name]
            options = tool.options_on_install()
        return dict(
            tool_name=tool_name,
            tool=tool,
            options=options,
            existing_mount_points=c.project.mount_points()
        )
    @expose()
    def _lookup(self, name, *remainder):
        # Dispatch /admin/<mount_point>/ to that tool's admin controller.
        app = c.project.app_instance(name)
        if app is None:
            raise exc.HTTPNotFound(name)
        return app.admin, remainder
    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_permissions.html')
    def groups(self, **kw):
        # NOTE(review): shadowed by ``self.groups`` set in __init__;
        # see note there.
        return dict()
    @expose()
    @require_post()
    @validate(W.metadata_admin, error_handler=overview)
    @h.vardec
    def update(self, name=None,
               short_description=None,
               summary='',
               icon=None,
               category=None,
               external_homepage='',
               video_url='',
               support_page='',
               support_page_url='',
               twitter_handle='',
               facebook_page='',
               removal='',
               moved_to_url='',
               tracking_id='',
               features=None,
               **kw):
        # Save the project-metadata form.  Each changed field is audit-
        # logged individually; icon delete / project delete / undelete are
        # handled first and redirect early.
        require_access(c.project, 'update')
        flash_status = 'success'
        flash_message = 'Form values saved'
        if removal != c.project.removal:
            M.AuditLog.log('change project removal status to %s', removal)
            c.project.removal = removal
            c.project.removal_changed_date = datetime.utcnow()
        if 'delete_icon' in kw:
            M.ProjectFile.query.remove(dict(project_id=c.project._id, category=re.compile(r'^icon')))
            c.project.set_tool_data('allura', icon_original_size=None, icon_sha256=None)
            M.AuditLog.log('remove project icon')
            g.post_event('project_updated')
            redirect('overview')
        elif 'delete' in kw:
            allow_project_delete = asbool(
                config.get('allow_project_delete', True))
            if allow_project_delete or not c.project.is_root:
                M.AuditLog.log('delete project')
                plugin.ProjectRegistrationProvider.get().delete_project(
                    c.project, c.user)
            redirect('overview')
        elif 'undelete' in kw:
            M.AuditLog.log('undelete project')
            plugin.ProjectRegistrationProvider.get().undelete_project(
                c.project, c.user)
            redirect('overview')
        if name and name != c.project.name:
            M.AuditLog.log('change project name to %s', name)
            c.project.name = name
        if short_description != c.project.short_description:
            M.AuditLog.log('change short description to %s', short_description)
            c.project.short_description = short_description
        if summary != c.project.summary:
            M.AuditLog.log('change summary to %s', summary)
            c.project.summary = summary
        category = category and ObjectId(category) or None
        if category != c.project.category_id:
            M.AuditLog.log('change category to %s', category)
            c.project.category_id = category
        if external_homepage != c.project.external_homepage:
            M.AuditLog.log('change external home page to %s',
                           external_homepage)
            c.project.external_homepage = external_homepage
        if video_url != c.project.video_url:
            M.AuditLog.log('change video url to %s', video_url)
            c.project.video_url = video_url
        if support_page != c.project.support_page:
            M.AuditLog.log('change project support page to %s', support_page)
            c.project.support_page = support_page
        old_twitter = c.project.social_account('Twitter')
        if not old_twitter or twitter_handle != old_twitter.accounturl:
            M.AuditLog.log('change project twitter handle to %s',
                           twitter_handle)
            c.project.set_social_account('Twitter', twitter_handle)
        old_facebook = c.project.social_account('Facebook')
        if not old_facebook or facebook_page != old_facebook.accounturl:
            # only accept facebook.com URLs (or an empty value)
            if not facebook_page or 'facebook.com' in urlparse(facebook_page).netloc:
                M.AuditLog.log(
                    'change project facebook page to %s', facebook_page)
                c.project.set_social_account('Facebook', facebook_page)
        if support_page_url != c.project.support_page_url:
            M.AuditLog.log('change project support page url to %s',
                           support_page_url)
            c.project.support_page_url = support_page_url
        if moved_to_url != c.project.moved_to_url:
            M.AuditLog.log('change project moved to url to %s', moved_to_url)
            c.project.moved_to_url = moved_to_url
        if tracking_id != c.project.tracking_id:
            M.AuditLog.log('change project tracking ID to %s', tracking_id)
            c.project.tracking_id = tracking_id
        features = [f['feature'].strip() for f in features or []
                    if f.get('feature', '').strip()]
        if features != c.project.features:
            M.AuditLog.log('change project features to %s', features)
            c.project.features = features
        if icon is not None and icon != b'':
            if c.project.icon:
                M.ProjectFile.query.remove(dict(project_id=c.project._id, category=re.compile(r'^icon')))
            save_icon = c.project.save_icon(icon.filename, icon.file, content_type=icon.type)
            if not save_icon:
                M.AuditLog.log('could not update project icon')
                flash_message = f'{flash_message}, but image upload failed'
                flash_status = 'warning'
            else:
                M.AuditLog.log('update project icon')
        g.post_event('project_updated')
        flash(flash_message, flash_status)
        redirect('overview')
    def _add_trove(self, type, new_trove):
        # Shared helper for add_trove / add_trove_js.  Enforces per-type
        # limits; returns (trove_obj, error_msg) where error_msg is None
        # on success.
        current_troves = getattr(c.project, 'trove_%s' % type)
        trove_obj = M.TroveCategory.query.get(trove_cat_id=int(new_trove))
        error_msg = None
        if type in ['license', 'audience', 'developmentstatus', 'language'] and len(current_troves) >= 6:
            error_msg = 'You may not have more than 6 of this category.'
        elif type in ['topic'] and len(current_troves) >= 3:
            error_msg = 'You may not have more than 3 of this category.'
        elif trove_obj is not None:
            if trove_obj._id not in current_troves:
                current_troves.append(trove_obj._id)
                M.AuditLog.log('add trove %s: %s', type, trove_obj.fullpath)
                # just in case the event handling is super fast
                ThreadLocalORMSession.flush_all()
                c.project.last_updated = datetime.utcnow()
                g.post_event('project_updated')
            else:
                error_msg = 'This category has already been assigned to the project.'
        return (trove_obj, error_msg)
    @expose('json:')
    @require_post()
    def add_trove_js(self, type, new_trove, **kw):
        # AJAX variant of add_trove; returns the new trove info as JSON.
        require_access(c.project, 'update')
        trove_obj, error_msg = self._add_trove(type, new_trove)
        return dict(trove_full_path=trove_obj.fullpath_within_type, trove_cat_id=trove_obj.trove_cat_id, error_msg=error_msg)
    @expose()
    @require_post()
    def add_trove(self, type, new_trove, **kw):
        # Form-post variant of add_trove; flashes errors and redirects.
        require_access(c.project, 'update')
        trove_obj, error_msg = self._add_trove(type, new_trove)
        if error_msg:
            flash(error_msg, 'error')
        redirect('trove')
    @expose()
    @require_post()
    def delete_trove(self, type, trove, **kw):
        # Remove a trove category assignment from the project.
        require_access(c.project, 'update')
        trove_obj = M.TroveCategory.query.get(trove_cat_id=int(trove))
        current_troves = getattr(c.project, 'trove_%s' % type)
        if trove_obj is not None and trove_obj._id in current_troves:
            M.AuditLog.log('remove trove %s: %s', type, trove_obj.fullpath)
            current_troves.remove(trove_obj._id)
            # just in case the event handling is super fast
            ThreadLocalORMSession.flush_all()
            c.project.last_updated = datetime.utcnow()
            g.post_event('project_updated')
        redirect('trove')
    @expose()
    @require_post()
    @validate(W.screenshot_admin)
    def add_screenshot(self, screenshot=None, caption=None, **kw):
        # Upload a screenshot (max 6 per project); de-duplicates filenames,
        # stores the original plus a 150x150 thumbnail.
        require_access(c.project, 'update')
        screenshots = c.project.get_screenshots()
        if len(screenshots) >= 6:
            flash('You may not have more than 6 screenshots per project.',
                  'error')
        elif screenshot is not None and screenshot != '':
            future_bmp = False
            e_filename, e_fileext = os.path.splitext(screenshot.filename)
            for screen in screenshots:
                c_filename, c_fileext = os.path.splitext(screen.filename)
                if c_fileext == '.png' and e_fileext.lower() == '.bmp' and e_filename == c_filename:
                    future_bmp = True
                    # If both filename(without ext.) equals and exiting file ext. is png and given file ext is bmp, there will be two similar png files.
                if screen.filename == screenshot.filename or future_bmp:
                    screenshot.filename = re.sub(r'(.*)\.(.*)', r'\1-' + str(randint(1000,9999)) + r'.\2', screenshot.filename)
                    # if filename already exists append a random number
                    break
            M.AuditLog.log('add screenshot')
            sort = 1 + max([ss.sort or 0 for ss in screenshots] or [0])
            M.ProjectFile.save_image(
                screenshot.filename, screenshot.file, content_type=screenshot.type,
                save_original=True,
                original_meta=dict(
                    project_id=c.project._id,
                    category='screenshot',
                    caption=caption,
                    sort=sort),
                square=True, thumbnail_size=(150, 150),
                thumbnail_meta=dict(project_id=c.project._id, category='screenshot_thumb'), convert_bmp=True)
            g.post_event('project_updated')
        redirect('screenshots')
    @expose()
    @require_post()
    def sort_screenshots(self, **kw):
        """Sort project screenshots.
        Called via ajax when screenshots are reordered via drag/drop on
        the Screenshots admin page.
        ``kw`` is a mapping of (screenshot._id, sort_order) pairs.
        """
        for s in c.project.get_screenshots():
            if str(s._id) in kw:
                s.sort = int(kw[str(s._id)])
        g.post_event('project_updated')
    @expose()
    @require_post()
    def delete_screenshot(self, id=None, **kw):
        # Delete a screenshot by its ObjectId.
        require_access(c.project, 'update')
        if id is not None and id != '':
            M.AuditLog.log('remove screenshot')
            M.ProjectFile.query.remove(
                dict(project_id=c.project._id, _id=ObjectId(id)))
            g.post_event('project_updated')
        redirect('screenshots')
    @expose()
    @require_post()
    def edit_screenshot(self, id=None, caption=None, **kw):
        # Update a screenshot's caption.
        require_access(c.project, 'update')
        if id is not None and id != '':
            M.ProjectFile.query.get(
                project_id=c.project._id, _id=ObjectId(id)).caption = caption
            g.post_event('project_updated')
        redirect('screenshots')
    @expose()
    @require_post()
    def join_neighborhood(self, nid):
        # Accept a neighborhood invitation (or fall back to 'Projects'
        # when no nid is given).
        require_access(c.project, 'admin')
        if not nid:
            n = M.Neighborhood.query.get(name='Projects')
            c.project.neighborhood_id = n._id
            flash('Joined %s' % n.name)
            redirect(c.project.url() + 'admin/')
        nid = ObjectId(str(nid))
        if nid not in c.project.neighborhood_invitations:
            flash('No invitation to that neighborhood', 'error')
            redirect('.')
        c.project.neighborhood_id = nid
        n = M.Neighborhood.query.get(_id=nid)
        flash('Joined %s' % n.name)
        redirect('invitations')
    def _update_mounts(self, subproject=None, tool=None, new=None, **kw):
        '''
        Returns the new App or Subproject, if one was installed.
        Returns None otherwise.
        '''
        if subproject is None:
            subproject = []
        if tool is None:
            tool = []
        new_app = None
        # update/delete existing subprojects
        for sp in subproject:
            p = M.Project.query.get(shortname=sp['shortname'],
                                    neighborhood_id=c.project.neighborhood_id)
            if sp.get('delete'):
                require_access(c.project, 'admin')
                M.AuditLog.log('delete subproject %s', sp['shortname'])
                p.removal = 'deleted'
                plugin.ProjectRegistrationProvider.get().delete_project(
                    p, c.user)
            elif not new:
                M.AuditLog.log('update subproject %s', sp['shortname'])
                p.name = sp['name']
                p.ordinal = int(sp['ordinal'])
        # update/delete existing tool mounts
        for p in tool:
            if p.get('delete'):
                require_access(c.project, 'admin')
                M.AuditLog.log('uninstall tool %s', p['mount_point'])
                c.project.uninstall_app(p['mount_point'])
            elif not new:
                M.AuditLog.log('update tool %s', p['mount_point'])
                options = c.project.app_config(p['mount_point']).options
                options.mount_label = p['mount_label']
                options.ordinal = int(p['ordinal'])
        # install a new subproject (no ep_name) or tool (ep_name given)
        if new and new.get('install'):
            ep_name = new.get('ep_name', None)
            if not ep_name:
                require_access(c.project, 'create')
                mount_point = new['mount_point'].lower() or h.nonce()
                M.AuditLog.log('create subproject %s', mount_point)
                sp = c.project.new_subproject(mount_point)
                sp.name = new['mount_label']
                if 'ordinal' in new:
                    sp.ordinal = int(new['ordinal'])
                else:
                    sp.ordinal = c.project.last_ordinal_value() + 1
                new_app = sp
            else:
                require_access(c.project, 'admin')
                installable_tools = AdminApp.installable_tools_for(c.project)
                if not ep_name.lower() in [t['name'].lower() for t in installable_tools]:
                    flash('Installation limit exceeded.', 'error')
                    return
                mount_point = new['mount_point'] or ep_name
                M.AuditLog.log('install tool %s', mount_point)
                App = g.entry_points['tool'][ep_name]
                # pass only options which app expects
                config_on_install = {
                    k: v for (k, v) in kw.items()
                    if k in [o.name for o in App.options_on_install()]
                }
                new_app = c.project.install_app(
                    ep_name,
                    mount_point,
                    mount_label=new['mount_label'],
                    ordinal=int(new['ordinal']) if 'ordinal' in new else None,
                    **config_on_install)
        g.post_event('project_updated')
        g.post_event('project_menu_updated')
        return new_app
    @h.vardec
    @expose()
    @require_post()
    def update_mounts(self, subproject=None, tool=None, new=None, page=0, limit=200, **kw):
        # Form-post wrapper around _update_mounts; redirects to the newly
        # installed app, or back to the referer.
        # NOTE(review): ``as exc`` below shadows the module-level webob
        # ``exc`` import within this handler.
        if new and new['ep_name'] == 'subproject':
            new['ep_name'] = ""
        try:
            new_app = self._update_mounts(subproject, tool, new, **kw)
            if new_app:
                if getattr(new_app, 'tool_label', '') == 'External Link':
                    flash(f'{new_app.tool_label} installed successfully.')
                else:
                    new_url = new_app.url
                    if callable(new_url):  # subprojects have a method instead of property
                        new_url = new_url()
                    redirect(new_url)
        except forge_exc.ForgeError as exc:
            flash(f'{exc.__class__.__name__}: {exc.args[0]}',
                  'error')
        if request.referer is not None and tool is not None and 'delete' in tool[0] and \
                re.search(c.project.url() + r'(admin\/|)' + tool[0]['mount_point']+ r'\/*',
                          six.ensure_text(request.referer)):
            # Redirect to root when deleting current module
            redirect('../')
        redirect(six.ensure_text(request.referer or '/'))
    @expose('jinja:allura.ext.admin:templates/export.html')
    def export(self, tools=None, with_attachments=False):
        # Bulk-export page: POST schedules the export via the REST
        # controller; GET renders status plus total attachment size (MB,
        # computed by a MongoDB aggregation over GridFS file lengths).
        if not asbool(config.get('bulk_export_enabled', True)):
            raise exc.HTTPNotFound()
        if request.method == 'POST':
            try:
                ProjectAdminRestController().export(tools, send_email=True, with_attachments=with_attachments)
            except (exc.HTTPBadRequest, exc.HTTPServiceUnavailable) as e:
                flash(str(e), 'error')
                redirect('.')
            else:
                # TODO: typo "recieve" in this user-facing message
                flash(
                    'Export scheduled. You will recieve an email with download instructions when complete.', 'ok')
                redirect('export')
        exportable_tools = AdminApp.exportable_tools_for(c.project)
        apps_id = [tool._id for tool in exportable_tools]
        db = M.session.project_doc_session.db
        files_id = db.attachment.find({"app_config_id": {"$in": apps_id}}).distinct("file_id")
        try:
            total_size = list(db.attachment.files.aggregate([
                {
                    "$match": {"_id": {"$in": files_id}}
                },
                {
                    "$group": {"_id": "total", "total_size": {"$sum": "$length"}}
                },
                {
                    "$project": {"_id": 0, "total_size": {"$divide": ["$total_size", 1000000]}}
                }
            ], cursor={}))[0].get('total_size')
        except IndexError:
            # no matching attachments at all
            total_size = 0
        return {
            'tools': exportable_tools,
            'status': c.project.bulk_export_status(),
            'total_size': round(total_size, 3)
        }
class ProjectAdminRestController(BaseController):
"""
Exposes RESTful API for project admin actions.
"""
    def _check_security(self):
        # All REST admin endpoints require project-level 'admin' access.
        require_access(c.project, 'admin')
@expose('json:')
@require_post()
def mount_order(self, **kw):
if not kw:
raise exc.HTTPBadRequest('Expected kw params in the form of "ordinal: mount_point"')
try:
sorted_tools = sorted(list(kw.items()), key=lambda x: int(x[0]))
except ValueError:
raise exc.HTTPBadRequest('Invalid kw: expected "ordinal: mount_point"')
for ordinal, mount_point in sorted_tools:
try:
c.project.app_config(mount_point).options.ordinal = int(ordinal)
except AttributeError as e:
# Handle sub project
p = M.Project.query.get(shortname=f"{c.project.shortname}/{mount_point}",
neighborhood_id=c.project.neighborhood_id)
if p:
p.ordinal = int(ordinal)
M.AuditLog.log('Updated tool order')
g.post_event('project_menu_updated')
return {'status': 'ok'}
@expose('json:')
@require_post()
def configure_tool_grouping(self, grouping_threshold='1', **kw):
try:
grouping_threshold = int(grouping_threshold)
if grouping_threshold < 1 or grouping_threshold > 10:
raise exc.HTTPBadRequest('Invalid threshold. Expected a value between 1 and 10')
c.project.set_tool_data(
'allura', grouping_threshold=grouping_threshold)
except ValueError:
raise exc.HTTPBadRequest('Invalid threshold. Expected a value between 1 and 10')
M.AuditLog.log('Updated tool grouping threshold')
g.post_event('project_menu_updated')
return {'status': 'ok'}
@expose('json:')
def installable_tools(self, **kw):
""" List of installable tools and their default options.
"""
tools = []
for tool in AdminApp.installable_tools_for(c.project):
tools.append({
'name': tool['name'],
'description': " ".join(tool['app'].tool_description.split()),
'icons': tool['app'].icons,
'tool_label': tool['app'].tool_label,
'defaults': {
'default_options': tool['app'].default_options(),
'default_mount_label': tool['app'].default_mount_label,
'default_mount_point': tool['app'].admin_menu_delete_button,
}
})
if c.project.is_root:
# subprojects only allowed on top-level projects (no nesting)
tools.append({
'name': 'subproject',
'description': "With a Sub Project you can add an entire project just like any other tool.",
'tool_label': 'Sub Project',
'defaults': {
'default_mount_label': 'Sub',
'default_mount_point': 'sub',
}
})
return {'tools': tools}
    @expose('json:')
    @require_post()
    def export(self, tools=None, send_email=False, with_attachments=False, **kw):
        """
        Initiate a bulk export of the project data.
        Must be given a list of tool mount points to include in the export.
        The list can either be comma-separated or a repeated param, e.g.,
        `export?tools=tickets&tools=discussion`.
        If the tools are not provided, an invalid mount point is listed, or
        there is some other problems with the arguments, a `400 Bad Request`
        response will be returned.
        If an export is already currently running for this project, a
        `503 Unavailable` response will be returned.
        Otherwise, a JSON object of the form
        `{"status": "in progress", "filename": FILENAME}` will be returned,
        where `FILENAME` is the filename of the export artifact relative to
        the users shell account directory.
        """
        if not asbool(config.get('bulk_export_enabled', True)):
            raise exc.HTTPNotFound()
        if not tools:
            raise exc.HTTPBadRequest(
                'Must give at least one tool mount point to export')
        tools = aslist(tools, ',')
        # every requested mount point must be exportable for this project
        exportable_tools = AdminApp.exportable_tools_for(c.project)
        allowed = {t.options.mount_point for t in exportable_tools}
        if not set(tools).issubset(allowed):
            raise exc.HTTPBadRequest('Invalid tool')
        if c.project.bulk_export_status() == 'busy':
            raise exc.HTTPServiceUnavailable(
                'Export for project %s already running' % c.project.shortname)
        # filename (potentially) includes a timestamp, so we have
        # to pre-generate to be able to return it to the user
        filename = c.project.bulk_export_filename()
        # the actual export runs asynchronously as a background task
        export_tasks.bulk_export.post(tools, filename, send_email=send_email, with_attachments=with_attachments)
        return {
            'status': 'in progress',
            'filename': filename,
        }
@expose('json:')
def admin_options(self, mount_point=None, **kw):
"""
Returns the admin options for a given mount_point
:type mount_point: str|allura.model.project.AppConfig
"""
if not mount_point:
raise exc.HTTPBadRequest('Must provide a mount point')
tool = c.project.app_instance(mount_point)
if tool is None:
raise exc.HTTPBadRequest('The mount point you provided was invalid')
admin_menu = tool.admin_menu()
if tool.admin_menu_delete_button:
admin_menu.append(tool.admin_menu_delete_button)
return {
'options': [dict(text=m.label, href=m.url, className=m.className)
for m in admin_menu]
}
@expose('json:')
def export_status(self, **kw):
"""
Check the status of a bulk export.
Returns an object containing only one key, `status`, whose value is
either `'busy'` or `'ready'`.
"""
status = c.project.bulk_export_status()
return {'status': status or 'ready'}
    @expose('json:')
    @require_post()
    def install_tool(self, tool=None, mount_point=None, mount_label=None, order=None, **kw):
        """API for installing tools in current project.

        Requires a valid tool, mount point and mount label names.
        (All arguments are required.)

        Usage example::

            POST to: /rest/p/testproject/admin/install_tool/ with params:
            {
                'tool': 'tickets',
                'mount_point': 'mountpoint',
                'mount_label': 'mountlabel',
                'order': 'first|last|alpha_tool'
            }

        Example output (in successful case)::

            {
                "info": "Tool tickets with mount_point mountpoint and mount_label mountlabel was created.",
                "success": true
            }
        """
        controller = ProjectAdminController()

        # All three names are mandatory; report a JSON error, not a 4xx.
        if not tool or not mount_point or not mount_label:
            return {
                'success': False,
                'info': 'All arguments required.'
            }
        installable_tools = AdminApp.installable_tools_for(c.project)
        tools_names = [t['name'] for t in installable_tools]
        # Unknown tool, or its installation limit has been reached.
        if not (tool in tools_names):
            return {
                'success': False,
                'info': 'Incorrect tool name, or limit is reached.'
            }
        # Mount points are unique within a project.
        if c.project.app_instance(mount_point) is not None:
            return {
                'success': False,
                'info': 'Mount point already exists.',
            }

        if order is None:
            order = 'last'
        # Build an ordinal-sorted picture of the whole project menu:
        # installed tools plus sub-projects.
        mounts = [{'ordinal': ac.options.ordinal,
                   'label': ac.options.mount_label,
                   'mount': ac.options.mount_point,
                   'type': ac.tool_name.lower()}
                  for ac in c.project.app_configs]
        subs = {p.shortname: p for p in M.Project.query.find({'parent_id': c.project._id})}
        for sub in subs.values():
            mounts.append({'ordinal': sub.ordinal,
                           'mount': sub.shortname,
                           'type': 'sub-project'})
        mounts.sort(key=itemgetter('ordinal'))
        # Pick the insertion slot: first, last, or alphabetically among
        # existing tools of the same type.
        # NOTE(review): any other `order` value leaves `ordinal` unset and
        # raises NameError below — verify callers only pass the three
        # documented values.
        if order == 'first':
            ordinal = 0
        elif order == 'last':
            ordinal = len(mounts)
        elif order == 'alpha_tool':
            tool = tool.lower()
            for i, mount in enumerate(mounts):
                if mount['type'] == tool and mount['label'] > mount_label:
                    ordinal = i
                    break
            else:
                ordinal = len(mounts)
        # Insert a placeholder and re-number every existing mount so the
        # new tool gets `ordinal` and the rest shift accordingly.
        mounts.insert(ordinal, {'ordinal': ordinal, 'type': 'new'})
        for i, mount in enumerate(mounts):
            if mount['type'] == 'new':
                pass
            elif mount['type'] == 'sub-project':
                subs[mount['mount']].ordinal = i
            else:
                c.project.app_config(mount['mount']).options.ordinal = i
        # Delegate the actual install to the regular admin controller.
        data = {
            'install': 'install',
            'ep_name': tool,
            'ordinal': ordinal,
            'mount_point': mount_point,
            'mount_label': mount_label
        }
        params = {
            'new': data
        }
        if kw:
            params.update(**kw)
        try:
            controller._update_mounts(**params)
        except forge_exc.ForgeError as e:
            return {
                'success': False,
                'info': str(e),
            }
        return {
            'success': True,
            'info': 'Tool %s with mount_point %s and mount_label %s was created.'
                    % (tool, mount_point, mount_label)
        }
@expose()
def _lookup(self, *args):
if len(args) == 0:
raise exc.HTTPNotFound(args)
name, remainder = args[0], args[1:]
app = c.project.app_instance(name)
if app is None or app.admin_api_root is None:
raise exc.HTTPNotFound(name)
return app.admin_api_root, remainder
class PermissionsController(BaseController):
    """Admin controller for editing per-permission ACLs on subprojects."""

    def _check_security(self):
        # Do not allow access to 'permissions' page for root projects.
        # Users should use 'groups' instead. This is to prevent creating 'private' projects
        # - subprojects are still allowed.
        # - tools pages are also still allowed, but are in a different controller
        if c.project.is_root:
            redirect('../groups')
        require_access(c.project, 'admin')

    @with_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_permissions.html')
    def index(self, **kw):
        # One "card" widget is rendered per permission.
        c.card = W.permission_card
        return dict(permissions=self._index_permissions())

    @without_trailing_slash
    @expose()
    @h.vardec
    @require_post()
    def update(self, card=None, **kw):
        """Rebuild the project ACL from the submitted permission cards.

        ``card`` is a list of dicts (decoded by ``h.vardec``) with an ``id``
        (permission name) plus ``value``/``new`` role-id lists.
        """
        permissions = self._index_permissions()
        old_permissions = dict(permissions)
        for args in card:
            perm = args['id']
            new_group_ids = args.get('new', [])
            group_ids = args.get('value', [])
            # vardec may hand a bare string instead of a one-element list.
            if isinstance(new_group_ids, str):
                new_group_ids = [new_group_ids]
            if isinstance(group_ids, str):
                group_ids = [group_ids]
            # make sure the admin group has the admin permission
            if perm == 'admin':
                # Subprojects inherit their Admin role from the parent.
                if c.project.is_root:
                    pid = c.project._id
                else:
                    pid = c.project.parent_id
                admin_group_id = str(
                    M.ProjectRole.query.get(project_id=pid, name='Admin')._id)
                if admin_group_id not in group_ids + new_group_ids:
                    flash(
                        'You cannot remove the admin group from the admin permission.', 'warning')
                    group_ids.append(admin_group_id)
            permissions[perm] = []
            role_ids = list(map(ObjectId, group_ids + new_group_ids))
            permissions[perm] = role_ids
        # Rebuild the ACL entirely from the submitted state.
        c.project.acl = []
        for perm, role_ids in permissions.items():
            # Human-readable, stable role-name list for the audit entry.
            role_names = lambda ids: ','.join(sorted(
                pr.name for pr in M.ProjectRole.query.find(dict(_id={'$in': ids}))))
            old_role_ids = old_permissions.get(perm, [])
            if old_role_ids != role_ids:
                M.AuditLog.log('updated "%s" permissions: "%s" => "%s"',
                               perm, role_names(old_role_ids), role_names(role_ids))
            c.project.acl += [M.ACE.allow(rid, perm) for rid in role_ids]
        g.post_event('project_updated')
        redirect('.')

    def _index_permissions(self):
        """Map each project permission name -> list of role ids allowed it."""
        permissions = {
            p: [] for p in c.project.permissions}
        for ace in c.project.acl:
            if ace.access == M.ACE.ALLOW:
                permissions[ace.permission].append(ace.role_id)
        return permissions
class GroupsController(BaseController):
    """Admin controller for managing a project's user groups (roles)."""

    def _check_security(self):
        require_access(c.project, 'admin')

    def _index_permissions(self):
        """Map each project permission name -> list of role ids allowed it."""
        permissions = {
            p: [] for p in c.project.permissions}
        for ace in c.project.acl:
            if ace.access == M.ACE.ALLOW:
                permissions[ace.permission].append(ace.role_id)
        return permissions

    def _map_group_permissions(self):
        """Build, per role id, display info for every permission.

        Each entry records whether the role has the permission directly
        ('yes'), inherits it from a child role or from Anonymous/Authenticated
        ('inherit'), or lacks it ('no').
        """
        roles = c.project.named_roles
        permissions = self._index_permissions()
        permissions_by_role = dict()
        auth_role = M.ProjectRole.authenticated()
        anon_role = M.ProjectRole.anonymous()
        for role in roles + [auth_role, anon_role]:
            permissions_by_role[str(role._id)] = []
            for perm in permissions:
                perm_info = dict(has="no", text="Does not have permission %s" %
                                 perm, name=perm)
                role_ids = permissions[perm]
                if role._id in role_ids:
                    perm_info['text'] = "Has permission %s" % perm
                    perm_info['has'] = "yes"
                else:
                    for r in role.child_roles():
                        if r._id in role_ids:
                            perm_info['text'] = "Inherited permission {} from {}".format(
                                perm, r.name)
                            perm_info['has'] = "inherit"
                            break
                    if perm_info['has'] == "no":
                        # Everyone is implicitly in Anonymous; logged-in
                        # users are additionally in Authenticated.
                        if anon_role._id in role_ids:
                            perm_info[
                                'text'] = "Inherited permission %s from Anonymous" % perm
                            perm_info['has'] = "inherit"
                        elif auth_role._id in role_ids and role != anon_role:
                            perm_info[
                                'text'] = "Inherited permission %s from Authenticated" % perm
                            perm_info['has'] = "inherit"
                permissions_by_role[str(role._id)].append(perm_info)
        return permissions_by_role

    @without_trailing_slash
    @expose()
    @require_post()
    @h.vardec
    def delete_group(self, group_name, **kw):
        """Delete a named group (project role) if it exists."""
        role = M.ProjectRole.by_name(group_name)
        if not role:
            flash('Group "%s" does not exist.' % group_name, 'error')
        else:
            role.delete()
            M.AuditLog.log('delete group %s', group_name)
            flash('Group "%s" deleted successfully.' % group_name)
            g.post_event('project_updated')
        redirect('.')

    @with_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_groups.html')
    def index(self, **kw):
        """Render the group/permission management page."""
        c.card = W.group_card
        permissions_by_role = self._map_group_permissions()
        auth_role = M.ProjectRole.authenticated()
        anon_role = M.ProjectRole.anonymous()
        roles = c.project.named_roles
        roles.append(None)  # placeholder for the "new group" card
        return dict(roles=roles, permissions_by_role=permissions_by_role,
                    auth_role=auth_role, anon_role=anon_role)

    @without_trailing_slash
    @expose('json:')
    @require_post()
    @h.vardec
    def change_perm(self, role_id, permission, allow="true", **kw):
        """Grant or revoke a single permission for a group."""
        if allow == "true":
            M.AuditLog.log('granted permission %s to group %s', permission,
                           M.ProjectRole.query.get(_id=ObjectId(role_id)).name)
            c.project.acl.append(M.ACE.allow(ObjectId(role_id), permission))
        else:
            # The Admin group may never lose the 'admin' permission.
            admin_group_id = str(M.ProjectRole.by_name('Admin')._id)
            if admin_group_id == role_id and permission == 'admin':
                return dict(error='You cannot remove the admin permission from the admin group.')
            M.AuditLog.log('revoked permission %s from group %s', permission,
                           M.ProjectRole.query.get(_id=ObjectId(role_id)).name)
            c.project.acl.remove(M.ACE.allow(ObjectId(role_id), permission))
        g.post_event('project_updated')
        return self._map_group_permissions()

    @without_trailing_slash
    @expose('json:')
    @require_post()
    @h.vardec
    def add_user(self, role_id, username, **kw):
        """Add a user to a group; new Admins get subscribed to all tools."""
        if not username or username == '*anonymous':
            return dict(error='You must choose a user to add.')
        group = M.ProjectRole.query.get(_id=ObjectId(role_id))
        user = M.User.query.get(username=username.strip(), pending=False)

        if not group:
            return dict(error='Could not find group with id %s' % role_id)
        if not user:
            return dict(error='User %s not found' % username)
        user_role = M.ProjectRole.by_user(user, upsert=True)
        if group._id in user_role.roles:
            return dict(error=f'{user.display_name} ({username}) is already in the group {group.name}.')
        M.AuditLog.log('add user %s to %s', username, group.name)
        user_role.roles.append(group._id)
        if group.name == 'Admin':
            for ac in c.project.app_configs:
                c.project.app_instance(ac).subscribe(user)
        g.post_event('project_updated')
        return dict(username=username, displayname=user.display_name)

    @without_trailing_slash
    @expose('json:')
    @require_post()
    @h.vardec
    def remove_user(self, role_id, username, **kw):
        """Remove a user from a group; ex-Admins get unsubscribed."""
        group = M.ProjectRole.query.get(_id=ObjectId(role_id))
        user = M.User.by_username(username.strip())
        # Bug fix: validate the lookups *before* dereferencing group.name —
        # previously an unknown role_id raised AttributeError on None.
        if not group:
            return dict(error='Could not find group with id %s' % role_id)
        if not user:
            return dict(error='User %s not found' % username)
        if group.name == 'Admin' and len(group.users_with_role()) == 1:
            return dict(error='You must have at least one user with the Admin role.')
        user_role = M.ProjectRole.by_user(user)
        if not user_role or group._id not in user_role.roles:
            return dict(error=f'{user.display_name} ({username}) is not in the group {group.name}.')
        M.AuditLog.log('remove user %s from %s', username, group.name)
        user_role.roles.remove(group._id)
        if len(user_role.roles) == 0:
            # user has no roles in this project any more, so don't leave a useless doc around
            user_role.delete()
        if group.name == 'Admin':
            for ac in c.project.app_configs:
                c.project.app_instance(ac).unsubscribe(user)
        g.post_event('project_updated')
        return dict()

    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_group.html')
    def new(self):
        """Render the "create new group" form."""
        c.form = W.new_group_settings
        return dict(
            group=None,
            action="create")

    @expose()
    @require_post()
    @validate(W.new_group_settings)
    @h.vardec
    def create(self, name=None, **kw):
        """Create a new named group unless the name is already taken."""
        if M.ProjectRole.by_name(name):
            flash('%s already exists' % name, 'error')
        else:
            M.ProjectRole(project_id=c.project._id, name=name)
        M.AuditLog.log('create group %s', name)
        g.post_event('project_updated')
        redirect('.')
class AuditController(BaseController):
    """Admin controller showing the project's audit log."""

    @with_trailing_slash
    @expose('jinja:allura.ext.admin:templates/audit.html')
    def index(self, limit=25, page=0, **kwargs):
        """Render one page of audit-log entries, newest first."""
        limit = int(limit)
        page = int(page)
        count = M.AuditLog.query.find(dict(project_id=c.project._id)).count()
        entries = M.AuditLog.query.find(dict(project_id=c.project._id))
        entries = entries.sort('timestamp', -1).skip(page * limit)
        # With fewer entries than one page, report the actual count as the
        # page size instead of truncating.
        if count > limit:
            entries = entries.limit(limit)
        else:
            limit = count
        c.widget = W.audit
        return dict(
            entries=entries.all(),
            limit=limit,
            page=page,
            count=count)
class AdminAppAdminController(DefaultAdminController):
    '''Administer the admin app

    No extra admin options beyond the defaults are exposed for the admin
    tool itself.
    '''
    pass
| apache/allura | Allura/allura/ext/admin/admin_main.py | Python | apache-2.0 | 50,637 |
"""
1. Converting markdown files into jupyter notebooks
2. Remove filename headers, such as from P01-C01-xx.ipynb to xx.ipynb
"""
import notedown
import glob
import pkg_resources
import nbformat
import re
import shutil
import os
import time
import tarfile
from zipfile import ZipFile
# timeout in second to evaluate a notebook
timeout = 1000
# limit the number of lines in a cell output
max_output_length = 500
# the files will be ingored for execution
ignore_execution = ['install', 'aws']
def _replace_ext(fname, new_ext):
    """Return *fname* with its last extension replaced by *new_ext*.

    A filename without any '.' is returned unchanged.
    """
    if '.' not in fname:
        return fname
    stem, _sep, _old_ext = fname.rpartition('.')
    return stem + '.' + new_ext
def _get_new_fname(fname):
    """chapter01-something/haha.ipynb -> haha.ipynb

    Names without a chapter-directory prefix are returned unchanged.
    """
    # Raw string: '\d', '\-' and '\w' are invalid escape sequences in a
    # plain string literal (DeprecationWarning, future SyntaxError).
    header_re = re.compile(r"(chapter[\d\-\w]+/)(.*)")
    m = header_re.match(fname)
    return m.groups()[1] if m else fname
def _has_output(notebook):
    """Return True iff any cell of *notebook* carries non-empty outputs."""
    return any('outputs' in cell and cell['outputs']
               for cell in notebook.cells)
def convert_md():
    """Find all markdown files, convert into jupyter notebooks.

    Returns a list of (source, generated) filename pairs so the generated
    files can be cleaned up and redirected later.
    """
    converted_files = []
    reader = notedown.MarkdownReader(match='strict')
    files = glob.glob('*/*.md')
    # evaluate the newest file first, so we can catch errors earlier
    files.sort(key=os.path.getmtime, reverse=True)

    # Evaluation runs only when *both* DO_EVAL and EVAL are truthy;
    # setting either env var to "0" skips notebook execution.
    do_eval = int(os.environ.get('DO_EVAL', True))
    if do_eval:
        do_eval = int(os.environ.get('EVAL', True))

    if not do_eval:
        print('=== Will skip evaluating notebooks')

    for fname in files:
        new_fname = _get_new_fname(fname)
        # parse if each markdown file is actually a jupyter notebook
        with open(fname, 'r') as fp:
            data = fp.read()
            valid = '```{.python .input' in data or '```python' in data
            if not valid:
                # Plain markdown: just strip the chapter prefix and copy.
                if new_fname != fname:
                    print('=== Rename %s -> %s' % (fname, new_fname))
                    shutil.copyfile(fname, new_fname)
                    converted_files.append((fname, new_fname))
                continue

        # read
        with open(fname, 'r') as f:
            notebook = reader.read(f)

        # Skip execution for notebooks that already carry outputs and for
        # files on the ignore_execution list (install/aws).
        if do_eval and not (_has_output(notebook) or
                            any([i in fname for i in ignore_execution])):
            print('=== Evaluate %s with timeout %d sec'%(fname, timeout))
            tic = time.time()
            # update from ../data to data
            for c in notebook.cells:
                if c.get('cell_type', None) == 'code':
                    c['source'] = c['source'].replace(
                        '"../data', '"data').replace("'../data", "'data")
            notedown.run(notebook, timeout)
            print('=== Finished in %f sec'%(time.time()-tic))

        # Although checked again later, validating here surfaces errors
        # before all notebooks are evaluated.
        _check_notebook(notebook)

        # write
        # need to add language info to for syntax highlight
        notebook['metadata'].update({'language_info':{'name':'python'}})
        new_fname = _replace_ext(new_fname, 'ipynb')
        print('=== Convert %s -> %s' % (fname, new_fname))
        with open(new_fname, 'w') as f:
            f.write(nbformat.writes(notebook))
        converted_files.append((fname, new_fname))

    return converted_files
def rename_ipynb():
    """Copy every top-level .ipynb whose name carries a chapter prefix
    to its flattened name; return the (old, new) pairs."""
    renamed = []
    for fname in glob.glob('*.ipynb'):
        target = _get_new_fname(fname)
        if target == fname:
            continue
        print('=== Rename %s -> %s' % (fname, target))
        shutil.copyfile(fname, target)
        renamed.append((fname, target))
    return renamed
def update_links(app, docname, source):
    """Sphinx 'source-read' hook: rewrite links like C01-P01-haha.md into
    haha.html inside markdown/notebook/rst sources."""
    def _new_url(m):
        # Normalize relative prefixes, then flatten chapter-prefixed
        # targets and point them at the generated .html page.
        assert len(m.groups()) == 1, m
        url = m.groups()[0]
        if url.startswith('./'):
            url = url[2:]
        if url.startswith('../'):
            url = url[3:]
        if _get_new_fname(url) != url:
            url = _replace_ext(_get_new_fname(url), 'html')
        return url

    for i, j in enumerate(source):
        if os.path.exists(docname + '.md') or os.path.exists(docname + '.ipynb'):
            # Raw strings: '\]', '\(' etc. are invalid escape sequences in
            # plain string literals (DeprecationWarning, future SyntaxError).
            source[i] = re.sub(r'\]\(([\w/.-]*)\)',
                               lambda m: '](' + _new_url(m) + ')', j)
        elif os.path.exists(docname + '.rst'):
            source[i] = re.sub(r'\<([\w/.-]*)\>`\_',
                               lambda m: '<' + _new_url(m) + '>`_', j)
def _check_notebook(notebook):
    """Fail if any code cell raised an exception or produced oversized output."""
    # TODO(mli) lint check
    for cell in notebook.cells:
        if 'outputs' not in cell:
            continue
        src = cell['source']
        nlines = 0
        try:
            for out in cell['outputs']:
                if 'text' in out:
                    nlines += len(out['text'].split('\n'))
                assert 'traceback' not in out, '%s, %s' % (out['ename'], out['evalue'])
            assert nlines < max_output_length, 'Too long cell output'
        except AssertionError:
            # Show the offending cell's source before re-raising.
            print('This cell\'s output contains error:\n')
            print('-' * 40)
            print(src)
            print('-' * 40)
            raise
def check_output(app, exception):
    """Sphinx hook: re-validate the outputs of every generated notebook."""
    for fname in glob.glob('*.ipynb'):
        print('=== Check ' + fname)
        with open(fname, 'r') as fp:
            _check_notebook(nbformat.read(fp, as_version=4))
def _release_notebook(dst_dir):
    """convert .md into notebooks and make a zip file

    Builds gluon_tutorials_zh.zip and .tar.gz inside *dst_dir*, bundling
    the converted notebooks plus static assets, then removes the
    intermediate .ipynb files.
    """
    reader = notedown.MarkdownReader(match='strict')
    files = glob.glob('*/*.md')
    # Static assets shipped alongside the notebooks.
    package_files = ['environment.yml', 'utils.py', 'README.md', 'LICENSE']
    package_files.extend(glob.glob('img/*'))
    package_files.extend(glob.glob('data/*'))
    for fname in files:
        # parse if each markdown file is actually a jupyter notebook
        with open(fname, 'r') as fp:
            valid = '```{.python .input' in fp.read()
            if not valid:
                # Plain markdown ships as-is.
                package_files.append(fname)
                continue
        # read
        with open(fname, 'r') as f:
            notebook = reader.read(f)
        # write
        new_fname = _replace_ext(fname, 'ipynb')
        with open(new_fname, 'w') as f:
            f.write(nbformat.writes(notebook))
        package_files.append(new_fname)
    print('=== Packing ', package_files)
    # Produce both a .zip and a .tar.gz archive with identical contents.
    with ZipFile(os.path.join(dst_dir, 'gluon_tutorials_zh.zip'), 'w') as pkg:
        for f in package_files:
            pkg.write(f)
    with tarfile.open(
            os.path.join(dst_dir, 'gluon_tutorials_zh.tar.gz'), "w:gz") as tar:
        for f in package_files:
            tar.add(f)
    # Only the archives are kept; drop the intermediate notebooks.
    for f in glob.glob('*/*.ipynb'):
        os.remove(f)
# Run the conversion/renaming at import time so Sphinx sees the generated
# .ipynb sources; the hooks below clean them up and emit redirects.
converted_files = convert_md()
renamed_files = rename_ipynb()
ignore_list = [f for f,_ in converted_files + renamed_files]
def remove_generated_files(app, exception):
    """Sphinx 'build-finished' hook: delete files generated at import time."""
    for _src, generated in renamed_files + converted_files:
        print('=== Remove %s' % (generated))
        os.remove(generated)
def release_notebook(app, exception):
    """Sphinx 'build-finished' hook: package tutorials into the output dir."""
    _release_notebook(app.builder.outdir)
def generate_htaccess(app, exception):
    """Sphinx hook: write an Apache .htaccess with HTTPS enforcement and
    redirects from the old chapter-prefixed URLs to the flattened ones."""
    print('=== Generate .htaccess file')
    with open(app.builder.outdir + '/.htaccess', 'w') as f:
        f.write('ErrorDocument 404 https://zh.gluon.ai/404.html\n')
        # force to use https
        f.write('RewriteEngine On\n')
        f.write('RewriteCond %{SERVER_PORT} 80\n')
        f.write('RewriteRule ^(.*)$ https://zh.gluon.ai/$1 [R,L]\n')
        # One redirect per renamed/converted page.
        for old, new in renamed_files + converted_files:
            f.write('Redirect /%s /%s\n'%(
                _replace_ext(old, 'html'), _replace_ext(new, 'html')
            ))
| libennext/gluon-tutorials-zh | sphinx_plugin.py | Python | apache-2.0 | 7,810 |
from sys import argv
from pathlib import Path
import matplotlib as mpl
mpl.use('Agg')
import seaborn as sns
sns.set_style("darkgrid")
import matplotlib.pyplot as plt
import pandas as pd
# python plot.py "$(ls -t log/* | head -n 1)" name
# from keras.utils import plot_model
# plot_model(model, to_file='model.png', show_shapes=True, show_layer_names=False)
def plot_svg(log, name, n_epochs=None):
    """Plot loss/accuracy curves from a Keras CSVLogger file.

    log -- path of the CSV training log
    name -- basename for the generated ./graph/<name>_{loss,acc}.svg files
    n_epochs -- plot only the first n_epochs rows (default: all rows)
    """
    df = pd.read_csv(log)
    graph = Path('./graph/')
    # Bug fix: matplotlib's savefig() does not create missing directories,
    # so make sure the output directory exists first.
    graph.mkdir(parents=True, exist_ok=True)
    loss_path = graph / (name + '_loss.svg')
    acc_path = graph / (name + '_acc.svg')
    n_epochs = n_epochs or df.shape[0]

    print('min loss:', df['val_loss'].min())
    print('max acc :', df['val_binary_accuracy'].max())

    keys = ['loss', 'val_loss']
    ax = df[keys][:n_epochs].plot(kind='line')
    ax.set_xlabel('epoch')
    ax.set_ylabel('loss(binary crossentropy)')
    plt.savefig(str(loss_path))

    keys = ['binary_accuracy', 'val_binary_accuracy']
    ax = df[keys][:n_epochs].plot(kind='line')
    ax.set_xlabel('epoch')
    ax.set_ylabel('accuracy')
    plt.savefig(str(acc_path))
if __name__ == '__main__':
if len(argv) == 3:
plot_svg(argv[1], argv[2])
else:
plot_svg(argv[1], argv[2], int(argv[3])) | amoshyc/tthl-code | plot.py | Python | apache-2.0 | 1,206 |
#
#
# Copyright (C) 2012 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Module containing utilities for virtual clusters.
Most functions manipulate file system paths and are no-ops when the environment
variables C{GANETI_ROOTDIR} and C{GANETI_HOSTNAME} are not set. See the
functions' docstrings for details.
"""
import os
from ganeti import compat
from ganeti import _constants
# cannot use constants, as this would cause a circular import
ETC_HOSTS = _constants.V_CLUSTER_ETC_HOSTS
_VIRT_PATH_PREFIX = _constants.V_CLUSTER_VIRT_PATH_PREFIX
_ROOTDIR_ENVNAME = _constants.V_CLUSTER_ROOTDIR_ENVNAME
_HOSTNAME_ENVNAME = _constants.V_CLUSTER_HOSTNAME_ENVNAME
#: List of paths which shouldn't be virtualized
_VPATH_WHITELIST = _constants.V_CLUSTER_VPATH_WHITELIST
def _GetRootDirectory(envname):
  """Retrieves root directory from an environment variable.

  @type envname: string
  @param envname: Environment variable name
  @rtype: string
  @return: Root directory (can be empty)

  """
  value = os.getenv(envname)

  if not value:
    return ""

  if not os.path.isabs(value):
    raise RuntimeError("Root directory in '%s' must be absolute: %s" %
                       (envname, value))

  return os.path.normpath(value)
def _GetHostname(envname):
  """Retrieves virtual hostname from an environment variable.

  @type envname: string
  @param envname: Environment variable name
  @rtype: string
  @return: Host name (can be empty)

  """
  # Equivalent to os.getenv(envname, default="").
  return os.environ.get(envname, "")
def _CheckHostname(hostname):
  """Very basic check for hostnames.

  @type hostname: string
  @param hostname: Hostname

  """
  # The hostname is later used as a single path component, so it must not
  # contain directory separators.
  if hostname != os.path.basename(hostname):
    raise RuntimeError("Hostname '%s' can not be used for a file system"
                       " path" % hostname)
def _PreparePaths(rootdir, hostname):
  """Checks if the root directory and hostname are acceptable.

  The (node-specific) root directory must have the hostname as its last
  component. The parent directory then becomes the cluster-wide root
  directory. This is necessary as some components must be able to predict
  the root path on a remote node (e.g. copying files via scp).

  @type rootdir: string
  @param rootdir: Root directory (from environment)
  @type hostname: string
  @param hostname: Hostname (from environment)
  @rtype: tuple; (string, string, string or None)
  @return: Tuple containing cluster-global root directory, node root
    directory and virtual hostname

  """
  # The two settings only make sense together.
  if bool(rootdir) != bool(hostname):
    raise RuntimeError("Both root directory and hostname must be specified"
                       " using the environment variables %s and %s" %
                       (_ROOTDIR_ENVNAME, _HOSTNAME_ENVNAME))

  if not rootdir:
    # Not running in a virtual cluster.
    return ("", "", None)

  assert rootdir == os.path.normpath(rootdir), "Not normalized: " + rootdir

  _CheckHostname(hostname)

  if os.path.basename(rootdir) != hostname:
    raise RuntimeError("Last component of root directory ('%s') must match"
                       " hostname ('%s')" % (rootdir, hostname))

  return (os.path.dirname(rootdir), rootdir, hostname)
# Resolve the virtual-cluster configuration once at import time; the
# path-manipulation helpers below default to these values.
(_VIRT_BASEDIR, _VIRT_NODEROOT, _VIRT_HOSTNAME) = \
  _PreparePaths(_GetRootDirectory(_ROOTDIR_ENVNAME),
                _GetHostname(_HOSTNAME_ENVNAME))

# Either all three are set (virtual cluster active) or none are.
assert (compat.all([_VIRT_BASEDIR, _VIRT_NODEROOT, _VIRT_HOSTNAME]) or
        not compat.any([_VIRT_BASEDIR, _VIRT_NODEROOT, _VIRT_HOSTNAME]))
def GetVirtualHostname():
  """Returns the virtual hostname.

  @rtype: string or L{None}
  @return: hostname configured via the vcluster environment variables, or
    L{None} when not running inside a virtual cluster

  """
  return _VIRT_HOSTNAME
def MakeNodeRoot(base, node_name):
  """Appends a node name to the base directory.

  @raise RuntimeError: if the node name is not usable as a path component

  """
  # Inlined hostname sanity check: the name must be a single path
  # component (no separators).
  if os.path.basename(node_name) != node_name:
    raise RuntimeError("Hostname '%s' can not be used for a file system"
                       " path" % node_name)

  return os.path.normpath("%s/%s" % (base, node_name))
def ExchangeNodeRoot(node_name, filename,
                     _basedir=_VIRT_BASEDIR, _noderoot=_VIRT_NODEROOT):
  """Replaces the node-specific root directory in a path.

  Replaces it with the root directory for another node. Assuming
  C{/tmp/vcluster/node1} is the root directory for C{node1}, the result will be
  C{/tmp/vcluster/node3} for C{node3} (as long as a root directory is specified
  in the environment).

  """
  if _basedir:
    # Strip the current node's prefix, then re-prefix with the target
    # node's root directory.
    pure = _RemoveNodePrefix(filename, _noderoot=_noderoot)
    result = "%s/%s" % (MakeNodeRoot(_basedir, node_name), pure)
  else:
    # Not a virtual cluster: paths are shared between "nodes".
    result = filename

  return os.path.normpath(result)
def EnvironmentForHost(hostname, _basedir=_VIRT_BASEDIR):
  """Returns the environment variables for a host.

  @type hostname: string
  @param hostname: Host name to build the environment for
  @rtype: dict
  @return: environment variables selecting the host's virtual root
    directory, or an empty dict when virtual clusters are not in use

  """
  if _basedir:
    return {
      _ROOTDIR_ENVNAME: MakeNodeRoot(_basedir, hostname),
      _HOSTNAME_ENVNAME: hostname,
      }
  else:
    return {}
def AddNodePrefix(path, _noderoot=_VIRT_NODEROOT):
  """Adds a node-specific prefix to a path in a virtual cluster.

  Returned path includes user-specified root directory if specified in
  environment. As an example, the path C{/var/lib/ganeti} becomes
  C{/tmp/vcluster/node1/var/lib/ganeti} if C{/tmp/vcluster/node1} is the root
  directory specified in the environment.

  """
  assert os.path.isabs(path), "Path not absolute: " + path

  if _noderoot:
    # Simple concatenation; normpath below collapses the double slash.
    result = "%s/%s" % (_noderoot, path)
  else:
    result = path

  assert os.path.isabs(result), "Path not absolute: " + path

  return os.path.normpath(result)
def _RemoveNodePrefix(path, _noderoot=_VIRT_NODEROOT):
  """Removes the node-specific prefix from a path.

  This is the opposite of L{AddNodePrefix} and removes a node-local prefix
  path.

  @raise RuntimeError: if the path does not lie below the node root

  """
  assert os.path.isabs(path), "Path not absolute: " + path

  norm_path = os.path.normpath(path)

  if _noderoot:
    # Make sure path is actually below node root
    norm_root = os.path.normpath(_noderoot)
    root_with_sep = "%s%s" % (norm_root, os.sep)
    prefix = os.path.commonprefix([root_with_sep, norm_path])

    if prefix == root_with_sep:
      # The remainder starts with os.sep, so it is still absolute.
      result = norm_path[len(norm_root):]
    else:
      raise RuntimeError("Path '%s' is not below node root '%s'" %
                         (path, _noderoot))
  else:
    result = norm_path

  assert os.path.isabs(result), "Path not absolute: " + path

  return result
def MakeVirtualPath(path, _noderoot=_VIRT_NODEROOT):
  """Virtualizes a path.

  A path is "virtualized" by stripping it of its node-specific directory and
  prepending a prefix (L{_VIRT_PATH_PREFIX}). Use L{LocalizeVirtualPath} to
  undo the process. Virtual paths are meant to be transported via RPC.

  """
  assert os.path.isabs(path), "Path not absolute: " + path

  # Whitelisted paths (e.g. a shared /etc/hosts) are never virtualized.
  if _noderoot and path not in _VPATH_WHITELIST:
    return _VIRT_PATH_PREFIX + _RemoveNodePrefix(path, _noderoot=_noderoot)
  else:
    return path
def LocalizeVirtualPath(path, _noderoot=_VIRT_NODEROOT):
  """Localizes a virtual path.

  A "virtualized" path consists of a prefix (L{LocalizeVirtualPath}) and a
  local path. This function adds the node-specific directory to the local path.
  Virtual paths are meant to be transported via RPC.

  @raise RuntimeError: if the path lacks the virtual-path prefix

  """
  assert os.path.isabs(path), "Path not absolute: " + path

  # Whitelisted paths are shared between nodes and passed through as-is.
  if _noderoot and path not in _VPATH_WHITELIST:
    if path.startswith(_VIRT_PATH_PREFIX):
      return AddNodePrefix(path[len(_VIRT_PATH_PREFIX):], _noderoot=_noderoot)
    else:
      raise RuntimeError("Path '%s' is not a virtual path" % path)
  else:
    return path
| leshchevds/ganeti | lib/vcluster.py | Python | bsd-2-clause | 8,507 |
import json
from pcs_test.tools.command_env.mock_node_communicator import (
place_multinode_call,
)
class StatusShortcuts:
    """Shortcut builder registering mocked cluster-status HTTP calls."""

    def __init__(self, calls):
        # Call queue shared with the test command environment.
        self.__calls = calls

    def get_full_cluster_status_plaintext(
        self,
        node_labels=None,
        communication_list=None,
        name="http.status.get_full_cluster_status_plaintext",
        hide_inactive_resources=False,
        verbose=False,
        cmd_status="success",
        cmd_status_msg="",
        report_list=None,
        cluster_status_plaintext="",
    ):
        # pylint: disable=too-many-arguments
        """
        Create a call for getting cluster status in plaintext

        node_labels list -- create success responses from these nodes
        communication_list list -- create custom responses
        name string -- the key of this call
        bool hide_inactive_resources -- input flag
        bool verbose -- input flag
        string cmd_status -- did the command succeed?
        string cmd_status_msg -- details for cmd_status
        iterable report_list -- reports from a remote node
        string cluster_status_plaintext -- resulting cluster status
        """
        # Avoids the mutable-default-argument pitfall.
        report_list = report_list or []
        place_multinode_call(
            self.__calls,
            name,
            node_labels,
            communication_list,
            action="remote/cluster_status_plaintext",
            # Request payload mirrors what the live client sends.
            param_list=[
                (
                    "data_json",
                    json.dumps(
                        dict(
                            hide_inactive_resources=hide_inactive_resources,
                            verbose=verbose,
                        )
                    ),
                )
            ],
            # Canned JSON response returned by the mocked node.
            output=json.dumps(
                dict(
                    status=cmd_status,
                    status_msg=cmd_status_msg,
                    data=cluster_status_plaintext,
                    report_list=report_list,
                )
            ),
        )
| feist/pcs | pcs_test/tools/command_env/config_http_status.py | Python | gpl-2.0 | 2,031 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
In this example, we connect a signal
of a QSlider to a slot
of a QLCDNumber.
"""
import sys
from PySide.QtGui import *
from PySide.QtCore import *
class Example(QWidget):
    """Window wiring a horizontal QSlider to a QLCDNumber via signal/slot."""

    def __init__(self):
        super(Example, self).__init__()

        display = QLCDNumber()
        slider = QSlider(Qt.Horizontal)

        # Every slider move updates the LCD readout.
        slider.valueChanged.connect(display.display)

        layout = QVBoxLayout()
        layout.addWidget(display)
        layout.addWidget(slider)
        self.setLayout(layout)

        self.setGeometry(300, 300, 250, 150)
        self.setWindowTitle('Signal & slot')
def main():
    """Create the Qt application, show the window, run the event loop."""
    app = QApplication(sys.argv)
    window = Example()
    window.show()
    sys.exit(app.exec_())


if __name__ == '__main__':
    main()
| madoodia/codeLab | pyside/signal_slot_tests/test_signal_slot.py | Python | mit | 752 |
from __future__ import unicode_literals
import os
import paramiko
from django.utils import six
from djblets.testing.decorators import add_fixtures
from reviewboard import scmtools
from reviewboard.hostingsvcs.models import HostingServiceAccount
from reviewboard.reviews.models import ReviewRequest
from reviewboard.scmtools.errors import (AuthenticationError,
UnverifiedCertificateError)
from reviewboard.scmtools.models import Repository, Tool
from reviewboard.ssh.client import SSHClient
from reviewboard.ssh.errors import (BadHostKeyError,
UnknownHostKeyError)
from reviewboard.testing.scmtool import TestTool
from reviewboard.webapi.errors import (BAD_HOST_KEY,
MISSING_USER_KEY,
REPO_AUTHENTICATION_ERROR,
UNVERIFIED_HOST_CERT,
UNVERIFIED_HOST_KEY)
from reviewboard.webapi.resources import resources
from reviewboard.webapi.tests.base import BaseWebAPITestCase
from reviewboard.webapi.tests.mimetypes import (repository_item_mimetype,
repository_list_mimetype)
from reviewboard.webapi.tests.mixins import BasicTestsMetaclass
from reviewboard.webapi.tests.urls import (get_repository_item_url,
get_repository_list_url)
# Only generate these keys once.
# RSA key generation is slow, so the keys are created at import time and
# shared by every host-key test below (key2 serves as the "expected" key
# in the bad-host-key scenarios).
key1 = paramiko.RSAKey.generate(1024)
key2 = paramiko.RSAKey.generate(1024)
class BaseRepositoryTests(BaseWebAPITestCase):
    """Base class for the RepositoryResource test suites."""

    fixtures = ['test_users', 'test_scmtools']

    # file:// URL to the Git repository shipped in scmtools' testdata,
    # usable as a real, checkable repository path.
    sample_repo_path = (
        'file://' + os.path.abspath(
            os.path.join(os.path.dirname(scmtools.__file__), 'testdata',
                         'git_repo')))

    def _verify_repository_info(self, rsp, repo_name, repo_path, data):
        """Assert that an API payload matches the stored Repository.

        Checks the response status, the path in both the payload and the
        database row, the name (unless the request archived the repository,
        which rewrites the name), and every extra field in ``data`` that
        maps onto a Repository attribute.
        """
        self.assertEqual(rsp['stat'], 'ok')
        self.assertIn('repository', rsp)

        repository = Repository.objects.get(pk=rsp['repository']['id'])

        self.assertEqual(rsp['repository']['path'], repo_path)
        self.assertEqual(repository.path, repo_path)

        # Archiving renames the repository, so skip the name check then.
        if not data.get('archive_name', False):
            self.assertEqual(rsp['repository']['name'], repo_name)
            self.assertEqual(repository.name, repo_name)

        for key, value in six.iteritems(data):
            if hasattr(repository, key):
                self.assertEqual(getattr(repository, key), value)
@six.add_metaclass(BasicTestsMetaclass)
class ResourceListTests(BaseRepositoryTests):
    """Testing the RepositoryResource list APIs."""

    sample_api_url = 'repositories/'
    resource = resources.repository
    basic_post_fixtures = ['test_scmtools']
    basic_post_use_admin = True

    def setUp(self):
        super(ResourceListTests, self).setUp()

        # Some tests will temporarily replace some functions, so back them up
        # so we can restore them.
        self._old_check_repository = TestTool.check_repository
        self._old_accept_certificate = TestTool.accept_certificate
        self._old_add_host_key = SSHClient.add_host_key
        self._old_replace_host_key = SSHClient.replace_host_key

    def tearDown(self):
        super(ResourceListTests, self).tearDown()

        # Undo any monkey-patching an individual test performed, so state
        # doesn't leak between tests.
        TestTool.check_repository = self._old_check_repository
        TestTool.accept_certificate = self._old_accept_certificate
        SSHClient.add_host_key = self._old_add_host_key
        SSHClient.replace_host_key = self._old_replace_host_key
    def compare_item(self, item_rsp, repository):
        # Minimal item comparison used by the metaclass-generated tests:
        # only the ID and path are verified.
        self.assertEqual(item_rsp['id'], repository.pk)
        self.assertEqual(item_rsp['path'], repository.path)

    #
    # HTTP GET tests
    #

    def setup_basic_get_test(self, user, with_local_site, local_site_name,
                             populate_items):
        # Hook for the metaclass-generated GET tests: returns the list URL,
        # the expected mimetype, and the items the list should contain.
        if populate_items:
            items = [
                self.create_repository(
                    tool_name='Test', with_local_site=with_local_site)
            ]
        else:
            items = []

        return (get_repository_list_url(local_site_name),
                repository_list_mimetype,
                items)
    @add_fixtures(['test_site'])
    def test_get_with_show_visible(self):
        """Testing the GET repositories/ API with show_invisible=True"""
        # NOTE(review): the method name says "show_visible", but this
        # exercises ?show-invisible=True — with it, both the hidden and the
        # visible repository must be returned. Consider renaming.
        self.create_repository(name='test1', tool_name='Test', visible=False)
        self.create_repository(name='test2', tool_name='Test', visible=True)

        rsp = self.api_get(get_repository_list_url(),
                           query={'show-invisible': True},
                           expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 2)
        self.assertEqual(rsp['repositories'][0]['name'], 'test1')
        self.assertEqual(rsp['repositories'][1]['name'], 'test2')
    def test_get_repositories_with_name(self):
        """Testing the GET repositories/?name= API"""
        self.create_repository(name='test1', tool_name='Test')
        self.create_repository(name='test2', tool_name='Test')

        # Only the repository whose name matches exactly should come back.
        rsp = self.api_get(get_repository_list_url() + '?name=test1',
                           expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 1)
        self.assertEqual(rsp['repositories'][0]['name'], 'test1')

    def test_get_repositories_with_name_many(self):
        """Testing the GET repositories/?name= API and comma-separated list"""
        self.create_repository(name='test1', tool_name='Test')
        self.create_repository(name='test2', tool_name='Test')
        self.create_repository(name='test3', tool_name='Test')

        # A comma-separated value acts as an OR filter across names.
        rsp = self.api_get(get_repository_list_url() + '?name=test1,test2',
                           expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 2)
        self.assertEqual(rsp['repositories'][0]['name'], 'test1')
        self.assertEqual(rsp['repositories'][1]['name'], 'test2')

    def test_get_repositories_with_path(self):
        """Testing the GET repositories/?path= API"""
        self.create_repository(name='test1', path='dummy1', tool_name='Test')
        self.create_repository(name='test2', path='dummy2', tool_name='Test')

        rsp = self.api_get(get_repository_list_url() + '?path=dummy1',
                           expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 1)
        self.assertEqual(rsp['repositories'][0]['name'], 'test1')

    def test_get_repositories_with_path_many(self):
        """Testing the GET repositories/?path= API and comma-separated lists"""
        self.create_repository(name='test1', path='dummy1', tool_name='Test')
        self.create_repository(name='test2', path='dummy2', tool_name='Test')
        self.create_repository(name='test3', path='dummy3', tool_name='Test')

        rsp = self.api_get(get_repository_list_url() + '?path=dummy1,dummy2',
                           expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 2)
        self.assertEqual(rsp['repositories'][0]['name'], 'test1')
        self.assertEqual(rsp['repositories'][1]['name'], 'test2')
    def test_get_repositories_with_name_or_path(self):
        """Testing the GET repositories/?name-or-path= API"""
        self.create_repository(name='test1', path='dummy1', tool_name='Test')
        self.create_repository(name='test2', path='dummy2', tool_name='Test')
        self.create_repository(name='test3', path='dummy3', tool_name='Test')

        # The filter must match against the name...
        rsp = self.api_get(get_repository_list_url() + '?name-or-path=test1',
                           expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 1)
        self.assertEqual(rsp['repositories'][0]['name'], 'test1')

        # ...and also against the path.
        rsp = self.api_get(get_repository_list_url() + '?name-or-path=dummy2',
                           expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 1)
        self.assertEqual(rsp['repositories'][0]['name'], 'test2')

    def test_get_repositories_with_name_or_path_many(self):
        """Testing the GET repositories/?name-or-path= API
        and comma-separated list
        """
        self.create_repository(name='test1', path='dummy1', tool_name='Test')
        self.create_repository(name='test2', path='dummy2', tool_name='Test')
        self.create_repository(name='test3', path='dummy3', tool_name='Test')

        rsp = self.api_get(
            get_repository_list_url() + '?name-or-path=test1,dummy2',
            expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 2)
        self.assertEqual(rsp['repositories'][0]['name'], 'test1')
        self.assertEqual(rsp['repositories'][1]['name'], 'test2')

    def test_get_repositories_with_tool(self):
        """Testing the GET repositories/?tool= API"""
        self.create_repository(name='test1', path='dummy1', tool_name='Git')
        self.create_repository(name='test2', path='dummy2', tool_name='Test')

        rsp = self.api_get(get_repository_list_url() + '?tool=Git',
                           expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 1)
        self.assertEqual(rsp['repositories'][0]['name'], 'test1')

    def test_get_repositories_with_tool_many(self):
        """Testing the GET repositories/?tool= API and comma-separated list"""
        self.create_repository(name='test1', path='dummy1', tool_name='Git')
        self.create_repository(name='test2', path='dummy2', tool_name='Test')
        self.create_repository(name='test3', path='dummy3',
                               tool_name='Subversion')

        rsp = self.api_get(get_repository_list_url() + '?tool=Git,Subversion',
                           expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 2)
        self.assertEqual(rsp['repositories'][0]['name'], 'test1')
        self.assertEqual(rsp['repositories'][1]['name'], 'test3')
    def test_get_repositories_with_hosting_service(self):
        """Testing the GET repositories/?hosting-service= API"""
        hosting_account = HostingServiceAccount.objects.create(
            service_name='github',
            username='my-username')

        Repository.objects.create(
            name='My New Repository',
            path='https://example.com',
            tool=Tool.objects.get(name='Git'),
            hosting_account=hosting_account)

        rsp = self.api_get(
            get_repository_list_url() + '?hosting-service=github',
            expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 1)
        self.assertEqual(rsp['repositories'][0]['name'],
                         'My New Repository')

    def test_get_repositories_with_hosting_service_many(self):
        """Testing the GET repositories/?hosting-service= API
        and comma-separated list
        """
        hosting_account = HostingServiceAccount.objects.create(
            service_name='github',
            username='my-username')

        Repository.objects.create(
            name='My New Repository 1',
            path='https://example.com',
            tool=Tool.objects.get(name='Git'),
            hosting_account=hosting_account)

        hosting_account = HostingServiceAccount.objects.create(
            service_name='beanstalk',
            username='my-username')

        Repository.objects.create(
            name='My New Repository 2',
            path='https://example.com',
            tool=Tool.objects.get(name='Subversion'),
            hosting_account=hosting_account)

        rsp = self.api_get(
            get_repository_list_url() + '?hosting-service=github,beanstalk',
            expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 2)
        self.assertEqual(rsp['repositories'][0]['name'],
                         'My New Repository 1')
        self.assertEqual(rsp['repositories'][1]['name'],
                         'My New Repository 2')

    def test_get_repositories_with_username(self):
        """Testing the GET repositories/?username= API"""
        hosting_account = HostingServiceAccount.objects.create(
            service_name='github',
            username='my-username')

        # The username filter should match both a hosting account's username
        # (repository 1) and a repository's own username field (repository 2).
        Repository.objects.create(
            name='My New Repository 1',
            path='https://example.com',
            tool=Tool.objects.get(name='Git'),
            hosting_account=hosting_account)

        Repository.objects.create(
            name='My New Repository 2',
            path='https://example.com',
            username='my-username',
            tool=Tool.objects.get(name='Subversion'))

        rsp = self.api_get(get_repository_list_url() + '?username=my-username',
                           expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 2)
        self.assertEqual(rsp['repositories'][0]['name'],
                         'My New Repository 1')
        self.assertEqual(rsp['repositories'][1]['name'],
                         'My New Repository 2')

    def test_get_repositories_with_username_many(self):
        """Testing the GET repositories/?username= API
        and comma-separated list
        """
        hosting_account = HostingServiceAccount.objects.create(
            service_name='github',
            username='my-username')

        Repository.objects.create(
            name='My New Repository 1',
            path='https://example.com',
            tool=Tool.objects.get(name='Git'),
            hosting_account=hosting_account)

        Repository.objects.create(
            name='My New Repository 2',
            path='https://example.com',
            username='my-username-2',
            tool=Tool.objects.get(name='Subversion'))

        rsp = self.api_get(
            get_repository_list_url() + '?username=my-username,my-username-2',
            expected_mimetype=repository_list_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(len(rsp['repositories']), 2)
        self.assertEqual(rsp['repositories'][0]['name'],
                         'My New Repository 1')
        self.assertEqual(rsp['repositories'][1]['name'],
                         'My New Repository 2')
    #
    # HTTP POST tests
    #

    def setup_basic_post_test(self, user, with_local_site, local_site_name,
                              post_valid_data):
        # Hook for the metaclass-generated POST tests: URL, expected
        # mimetype, a valid POST payload, and extra args passed through to
        # check_post_result.
        return (get_repository_list_url(local_site_name),
                repository_item_mimetype,
                {
                    'name': 'Test Repository',
                    'path': self.sample_repo_path,
                    'tool': 'Test',
                },
                [])

    def check_post_result(self, user, rsp):
        # Verifies the repository created by the metaclass-generated POST
        # test against the payload from setup_basic_post_test.
        self._verify_repository_info(rsp, 'Test Repository',
                                     self.sample_repo_path, {})
    def test_post_with_visible_False(self):
        """Testing the POST repositories/ API with visible=False"""
        self._login_user(admin=True)
        rsp = self._post_repository(False, data={'visible': False})
        self.assertEqual(rsp['repository']['visible'], False)

    def test_post_with_bad_host_key(self):
        """Testing the POST repositories/ API with Bad Host Key error"""
        hostname = 'example.com'
        key = key1
        expected_key = key2

        # Monkey-patch the repository check to always fail with a bad host
        # key; tearDown restores the original.
        @classmethod
        def _check_repository(cls, *args, **kwargs):
            raise BadHostKeyError(hostname, key, expected_key)

        TestTool.check_repository = _check_repository

        self._login_user(admin=True)
        rsp = self._post_repository(False, expected_status=403)

        # The error payload must carry enough detail for a client to prompt
        # the user about the mismatched key.
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], BAD_HOST_KEY.code)
        self.assertIn('hostname', rsp)
        self.assertIn('expected_key', rsp)
        self.assertIn('key', rsp)
        self.assertEqual(rsp['hostname'], hostname)
        self.assertEqual(rsp['expected_key'], expected_key.get_base64())
        self.assertEqual(rsp['key'], key.get_base64())
    def test_post_with_bad_host_key_and_trust_host(self):
        """Testing the POST repositories/ API
        with Bad Host Key error and trust_host=1
        """
        hostname = 'example.com'
        key = key1
        expected_key = key2

        # Mutable cell shared with the closures below (this file predates
        # `nonlocal`-style rebinding).
        saw = {'replace_host_key': False}

        def _replace_host_key(cls, _hostname, _expected_key, _key):
            self.assertEqual(hostname, _hostname)
            self.assertEqual(expected_key, _expected_key)
            self.assertEqual(key, _key)
            saw['replace_host_key'] = True

        # Fails until the key has been replaced, simulating the
        # verify-then-retry flow triggered by trust_host=1.
        @classmethod
        def _check_repository(cls, *args, **kwargs):
            if not saw['replace_host_key']:
                raise BadHostKeyError(hostname, key, expected_key)

        TestTool.check_repository = _check_repository
        SSHClient.replace_host_key = _replace_host_key

        self._login_user(admin=True)
        self._post_repository(False, data={
            'trust_host': 1,
        })

        self.assertTrue(saw['replace_host_key'])

    def test_post_with_unknown_host_key(self):
        """Testing the POST repositories/ API with Unknown Host Key error"""
        hostname = 'example.com'
        key = key1

        @classmethod
        def _check_repository(cls, *args, **kwargs):
            raise UnknownHostKeyError(hostname, key)

        TestTool.check_repository = _check_repository

        self._login_user(admin=True)
        rsp = self._post_repository(False, expected_status=403)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], UNVERIFIED_HOST_KEY.code)
        self.assertIn('hostname', rsp)
        self.assertIn('key', rsp)
        self.assertEqual(rsp['hostname'], hostname)
        self.assertEqual(rsp['key'], key.get_base64())

    def test_post_with_unknown_host_key_and_trust_host(self):
        """Testing the POST repositories/ API
        with Unknown Host Key error and trust_host=1
        """
        hostname = 'example.com'
        key = key1

        saw = {'add_host_key': False}

        def _add_host_key(cls, _hostname, _key):
            self.assertEqual(hostname, _hostname)
            self.assertEqual(key, _key)
            saw['add_host_key'] = True

        # Fails until the key has been added, so trust_host=1 must cause the
        # API to add the key and retry.
        @classmethod
        def _check_repository(cls, *args, **kwargs):
            if not saw['add_host_key']:
                raise UnknownHostKeyError(hostname, key)

        TestTool.check_repository = _check_repository
        SSHClient.add_host_key = _add_host_key

        self._login_user(admin=True)
        self._post_repository(False, data={
            'trust_host': 1,
        })

        self.assertTrue(saw['add_host_key'])
    def test_post_with_unknown_cert(self):
        """Testing the POST repositories/ API with Unknown Certificate error"""
        # Stand-in for the certificate object attached to the SCMTool error.
        class Certificate(object):
            failures = ['failures']
            fingerprint = 'fingerprint'
            hostname = 'example.com'
            issuer = 'issuer'
            valid_from = 'valid_from'
            valid_until = 'valid_until'

        cert = Certificate()

        @classmethod
        def _check_repository(cls, *args, **kwargs):
            raise UnverifiedCertificateError(cert)

        TestTool.check_repository = _check_repository

        self._login_user(admin=True)
        rsp = self._post_repository(False, expected_status=403)

        # All certificate fields must be serialized into the error payload.
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], UNVERIFIED_HOST_CERT.code)
        self.assertIn('certificate', rsp)
        self.assertEqual(rsp['certificate']['failures'], cert.failures)
        self.assertEqual(rsp['certificate']['fingerprint'], cert.fingerprint)
        self.assertEqual(rsp['certificate']['hostname'], cert.hostname)
        self.assertEqual(rsp['certificate']['issuer'], cert.issuer)
        self.assertEqual(rsp['certificate']['valid']['from'], cert.valid_from)
        self.assertEqual(rsp['certificate']['valid']['until'],
                         cert.valid_until)

    def test_post_with_unknown_cert_and_trust_host(self):
        """Testing the POST repositories/ API
        with Unknown Certificate error and trust_host=1
        """
        class Certificate(object):
            failures = ['failures']
            fingerprint = 'fingerprint'
            hostname = 'example.com'
            issuer = 'issuer'
            valid_from = 'valid_from'
            valid_until = 'valid_until'

        cert = Certificate()
        saw = {'accept_certificate': False}

        # Fails until the certificate is accepted; trust_host=1 should make
        # the API accept it and store the returned data in extra_data.
        @classmethod
        def _check_repository(cls, *args, **kwargs):
            if not saw['accept_certificate']:
                raise UnverifiedCertificateError(cert)

        @classmethod
        def _accept_certificate(cls, path, local_site_name=None):
            saw['accept_certificate'] = True
            return {
                'fingerprint': '123',
            }

        TestTool.check_repository = _check_repository
        TestTool.accept_certificate = _accept_certificate

        self._login_user(admin=True)
        rsp = self._post_repository(False, data={
            'trust_host': 1,
        })
        self.assertTrue(saw['accept_certificate'])

        # The accepted certificate info must be persisted on the repository.
        repository = Repository.objects.get(pk=rsp['repository']['id'])
        self.assertIn('cert', repository.extra_data)
        self.assertEqual(repository.extra_data['cert']['fingerprint'], '123')
    def test_post_with_missing_user_key(self):
        """Testing the POST repositories/ API with Missing User Key error"""
        # An AuthenticationError with publickey allowed but no user key maps
        # to the MISSING_USER_KEY API error.
        @classmethod
        def _check_repository(cls, *args, **kwargs):
            raise AuthenticationError(allowed_types=['publickey'],
                                      user_key=None)

        TestTool.check_repository = _check_repository

        self._login_user(admin=True)
        rsp = self._post_repository(False, expected_status=403)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], MISSING_USER_KEY.code)

    def test_post_with_authentication_error(self):
        """Testing the POST repositories/ API with Authentication Error"""
        @classmethod
        def _check_repository(cls, *args, **kwargs):
            raise AuthenticationError

        TestTool.check_repository = _check_repository

        self._login_user(admin=True)
        rsp = self._post_repository(False, expected_status=403)
        self.assertEqual(rsp['stat'], 'fail')
        self.assertEqual(rsp['err']['code'], REPO_AUTHENTICATION_ERROR.code)
        self.assertIn('reason', rsp)

    def test_post_full_info(self):
        """Testing the POST repositories/ API with all available info"""
        self._login_user(admin=True)
        self._post_repository(False, {
            'bug_tracker': 'http://bugtracker/%s/',
            'encoding': 'UTF-8',
            'mirror_path': 'http://svn.example.com/',
            'username': 'user',
            'password': '123',
            'public': False,
            'raw_file_url': 'http://example.com/<filename>/<version>',
        })

    def test_post_with_no_access(self):
        """Testing the POST repositories/ API with no access"""
        # A non-admin user may not create repositories.
        self._login_user()
        self._post_repository(False, expected_status=403)
def _post_repository(self, use_local_site, data={}, expected_status=201):
repo_name = 'Test Repository'
if 200 <= expected_status < 300:
expected_mimetype = repository_item_mimetype
else:
expected_mimetype = None
if use_local_site:
local_site_name = self.local_site_name
else:
local_site_name = None
rsp = self.api_post(
get_repository_list_url(local_site_name),
dict({
'name': repo_name,
'path': self.sample_repo_path,
'tool': 'Test',
}, **data),
expected_status=expected_status,
expected_mimetype=expected_mimetype)
if 200 <= expected_status < 300:
self._verify_repository_info(rsp, repo_name, self.sample_repo_path,
data)
self.assertEqual(
rsp['repository']['links']['self']['href'],
self.base_url +
get_repository_item_url(rsp['repository']['id'],
local_site_name))
return rsp
@six.add_metaclass(BasicTestsMetaclass)
class ResourceItemTests(BaseRepositoryTests):
    """Testing the RepositoryResource item APIs."""

    sample_api_url = 'repositories/<id>/'
    fixtures = ['test_users', 'test_scmtools']
    # Only GET is exercised by the metaclass-generated basic tests; DELETE
    # and PUT are covered by the explicit tests below.
    test_http_methods = ('GET',)
    resource = resources.repository

    def compare_item(self, item_rsp, repository):
        # Minimal item comparison for the metaclass-generated tests.
        self.assertEqual(item_rsp['id'], repository.pk)
        self.assertEqual(item_rsp['path'], repository.path)
    #
    # HTTP DELETE tests
    #

    def test_delete(self):
        """Testing the DELETE repositories/<id>/ API"""
        self._login_user(admin=True)
        repo_id = self._delete_repository(False, with_review_request=True)

        # A repository with review requests is only hidden, not removed,
        # so existing review requests stay valid.
        repo = Repository.objects.get(pk=repo_id)
        self.assertFalse(repo.visible)

    def test_delete_empty_repository(self):
        """Testing the DELETE repositories/<id>/ API with no review requests"""
        self._login_user(admin=True)
        repo_id = self._delete_repository(False)

        # With no review requests, the row is actually deleted.
        self.assertRaises(Repository.DoesNotExist,
                          Repository.objects.get,
                          pk=repo_id)

    @add_fixtures(['test_site'])
    def test_delete_with_site(self):
        """Testing the DELETE repositories/<id>/ API with a local site"""
        self._login_user(local_site=True, admin=True)
        repo_id = self._delete_repository(True, with_review_request=True)

        repo = Repository.objects.get(pk=repo_id)
        self.assertFalse(repo.visible)

    @add_fixtures(['test_site'])
    def test_delete_empty_repository_with_site(self):
        """Testing the DELETE repositories/<id>/ API
        with a local site and no review requests
        """
        self._login_user(local_site=True, admin=True)
        repo_id = self._delete_repository(True)

        self.assertRaises(Repository.DoesNotExist,
                          Repository.objects.get,
                          pk=repo_id)

    def test_delete_with_no_access(self):
        """Testing the DELETE repositories/<id>/ API with no access"""
        self._login_user()
        self._delete_repository(False, expected_status=403)

    @add_fixtures(['test_site'])
    def test_delete_with_site_no_access(self):
        """Testing the DELETE repositories/<id>/ API
        with a local site and no access
        """
        self._login_user(local_site=True)
        self._delete_repository(True, expected_status=403)
    #
    # HTTP GET tests
    #

    def setup_basic_get_test(self, user, with_local_site, local_site_name):
        # Hook for the metaclass-generated GET tests: item URL, expected
        # mimetype, and the object compare_item receives.
        repository = self.create_repository(with_local_site=with_local_site)

        return (get_repository_item_url(repository, local_site_name),
                repository_item_mimetype,
                repository)
    #
    # HTTP PUT tests
    #

    def test_put(self):
        """Testing the PUT repositories/<id>/ API"""
        self._login_user(admin=True)
        self._put_repository(False, {
            'bug_tracker': 'http://bugtracker/%s/',
            'encoding': 'UTF-8',
            'mirror_path': 'http://svn.example.com/',
            'username': 'user',
            'password': '123',
            'public': False,
            'raw_file_url': 'http://example.com/<filename>/<version>',
        })

    @add_fixtures(['test_site'])
    def test_put_with_site(self):
        """Testing the PUT repositories/<id>/ API with a local site"""
        self._login_user(local_site=True, admin=True)
        self._put_repository(True, {
            'bug_tracker': 'http://bugtracker/%s/',
            'encoding': 'UTF-8',
            'mirror_path': 'http://svn.example.com/',
            'username': 'user',
            'password': '123',
            'public': False,
            'raw_file_url': 'http://example.com/<filename>/<version>',
        })

    def test_put_with_no_access(self):
        """Testing the PUT repositories/<id>/ API with no access"""
        # A non-admin user may not modify repositories.
        self._login_user()
        self._put_repository(False, expected_status=403)
@add_fixtures(['test_site'])
def test_put_with_site_no_access(self):
"""Testing the PUT repositories/<id>/ API
with a local site and no access
"""
self._login_user(local_site=True)
self._put_repository(False, expected_status=403)
    def test_put_with_archive(self):
        """Testing the PUT repositories/<id>/ API with archive_name=True"""
        self._login_user(admin=True)
        repo_id = self._put_repository(False, {'archive_name': True})

        repo = Repository.objects.get(pk=repo_id)

        # Archiving renames the repository with an "ar:<old name>:" prefix,
        # marks it archived, and takes it out of public view.
        self.assertEqual(repo.name[:23], 'ar:New Test Repository:')
        self.assertTrue(repo.archived)
        self.assertFalse(repo.public)
        self.assertIsNotNone(repo.archived_timestamp)
def _put_repository(self, use_local_site, data={}, expected_status=200):
repo_name = 'New Test Repository'
repo = self.create_repository(with_local_site=use_local_site)
if use_local_site:
local_site_name = self.local_site_name
else:
local_site_name = None
if 200 <= expected_status < 300:
expected_mimetype = repository_item_mimetype
else:
expected_mimetype = None
rsp = self.api_put(
get_repository_item_url(repo, local_site_name),
dict({
'name': repo_name,
'path': self.sample_repo_path,
}, **data),
expected_status=expected_status,
expected_mimetype=expected_mimetype)
if 200 <= expected_status < 300:
self._verify_repository_info(rsp, repo_name, self.sample_repo_path,
data)
return repo.pk
    def _delete_repository(self, use_local_site, expected_status=204,
                           with_review_request=False):
        # Create a repository (optionally on the local site), optionally
        # attach a review request, then DELETE it. Returns the pk so callers
        # can check whether the row was archived or actually deleted.
        repo = self.create_repository(with_local_site=use_local_site)

        if use_local_site:
            local_site_name = self.local_site_name
        else:
            local_site_name = None

        if with_review_request:
            request = ReviewRequest.objects.create(self.user, repo)
            # NOTE(review): the manager's create() presumably persists the
            # object already, making this save() redundant — confirm before
            # removing.
            request.save()

        self.api_delete(get_repository_item_url(repo, local_site_name),
                        expected_status=expected_status)

        return repo.pk
| 1tush/reviewboard | reviewboard/webapi/tests/test_repository.py | Python | mit | 31,592 |
import json
from .base import MyTestCase
from privacyidea.lib.error import (ParameterError, ConfigAdminError)
from privacyidea.lib.policy import PolicyClass
from urllib import urlencode
# Fixture files used by the resolver and policy tests. PWFILE backs the
# passwd resolver created in test_08_resolvers; the policy files are
# presumably used by import tests later in this module — verify there.
PWFILE = "tests/testdata/passwords"
POLICYFILE = "tests/testdata/policy.cfg"
POLICYEMPTY = "tests/testdata/policy_empty_file.cfg"
class APIConfigTestCase(MyTestCase):
    def test_00_get_empty_config(self):
        # GET /system/ with a valid auth token succeeds with status true.
        with self.app.test_request_context('/system/',
                                           method='GET',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            self.assertTrue('"status": true' in res.data, res.data)

    def test_00_failed_auth(self):
        # Without an Authorization header the endpoint must return 401.
        with self.app.test_request_context('/system/',
                                           method='GET'):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 401, res)
    def test_01_set_config(self):
        # Setting previously-unknown keys reports "insert" for each key.
        with self.app.test_request_context('/system/setConfig',
                                           data={"key1": "value1",
                                                 "key2": "value2",
                                                 "key3": "value3"},
                                           method='POST',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            self.assertTrue('"key1": "insert"' in res.data, res.data)
            self.assertTrue('"key2": "insert"' in res.data, res.data)
            self.assertTrue('"key3": "insert"' in res.data, res.data)

    def test_02_update_config(self):
        # Re-setting a key that exists (from test_01) reports "update".
        with self.app.test_request_context('/system/setConfig',
                                           data={"key3": "new value"},
                                           method='POST',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            self.assertTrue('"key3": "update"' in res.data, res.data)
    def test_03_set_and_del_default(self):
        # Set several token defaults, then delete one and verify it is gone.
        with self.app.test_request_context('/system/setDefault',
                                           data={"DefaultMaxFailCount": 1,
                                                 "DefaultSyncWindow": 10,
                                                 "DefaultCountWindow": 12,
                                                 "DefaultOtpLen": 6,
                                                 "DefaultResetFailCount": 12},
                                           method='POST',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            result = json.loads(res.data).get("result")
            self.assertTrue(res.status_code == 200, res)
            self.assertTrue(result["status"] is True, result)
            self.assertTrue(result["value"]["DefaultOtpLen"] == "insert",
                            result)

        # DELETE returns the number of removed entries (1 here).
        with self.app.test_request_context('/system/DefaultMaxFailCount',
                                           method='DELETE',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            result = json.loads(res.data).get("result")
            self.assertTrue(result["status"] is True, result)
            self.assertTrue(result["value"] == 1,result)

        # The deleted key now reads back as None.
        with self.app.test_request_context('/system/DefaultMaxFailCount',
                                           method='GET',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            result = json.loads(res.data).get("result")
            self.assertTrue(result["status"] is True, result)
            self.assertTrue(result["value"] is None, result)

        # test unknown parameter
        with self.app.test_request_context('/system/setDefault',
                                           data={"unknown": "xx"},
                                           method='POST',
                                           headers={'Authorization': self.at}):
            # "unknown" is an unknown Default Parameter. So a ParamterError
            # is raised.
            self.assertRaises(ParameterError, self.app.full_dispatch_request)
    def test_04_set_policy(self):
        # Creating a valid policy succeeds and reports "setPolicy pol1": 1.
        with self.app.test_request_context('/policy/pol1',
                                           data={'action': "enroll",
                                                 'scope': "selfservice",
                                                 'realm': "r1",
                                                 'resolver': "test",
                                                 'user': "admin",
                                                 'time': "",
                                                 'client': "127.12.12.12",
                                                 'active': True},
                                           method='POST',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            result = json.loads(res.data).get("result")
            self.assertTrue(result["status"] is True, result)
            self.assertTrue('"setPolicy pol1": 1' in res.data, res.data)

        # setting policy with invalid name fails
        # (policy names may not contain spaces)
        with self.app.test_request_context('/policy/invalid policy name',
                                           data={'action': "enroll",
                                                 'scope': "selfservice",
                                                 'client': "127.12.12.12",
                                                 'active': True},
                                           method='POST',
                                           headers={'Authorization': self.at}):
            # An invalid policy name raises an exception
            self.assertRaises(Exception, self.app.full_dispatch_request)

        # setting policy with an empty name
        # (the 'action' parameter is missing here)
        with self.app.test_request_context('/policy/enroll',
                                           data={'scope': "selfservice",
                                                 'client': "127.12.12.12",
                                                 'active': True},
                                           method='POST',
                                           headers={'Authorization': self.at}):
            # An invalid policy name raises an exception
            self.assertRaises(Exception, self.app.full_dispatch_request)
    def test_05_get_policy(self):
        # The policy created in test_04 can be read back by name.
        with self.app.test_request_context('/policy/pol1',
                                           method='GET',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            result = json.loads(res.data).get("result")
            self.assertTrue(result["status"] is True, result)
            self.assertTrue("pol1" == result["value"][0].get("name"), res.data)

    def test_06_export_policy(self):
        # Exporting renders the policy as an INI-style config file.
        with self.app.test_request_context('/policy/export/test.cfg',
                                           method='GET',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            body = res.data
            self.assertTrue('name = pol1' in body, res.data)
            self.assertTrue("[pol1]" in body, res.data)
    def test_07_update_and_delete_policy(self):
        # Create a policy, update it, verify the update, then delete it
        # twice (the second delete is a no-op) and verify it is gone.
        with self.app.test_request_context('/policy/pol_update_del',
                                           data={'action': "enroll",
                                                 'scope': "selfservice",
                                                 'realm': "r1",
                                                 'resolver': "test",
                                                 'user': "admin",
                                                 'time': "",
                                                 'client': "127.12.12.12",
                                                 'active': True},
                                           method='POST',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            result = json.loads(res.data).get("result")
            self.assertTrue(result["status"] is True, result)
            self.assertTrue(result["value"]["setPolicy pol_update_del"] > 0,
                            res.data)

        # update policy
        with self.app.test_request_context('/policy/pol_update_del',
                                           data={'action': "enroll",
                                                 'scope': "selfservice",
                                                 'realm': "r1",
                                                 'client': "1.1.1.1"},
                                           method='POST',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            result = json.loads(res.data).get("result")
            self.assertTrue(result["value"]["setPolicy pol_update_del"] > 0,
                            res.data)

        # get policy
        with self.app.test_request_context('/policy/pol_update_del',
                                           method='GET',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            result = json.loads(res.data).get("result")
            self.assertTrue(result["status"] is True, result)
            # Find our policy in the returned list and check the updated
            # client value.
            policy = {}
            for pol in result["value"]:
                if pol.get("name") == "pol_update_del":
                    policy = pol
                    break
            self.assertTrue("1.1.1.1" in policy.get("client"),
                            res.data)

        # delete policy again does not do anything
        with self.app.test_request_context('/policy/pol_update_del',
                                           method='DELETE',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            result = json.loads(res.data).get("result")
            self.assertTrue(result["status"] is True, result)

        # delete policy
        with self.app.test_request_context('/policy/pol_update_del',
                                           method='DELETE',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            result = json.loads(res.data).get("result")
            self.assertTrue(result["status"] is True, result)

        # check policy
        with self.app.test_request_context('/policy/pol_update_del',
                                           method='GET',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            result = json.loads(res.data).get("result")
            self.assertTrue(result["status"] is True, result)
            self.assertTrue(result["value"] == [], result)
# Resolvers
def test_08_pretestresolver(self):
# This test fails, as there is no server at localhost.
param = {'LDAPURI': 'ldap://localhost',
'LDAPBASE': 'o=test',
'BINDDN': 'cn=manager,ou=example,o=test',
'BINDPW': 'ldaptest',
'LOGINNAMEATTRIBUTE': 'cn',
'LDAPSEARCHFILTER': '(cn=*)',
'LDAPFILTER': '(&(cn=%s))',
'USERINFO': '{ "username": "cn",'
'"phone" : "telephoneNumber", '
'"mobile" : "mobile"'
', "email" : "mail", '
'"surname" : "sn", '
'"givenname" : "givenName" }',
'UIDTYPE': 'DN',
'type': 'ldapresolver'}
with self.app.test_request_context('/resolver/test',
data=param,
method='POST',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
detail = json.loads(res.data).get("detail")
self.assertFalse(result.get("value"), result)
self.assertTrue("no active server available in server pool" in
detail.get("description"),
detail.get("description"))
def test_08_resolvers(self):
with self.app.test_request_context('/resolver/resolver1',
data={'type': 'passwdresolver',
'filename': PWFILE},
method='POST',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
self.assertTrue(result["value"] == 1, result)
with self.app.test_request_context('/resolver/',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
self.assertTrue("resolver1" in result["value"], result)
self.assertTrue("filename" in result["value"]["resolver1"]["data"])
# Get a non existing resolver
with self.app.test_request_context('/resolver/unknown',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
# The value is empty
self.assertTrue(result["value"] == {}, result)
# Get only editable resolvers
with self.app.test_request_context('/resolver/',
method='GET',
query_string=urlencode({
"editable": "1"}),
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
# The value is empty
self.assertTrue(result["value"] == {}, result)
# this will fetch all resolvers
with self.app.test_request_context('/resolver/',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
value = result.get("value")
self.assertTrue("resolver1" in value, value)
# get non-editable resolvers
with self.app.test_request_context('/resolver/',
method='GET',
query_string=urlencode({
"editable": "0"}),
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
value = result.get("value")
self.assertTrue("resolver1" in value, value)
# get a resolver name
with self.app.test_request_context('/resolver/resolver1',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
result = json.loads(res.data).get("result")
self.assertTrue(res.status_code == 200, res)
self.assertTrue(result["status"] is True, result)
self.assertTrue("resolver1" in result["value"], result)
self.assertTrue("filename" in result["value"]["resolver1"]["data"])
# get a resolver name
with self.app.test_request_context('/resolver/resolver1',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
self.assertTrue("resolver1" in result["value"], result)
self.assertTrue("filename" in result["value"]["resolver1"]["data"])
# delete the resolver
with self.app.test_request_context('/resolver/resolver1',
method='DELETE',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
print res.data
result = json.loads(res.data).get("result")
self.assertTrue(res.status_code == 200, res)
self.assertTrue(result["status"] is True, result)
self.assertTrue(result["value"] == 1, result)
# delete a non existing resolver
with self.app.test_request_context('/resolver/xycswwf',
method='DELETE',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
print res.data
result = json.loads(res.data).get("result")
self.assertTrue(res.status_code == 200, res)
self.assertTrue(result["status"] is True, result)
# Trying to delete a non existing resolver returns -1
self.assertTrue(result["value"] == -1, result)
def test_09_handle_realms(self):
resolvername = "reso1_with_realm"
realmname = "realm1_with_resolver"
# create a resolver
with self.app.test_request_context('/resolver/%s' % resolvername,
method='POST',
data={"filename": PWFILE,
"type": "passwdresolver"},
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
# The resolver was created. The ID of the resolver is returend.
self.assertTrue(result["value"] > 0, result)
# create a realm
with self.app.test_request_context('/realm/%s' % realmname,
method='POST',
data={"resolvers": resolvername},
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
# The resolver was created
self.assertTrue(len(result["value"].get("added")) == 1, result)
self.assertTrue(len(result["value"].get("failed")) == 0, result)
# display the realm
with self.app.test_request_context('/realm/',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
# The resolver was created = 1
self.assertTrue(realmname in result["value"], result)
realm_contents = result["value"].get(realmname)
self.assertTrue(realm_contents.get("resolver")[0].get("name") ==
resolvername, result)
# get the superuser realms
with self.app.test_request_context('/realm/superuser',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
self.assertTrue("adminrealm" in result["value"], result)
# try to delete the resolver in the realm
with self.app.test_request_context('/resolver/%s' % resolvername,
method='DELETE',
headers={'Authorization': self.at}):
# The resolver must not be deleted, since it is contained in a realm
self.assertRaises(ConfigAdminError, self.app.full_dispatch_request)
# delete the realm
with self.app.test_request_context('/realm/%s' % realmname,
method='DELETE',
headers={'Authorization': self.at}):
# The realm gets deleted
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
# The realm is successfully deleted: value == 1
self.assertTrue(result["value"] == 1, result)
# Now, we can delete the resolver
with self.app.test_request_context('/resolver/%s' % resolvername,
method='DELETE',
headers={'Authorization': self.at}):
# The resolver must not be deleted, since it is contained in a realm
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
# The resolver was deleted = 1
self.assertTrue(result["value"] == 1, result)
def test_10_default_realm(self):
resolvername = "defresolver"
realmname = "defrealm"
with self.app.test_request_context('/resolver/%s' % resolvername,
method='POST',
data={"filename": PWFILE,
"type": "passwdresolver"},
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
# create a realm
with self.app.test_request_context('/realm/%s' % realmname,
method='POST',
data={"resolvers": resolvername,
"priority.defresolver": 10},
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
# get the default realm
with self.app.test_request_context('/defaultrealm',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
self.assertTrue("defrealm" in result["value"], result)
# clear the default realm
with self.app.test_request_context('/defaultrealm',
method='DELETE',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
# get the default realm
with self.app.test_request_context('/defaultrealm',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
self.assertTrue(result["value"] == {}, result)
# set the default realm
with self.app.test_request_context('/defaultrealm/defrealm',
method='POST',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
# get the default realm
with self.app.test_request_context('/defaultrealm',
method='GET',
headers={'Authorization': self.at}):
self.assertTrue(res.status_code == 200, res)
res = self.app.full_dispatch_request()
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
self.assertTrue("defrealm" in result["value"], result)
def test_11_import_policy(self):
with self.app.test_request_context('/policy/import/policy.cfg',
method='POST',
data=dict(file=(POLICYFILE,
'policy.cfg')),
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result["status"] is True, result)
self.assertTrue(result["value"] == 2, result)
# check if policies are there
P = PolicyClass()
p1 = P.get_policies(name="importpol1")
self.assertTrue(len(p1) == 1, p1)
p2 = P.get_policies(name="importpol2")
self.assertTrue(len(p2) == 1, p2)
# import empty file
with self.app.test_request_context("/policy/import/"
"policy_empty_file.cfg",
method='POST',
data=dict(file=(POLICYEMPTY,
"policy_empty_file.cfg")),
headers={'Authorization': self.at}):
self.assertRaises(ParameterError, self.app.full_dispatch_request)
def test_12_test_check_policy(self):
# test invalid policy name "check"
with self.app.test_request_context('/policy/check',
method='POST',
data={"realm": "*",
"action": "action1, action2",
"scope": "scope1",
"user": "*, -user1",
"client": "172.16.0.0/16, "
"-172.16.1.1"},
headers={'Authorization': self.at}):
self.assertRaises(Exception, self.app.full_dispatch_request)
with self.app.test_request_context('/policy/pol1',
method='POST',
data={"realm": "*",
"action": "action1, action2",
"scope": "scope1",
"user": "*, -user1",
"client": "172.16.0.0/16, "
"-172.16.1.1"},
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
with self.app.test_request_context('/policy/pol2',
method='POST',
data={"realm": "*",
"action": "action3=value, "
"action4",
"scope": "scope1",
"user": "admin, superuser",
"client": "172.16.1.1"},
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
result = json.loads(res.data).get("result")
self.assertTrue(res.status_code == 200, res)
# CHECK: user=superuser, action=action1, client=172.16.1.1
# is not allowed
with self.app.test_request_context('/policy/check',
method='GET',
query_string=urlencode({"realm":
"realm1",
"action":
"action1",
"scope": "scope1",
"user": "superuser",
"client":
"172.16.1.1"}),
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertFalse(result.get("value").get("allowed"), result)
# CHECK: user=superuser, action=action1, client=172.16.1.2
# is allowed
with self.app.test_request_context('/policy/check',
method='GET',
query_string=urlencode({"realm": "realm2",
"action": "action1",
"scope": "scope1",
"user": "superuser",
"client": "172.16.1.2"}),
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result.get("value").get("allowed"), result)
# CHECK: user=superuser, action=action3, client=172.16.1.2
# is not allowed
with self.app.test_request_context('/policy/check',
method='GET',
query_string=urlencode({"realm": "realm3",
"action": "action3",
"scope": "scope1",
"user": "superuser",
"client": "172.16.1.2"}),
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertFalse(result.get("value").get("allowed"), result)
# CHECK: user=superuser, action=action3, client=172.16.1.1
# is allowed
with self.app.test_request_context('/policy/check',
method='GET',
query_string=urlencode({"realm": "realm1",
"action": "action3",
"scope": "scope1",
"user": "superuser",
"client": "172.16.1.1"}),
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
self.assertTrue(result.get("value").get("allowed"), result)
def test_13_get_policy_defs(self):
with self.app.test_request_context('/policy/defs',
method='GET',
data={},
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
result = json.loads(res.data).get("result")
policies = result.get("value")
admin_pol = policies.get("admin")
self.assertTrue("enable" in admin_pol, admin_pol)
self.assertTrue("enrollTOTP" in admin_pol, admin_pol)
self.assertTrue("enrollHOTP" in admin_pol, admin_pol)
self.assertTrue("enrollPW" in admin_pol, admin_pol)
with self.app.test_request_context('/policy/defs/admin',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
result = json.loads(res.data).get("result")
self.assertTrue(res.status_code == 200, res)
admin_pol = result.get("value")
self.assertTrue("enable" in admin_pol, admin_pol)
self.assertTrue("enrollTOTP" in admin_pol, admin_pol)
self.assertTrue("enrollHOTP" in admin_pol, admin_pol)
self.assertTrue("enrollPW" in admin_pol, admin_pol)
def test_14_enable_disable_policy(self):
with self.app.test_request_context('/policy/pol2',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
result = json.loads(res.data).get("result")
self.assertTrue(res.status_code == 200, res)
pol = result.get("value")
self.assertTrue(pol[0].get("active"), pol[0])
# Disable policy
with self.app.test_request_context('/policy/disable/pol2',
method='POST',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
result = json.loads(res.data).get("result")
self.assertTrue(res.status_code == 200, res)
with self.app.test_request_context('/policy/pol2',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
result = json.loads(res.data).get("result")
self.assertTrue(res.status_code == 200, res)
pol = result.get("value")
self.assertFalse(pol[0].get("active"), pol[0])
# enable Policy
with self.app.test_request_context('/policy/enable/pol2',
method='POST',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
result = json.loads(res.data).get("result")
self.assertTrue(res.status_code == 200, res)
with self.app.test_request_context('/policy/pol2',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
result = json.loads(res.data).get("result")
self.assertTrue(res.status_code == 200, res)
pol = result.get("value")
self.assertTrue(pol[0].get("active"), pol[0])
def test_15_get_documentation(self):
with self.app.test_request_context('/system/documentation',
method='GET',
headers={'Authorization': self.at}):
res = self.app.full_dispatch_request()
self.assertTrue(res.status_code == 200, res)
self.assertTrue("privacyIDEA configuration documentation" in
res.data)
| woddx/privacyidea | tests/test_api_system.py | Python | agpl-3.0 | 40,077 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def forwards(apps, schema_editor):
    """Create the "Newsletter" auth group if it does not exist yet.

    ``get_or_create`` performs the existence check and the insert in a
    single call, avoiding the separate ``exists()`` query of the
    original implementation.
    """
    Group = apps.get_model('auth', 'Group')
    Group.objects.get_or_create(name='Newsletter')
def backwards(apps, schema_editor):
    """Delete the "Newsletter" auth group.

    ``QuerySet.delete()`` on an empty queryset is a no-op, so the
    previous ``exists()`` pre-check was a redundant extra query.
    """
    Group = apps.get_model('auth', 'Group')
    Group.objects.filter(name='Newsletter').delete()
class Migration(migrations.Migration):
    # Ordered after the previous data migration of this app so the group
    # creations run in a deterministic sequence.
    dependencies = [
        ('profiles', '0011_groups_new_onboarding_group'),
    ]
    operations = [
        # Data migration: create the "Newsletter" group on apply,
        # delete it on unapply.
        migrations.RunPython(forwards, backwards)
    ]
| mozilla/remo | remo/profiles/migrations/0012_groups_new_newsletter_group.py | Python | bsd-3-clause | 765 |
"""Capa's specialized use of codejail.safe_exec."""
from codejail.safe_exec import safe_exec as codejail_safe_exec
from codejail.safe_exec import not_safe_exec as codejail_not_safe_exec
from codejail.safe_exec import json_safe, SafeExecException
from . import lazymod
from statsd import statsd
import hashlib
# Establish the Python environment for Capa.
# Capa assumes float-friendly division always.
# The name "random" is a properly-seeded stand-in for the random module.
CODE_PROLOG = """\
from __future__ import division
import random as random_module
import sys
random = random_module.Random(%r)
random.Random = random_module.Random
sys.modules['random'] = random
"""

# Modules that capa problem code may reference without importing them
# itself.  Each pair maps the name visible to problem code to the real
# module path that LazyModule loads on first attribute access.
ASSUMED_IMPORTS = [
    ("numpy", "numpy"),
    ("math", "math"),
    ("scipy", "scipy"),
    ("calc", "calc"),
    ("eia", "eia"),
    ("chemcalc", "chem.chemcalc"),
    ("chemtools", "chem.chemtools"),
    ("miller", "chem.miller"),
    ("draganddrop", "verifiers.draganddrop"),
]

# We'll need the code from lazymod.py for use in safe_exec, so read it now.
lazymod_py_file = lazymod.__file__
if lazymod_py_file.endswith("c"):
    # Strip the trailing "c" of a compiled ".pyc" path to get the source.
    lazymod_py_file = lazymod_py_file[:-1]
lazymod_py = open(lazymod_py_file).read()

# Source prepended to every executed problem: the LazyModule machinery
# followed by one lazy binding per assumed import.
LAZY_IMPORTS = lazymod_py + "".join(
    "{} = LazyModule('{}')\n".format(name, modname)
    for name, modname in ASSUMED_IMPORTS
)
def update_hash(hasher, obj):
    """
    Feed a nested JSON-safe structure into a `hashlib`-style hasher.

    The structure is walked depth-first so that equal structures always
    produce the same sequence of `.update()` calls: dicts are hashed
    key-by-key in sorted key order, lists and tuples element by element,
    and every other (primitive) value via its `repr`.  The type of each
    node is mixed in as well, so containers of different types hash
    differently even with equal contents.
    """
    hasher.update(str(type(obj)))
    if isinstance(obj, dict):
        for key in sorted(obj):
            update_hash(hasher, key)
            update_hash(hasher, obj[key])
    elif isinstance(obj, (tuple, list)):
        for element in obj:
            update_hash(hasher, element)
    else:
        hasher.update(repr(obj))
@statsd.timed('capa.safe_exec.time')
def safe_exec(code, globals_dict, random_seed=None, python_path=None, cache=None, slug=None, unsafely=False):
    """
    Execute python code safely.

    `code` is the Python code to execute.  It has access to the globals in `globals_dict`,
    and any changes it makes to those globals are visible in `globals_dict` when this
    function returns.

    `random_seed` will be used to seed the `random` module available to the code.

    `python_path` is a list of directories to add to the Python path before execution.

    `cache` is an object with .get(key) and .set(key, value) methods.  It will be used
    to cache the execution, taking into account the code, the values of the globals,
    and the random seed.

    `slug` is an arbitrary string, a description that's meaningful to the
    caller, that will be used in log messages.

    If `unsafely` is true, then the code will actually be executed without sandboxing.

    Raises SafeExecException if the executed code raised, whether the result
    came from the cache or from a fresh execution.
    """
    # Check the cache for a previous result.
    if cache:
        safe_globals = json_safe(globals_dict)
        md5er = hashlib.md5()
        md5er.update(repr(code))
        update_hash(md5er, safe_globals)
        key = "safe_exec.%r.%s" % (random_seed, md5er.hexdigest())
        cached = cache.get(key)
        if cached is not None:
            # We have a cached result.  The result is a pair: the exception
            # message, if any, else None; and the resulting globals dictionary.
            emsg, cleaned_results = cached
            globals_dict.update(cleaned_results)
            if emsg:
                raise SafeExecException(emsg)
            return

    # Create the complete code we'll run.
    code_prolog = CODE_PROLOG % random_seed

    # Decide which code executor to use.
    if unsafely:
        exec_fn = codejail_not_safe_exec
    else:
        exec_fn = codejail_safe_exec

    # Run the code!  Results are side effects in globals_dict.
    try:
        exec_fn(
            code_prolog + LAZY_IMPORTS + code, globals_dict,
            python_path=python_path, slug=slug,
        )
    except SafeExecException as e:
        emsg = e.message
    else:
        emsg = None

    # Put the result back in the cache.  This is complicated by the fact that
    # the globals dict might not be entirely serializable.
    if cache:
        cleaned_results = json_safe(globals_dict)
        cache.set(key, (emsg, cleaned_results))

    # If an exception happened, raise it now.
    # BUG FIX: re-raise a fresh SafeExecException instead of the caught `e`:
    # the except-target name is unbound after the handler under Python 3
    # semantics, and this matches exactly what the cached branch raises.
    if emsg:
        raise SafeExecException(emsg)
| EduPepperPD/pepper2013 | common/lib/capa/capa/safe_exec/safe_exec.py | Python | agpl-3.0 | 4,572 |
# Transliteration lookup table (unidecode style): entry N is the ASCII
# transliteration of the character whose low byte is N within this
# 256-code-point block (per the x05c module naming, presumably
# U+5C00..U+5CFF -- verify against the unidecode sources).
# '[?] ' marks characters with no known transliteration.
data = (
'Po ', # 0x00
'Feng ', # 0x01
'Zhuan ', # 0x02
'Fu ', # 0x03
'She ', # 0x04
'Ke ', # 0x05
'Jiang ', # 0x06
'Jiang ', # 0x07
'Zhuan ', # 0x08
'Wei ', # 0x09
'Zun ', # 0x0a
'Xun ', # 0x0b
'Shu ', # 0x0c
'Dui ', # 0x0d
'Dao ', # 0x0e
'Xiao ', # 0x0f
'Ji ', # 0x10
'Shao ', # 0x11
'Er ', # 0x12
'Er ', # 0x13
'Er ', # 0x14
'Ga ', # 0x15
'Jian ', # 0x16
'Shu ', # 0x17
'Chen ', # 0x18
'Shang ', # 0x19
'Shang ', # 0x1a
'Mo ', # 0x1b
'Ga ', # 0x1c
'Chang ', # 0x1d
'Liao ', # 0x1e
'Xian ', # 0x1f
'Xian ', # 0x20
'[?] ', # 0x21
'Wang ', # 0x22
'Wang ', # 0x23
'You ', # 0x24
'Liao ', # 0x25
'Liao ', # 0x26
'Yao ', # 0x27
'Mang ', # 0x28
'Wang ', # 0x29
'Wang ', # 0x2a
'Wang ', # 0x2b
'Ga ', # 0x2c
'Yao ', # 0x2d
'Duo ', # 0x2e
'Kui ', # 0x2f
'Zhong ', # 0x30
'Jiu ', # 0x31
'Gan ', # 0x32
'Gu ', # 0x33
'Gan ', # 0x34
'Tui ', # 0x35
'Gan ', # 0x36
'Gan ', # 0x37
'Shi ', # 0x38
'Yin ', # 0x39
'Chi ', # 0x3a
'Kao ', # 0x3b
'Ni ', # 0x3c
'Jin ', # 0x3d
'Wei ', # 0x3e
'Niao ', # 0x3f
'Ju ', # 0x40
'Pi ', # 0x41
'Ceng ', # 0x42
'Xi ', # 0x43
'Bi ', # 0x44
'Ju ', # 0x45
'Jie ', # 0x46
'Tian ', # 0x47
'Qu ', # 0x48
'Ti ', # 0x49
'Jie ', # 0x4a
'Wu ', # 0x4b
'Diao ', # 0x4c
'Shi ', # 0x4d
'Shi ', # 0x4e
'Ping ', # 0x4f
'Ji ', # 0x50
'Xie ', # 0x51
'Chen ', # 0x52
'Xi ', # 0x53
'Ni ', # 0x54
'Zhan ', # 0x55
'Xi ', # 0x56
'[?] ', # 0x57
'Man ', # 0x58
'E ', # 0x59
'Lou ', # 0x5a
'Ping ', # 0x5b
'Ti ', # 0x5c
'Fei ', # 0x5d
'Shu ', # 0x5e
'Xie ', # 0x5f
'Tu ', # 0x60
'Lu ', # 0x61
'Lu ', # 0x62
'Xi ', # 0x63
'Ceng ', # 0x64
'Lu ', # 0x65
'Ju ', # 0x66
'Xie ', # 0x67
'Ju ', # 0x68
'Jue ', # 0x69
'Liao ', # 0x6a
'Jue ', # 0x6b
'Shu ', # 0x6c
'Xi ', # 0x6d
'Che ', # 0x6e
'Tun ', # 0x6f
'Ni ', # 0x70
'Shan ', # 0x71
'[?] ', # 0x72
'Xian ', # 0x73
'Li ', # 0x74
'Xue ', # 0x75
'Nata ', # 0x76
'[?] ', # 0x77
'Long ', # 0x78
'Yi ', # 0x79
'Qi ', # 0x7a
'Ren ', # 0x7b
'Wu ', # 0x7c
'Han ', # 0x7d
'Shen ', # 0x7e
'Yu ', # 0x7f
'Chu ', # 0x80
'Sui ', # 0x81
'Qi ', # 0x82
'[?] ', # 0x83
'Yue ', # 0x84
'Ban ', # 0x85
'Yao ', # 0x86
'Ang ', # 0x87
'Ya ', # 0x88
'Wu ', # 0x89
'Jie ', # 0x8a
'E ', # 0x8b
'Ji ', # 0x8c
'Qian ', # 0x8d
'Fen ', # 0x8e
'Yuan ', # 0x8f
'Qi ', # 0x90
'Cen ', # 0x91
'Qian ', # 0x92
'Qi ', # 0x93
'Cha ', # 0x94
'Jie ', # 0x95
'Qu ', # 0x96
'Gang ', # 0x97
'Xian ', # 0x98
'Ao ', # 0x99
'Lan ', # 0x9a
'Dao ', # 0x9b
'Ba ', # 0x9c
'Zuo ', # 0x9d
'Zuo ', # 0x9e
'Yang ', # 0x9f
'Ju ', # 0xa0
'Gang ', # 0xa1
'Ke ', # 0xa2
'Gou ', # 0xa3
'Xue ', # 0xa4
'Bei ', # 0xa5
'Li ', # 0xa6
'Tiao ', # 0xa7
'Ju ', # 0xa8
'Yan ', # 0xa9
'Fu ', # 0xaa
'Xiu ', # 0xab
'Jia ', # 0xac
'Ling ', # 0xad
'Tuo ', # 0xae
'Pei ', # 0xaf
'You ', # 0xb0
'Dai ', # 0xb1
'Kuang ', # 0xb2
'Yue ', # 0xb3
'Qu ', # 0xb4
'Hu ', # 0xb5
'Po ', # 0xb6
'Min ', # 0xb7
'An ', # 0xb8
'Tiao ', # 0xb9
'Ling ', # 0xba
'Chi ', # 0xbb
'Yuri ', # 0xbc
'Dong ', # 0xbd
'Cem ', # 0xbe
'Kui ', # 0xbf
'Xiu ', # 0xc0
'Mao ', # 0xc1
'Tong ', # 0xc2
'Xue ', # 0xc3
'Yi ', # 0xc4
'Kura ', # 0xc5
'He ', # 0xc6
'Ke ', # 0xc7
'Luo ', # 0xc8
'E ', # 0xc9
'Fu ', # 0xca
'Xun ', # 0xcb
'Die ', # 0xcc
'Lu ', # 0xcd
'An ', # 0xce
'Er ', # 0xcf
'Gai ', # 0xd0
'Quan ', # 0xd1
'Tong ', # 0xd2
'Yi ', # 0xd3
'Mu ', # 0xd4
'Shi ', # 0xd5
'An ', # 0xd6
'Wei ', # 0xd7
'Hu ', # 0xd8
'Zhi ', # 0xd9
'Mi ', # 0xda
'Li ', # 0xdb
'Ji ', # 0xdc
'Tong ', # 0xdd
'Wei ', # 0xde
'You ', # 0xdf
'Sang ', # 0xe0
'Xia ', # 0xe1
'Li ', # 0xe2
'Yao ', # 0xe3
'Jiao ', # 0xe4
'Zheng ', # 0xe5
'Luan ', # 0xe6
'Jiao ', # 0xe7
'E ', # 0xe8
'E ', # 0xe9
'Yu ', # 0xea
'Ye ', # 0xeb
'Bu ', # 0xec
'Qiao ', # 0xed
'Qun ', # 0xee
'Feng ', # 0xef
'Feng ', # 0xf0
'Nao ', # 0xf1
'Li ', # 0xf2
'You ', # 0xf3
'Xian ', # 0xf4
'Hong ', # 0xf5
'Dao ', # 0xf6
'Shen ', # 0xf7
'Cheng ', # 0xf8
'Tu ', # 0xf9
'Geng ', # 0xfa
'Jun ', # 0xfb
'Hao ', # 0xfc
'Xia ', # 0xfd
'Yin ', # 0xfe
'Yu ', # 0xff
)
| gquirozbogner/contentbox-master | third_party/unidecode/x05c.py | Python | apache-2.0 | 4,870 |
import os
import torch
import argparse
import numpy as np
import scipy.misc as misc
from ptsemseg.models import get_model
from ptsemseg.loader import get_loader
from ptsemseg.utils import convert_state_dict
try:
import pydensecrf.densecrf as dcrf
except:
print(
"Failed to import pydensecrf,\
CRF post-processing will not work"
)
def test(args):
    """Run semantic-segmentation inference on a single image.

    The architecture name is parsed from the checkpoint file name (the
    part before the first underscore), the input image is preprocessed
    to match the chosen dataset loader, a mask is predicted, optionally
    refined with DenseCRF, and the colorized mask is written to
    ``args.out_path``.
    """
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model_file_name = os.path.split(args.model_path)[1]
    # Checkpoint files are named "<arch>_...": everything before the
    # first "_" selects the architecture.
    model_name = model_file_name[: model_file_name.find("_")]
    # Setup image
    print("Read Input Image from : {}".format(args.img_path))
    img = misc.imread(args.img_path)
    data_loader = get_loader(args.dataset)
    loader = data_loader(root=None, is_transform=True, img_norm=args.img_norm, test_mode=True)
    n_classes = loader.n_classes
    # Resized copy kept for the (optional) DenseCRF pairwise term below.
    resized_img = misc.imresize(img, (loader.img_size[0], loader.img_size[1]), interp="bicubic")
    orig_size = img.shape[:-1]
    if model_name in ["pspnet", "icnet", "icnetBN"]:
        # uint8 with RGB mode, resize width and height which are odd numbers
        img = misc.imresize(img, (orig_size[0] // 2 * 2 + 1, orig_size[1] // 2 * 2 + 1))
    else:
        img = misc.imresize(img, (loader.img_size[0], loader.img_size[1]))
    # Reverse the channel axis -- presumably RGB -> BGR so that
    # loader.mean applies in the right order; confirm against the loaders.
    img = img[:, :, ::-1]
    img = img.astype(np.float64)
    img -= loader.mean
    if args.img_norm:
        img = img.astype(float) / 255.0
    # NHWC -> NCHW
    img = img.transpose(2, 0, 1)
    img = np.expand_dims(img, 0)
    img = torch.from_numpy(img).float()
    # Setup Model
    model_dict = {"arch": model_name}
    model = get_model(model_dict, n_classes, version=args.dataset)
    state = convert_state_dict(torch.load(args.model_path)["model_state"])
    model.load_state_dict(state)
    model.eval()
    model.to(device)
    images = img.to(device)
    outputs = model(images)
    if args.dcrf:
        # Unary potentials from the network scores.
        # NOTE(review): -log assumes `outputs` are probabilities
        # (softmax-ed); confirm the network's final layer.
        unary = outputs.data.cpu().numpy()
        unary = np.squeeze(unary, 0)
        unary = -np.log(unary)
        unary = unary.transpose(2, 1, 0)
        w, h, c = unary.shape
        # Flatten to (n_classes, n_pixels) as DenseCRF expects.
        unary = unary.transpose(2, 0, 1).reshape(loader.n_classes, -1)
        unary = np.ascontiguousarray(unary)
        resized_img = np.ascontiguousarray(resized_img)
        d = dcrf.DenseCRF2D(w, h, loader.n_classes)
        d.setUnaryEnergy(unary)
        d.addPairwiseBilateral(sxy=5, srgb=3, rgbim=resized_img, compat=1)
        q = d.inference(50)
        mask = np.argmax(q, axis=0).reshape(w, h).transpose(1, 0)
        decoded_crf = loader.decode_segmap(np.array(mask, dtype=np.uint8))
        # CRF output goes next to the regular output with a "_drf" suffix.
        dcrf_path = args.out_path[:-4] + "_drf.png"
        misc.imsave(dcrf_path, decoded_crf)
        print("Dense CRF Processed Mask Saved at: {}".format(dcrf_path))
    # Per-pixel argmax over the class scores -> label map.
    pred = np.squeeze(outputs.data.max(1)[1].cpu().numpy(), axis=0)
    if model_name in ["pspnet", "icnet", "icnetBN"]:
        pred = pred.astype(np.float32)
        # float32 with F mode, resize back to orig_size
        pred = misc.imresize(pred, orig_size, "nearest", mode="F")
    decoded = loader.decode_segmap(pred)
    print("Classes found: ", np.unique(pred))
    misc.imsave(args.out_path, decoded)
    print("Segmentation Mask Saved at: {}".format(args.out_path))
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Params")
    # Checkpoint and dataset selection.
    parser.add_argument(
        "--model_path",
        nargs="?",
        type=str,
        default="fcn8s_pascal_1_26.pkl",
        help="Path to the saved model",
    )
    parser.add_argument(
        "--dataset",
        nargs="?",
        type=str,
        default="pascal",
        help="Dataset to use ['pascal, camvid, ade20k etc']",
    )
    # Paired store_true/store_false flags toggling input normalization;
    # the set_defaults call below makes it default to enabled.
    parser.add_argument(
        "--img_norm",
        dest="img_norm",
        action="store_true",
        help="Enable input image scales normalization [0, 1] \
                            | True by default",
    )
    parser.add_argument(
        "--no-img_norm",
        dest="img_norm",
        action="store_false",
        help="Disable input image scales normalization [0, 1] |\
                            True by default",
    )
    parser.set_defaults(img_norm=True)
    # Paired flags for the optional DenseCRF post-processing (off by default).
    parser.add_argument(
        "--dcrf",
        dest="dcrf",
        action="store_true",
        help="Enable DenseCRF based post-processing | \
                            False by default",
    )
    parser.add_argument(
        "--no-dcrf",
        dest="dcrf",
        action="store_false",
        help="Disable DenseCRF based post-processing | \
                            False by default",
    )
    parser.set_defaults(dcrf=False)
    # Input and output image paths.
    parser.add_argument(
        "--img_path", nargs="?", type=str, default=None, help="Path of the input image"
    )
    parser.add_argument(
        "--out_path", nargs="?", type=str, default=None, help="Path of the output segmap"
    )
    args = parser.parse_args()
    test(args)
| meetshah1995/pytorch-semseg | test.py | Python | mit | 4,907 |
# -*- coding:utf-8 -*-
#
# Copyright (C) 2005 - 2007, TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
import optparse
import gettext
# Translation helper: _() looks up messages in the "pisilinux" gettext
# domain and falls back to the untranslated string when no catalog is
# installed.  NOTE(review): ugettext is the Python-2 gettext API.
__trans = gettext.translation('pisilinux', fallback=True)
_ = __trans.ugettext
import pisilinux.cli.command as command
import pisilinux.context as ctx
import pisilinux.db
class ListComponents(command.Command, metaclass=command.autocommand):
    __doc__ = _("""List available components
Usage: list-components
Gives a brief list of pisilinux components published in the
repositories.
""")

    def __init__(self, args):
        """Initialize the command and open the component database."""
        super(ListComponents, self).__init__(args)
        self.componentdb = pisilinux.db.componentdb.ComponentDB()

    # Command name and its short alias.
    name = ("list-components", "lc")

    def options(self):
        """Register the command line options of list-components."""
        group = optparse.OptionGroup(self.parser, _("list-components options"))
        group.add_option("-l", "--long", action="store_true",
                         default=False, help=_("Show in long format"))
        group.add_option("-r", "--repository", action="store",
                         type="string", default=None,
                         help=_('Name of the source or package repository'))
        self.parser.add_option_group(group)

    def run(self):
        """Print one line (or one long record) per published component.

        The dead name-padding computation of the original (the padded
        name was never used in the output) and the adjacent
        commented-out code were removed.
        """
        self.init(database=True, write=False)
        component_names = self.componentdb.list_components(ctx.get_option('repository'))
        component_names.sort()
        for component_name in component_names:
            component = self.componentdb.get_component(component_name)
            if self.options.long:
                ctx.ui.info(str(component))
            else:
                ctx.ui.info('%s - %s ' % (component.name, str(component.summary)))
| hknyldz/pisitools | pisilinux/pisilinux/cli/listcomponents.py | Python | gpl-3.0 | 2,017 |
'''
Copyright (C) 2014 Travis DeWolf
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import controllers.shell as shell
import controllers.forcefield as forcefield
import numpy as np
def Task(arm, controller_class,
        force=None, write_to_file=False, **kwargs):
    """
    This task sets up the arm to follow the mouse
    with its end-effector.
    arm: arm model being controlled; must expose a DOF attribute (1, 2 or 3)
    controller_class: controller module whose dotted name ends in
                      'lqr' or 'osc' and which provides a Control class
    force: optional scale for a joint-velocity-based forcefield addition
    write_to_file: when True the controller records its state to file
    Returns a (control_shell, runner_pars) pair for the simulation runner.
    """
    # check controller type ------------------
    # The module path is expected to look like 'controllers.lqr' etc.
    controller_name = controller_class.__name__.split('.')[1]
    if controller_name not in ('lqr', 'osc'):
        raise Exception('Cannot perform reaching task with this controller.')
    # set arm specific parameters ------------
    # Proportional gain grows with the number of degrees of freedom.
    if arm.DOF == 1:
        kp = 5
    elif arm.DOF == 2:
        kp = 20
    elif arm.DOF == 3:
        kp = 50
    # generate control shell -----------------
    additions = []
    if force is not None:
        print 'applying joint velocity based forcefield...'
        additions.append(forcefield.Addition(scale=force))
        # NOTE(review): 'task' is assigned here but never used; the
        # controller below is always built with the follow_mouse task --
        # confirm whether this string was meant to be passed instead.
        task = 'arm%i/forcefield'%arm.DOF
    # Critically-damped-style gains: kv = sqrt(kp).
    controller = controller_class.Control(
        additions=additions,
        kp=kp,
        kv=np.sqrt(kp),
        task='arm%i/follow_mouse'%arm.DOF,
        write_to_file=write_to_file)
    control_shell = shell.Shell(controller=controller)
    # generate runner parameters -----------
    runner_pars = {'control_type':'osc',
                   'title':'Task: Follow mouse',
                   'mouse_control':True}
    return (control_shell, runner_pars)
| studywolf/control | studywolf_control/tasks/follow_mouse.py | Python | gpl-3.0 | 2,215 |
import os
from setuptools import setup, find_packages


def _read(name):
    """Return the contents of a file located next to this setup.py.

    Using a context manager ensures the file handle is closed (the
    previous version leaked the handles opened for README/requirements).
    """
    path = os.path.join(os.path.dirname(__file__), name)
    with open(path, 'r') as handle:
        return handle.read()


# Long description, pinned requirements and version are all read from
# files shipped alongside setup.py.
README = _read('README.rst')
REQUIREMENTS = _read('requirements.txt').splitlines()
VERSION = _read('VERSION').strip()

setup(
    name='grano',
    version=VERSION,
    # Typo fix: description previously read "influence influence mapping".
    description="An investigative toolkit for influence mapping",
    long_description=README,
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
    ],
    keywords='sql graph sna networks journalism ddj entities',
    author='Code for Africa',
    author_email='support@codeforafrica.org',
    url='http://github.com/CodeForAfrica/grano',
    license='MIT',
    packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
    namespace_packages=['grano'],
    package_data={'grano': ['fixtures/base.yaml']},
    include_package_data=True,
    zip_safe=False,
    install_requires=REQUIREMENTS,
    dependency_links=[
        'https://github.com/CodeForAfrica/grano-client/tarball/master#egg=grano-client',
        'https://github.com/CodeForAfrica/grano-ui/tarball/master#egg=grano-ui'
    ],
    entry_points={
        'grano.entity.change': [],
        'grano.relation.change': [],
        'grano.project.change': [],
        'grano.schema.change': [],
        'grano.startup': [
            'bidi_create = grano.query.bidi:GenerateBidi',
            'levenshtein = grano.logic.reconcile:ConfigurePostgres'
        ],
        'grano.periodic': [
            'degrees = grano.logic.metrics:Degrees',
            'bidi_refresh = grano.query.bidi:GenerateBidi'
        ],
        'console_scripts': [
            'grano = grano.manage:run',
        ]
    },
    tests_require=[],
    test_suite='grano.test'
)
| granoproject/grano | setup.py | Python | mit | 1,993 |
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test processing of feefilter messages."""
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
def hashToHex(hash):
    """Render an integer hash as a 64-character, zero-padded hex string."""
    return '%064x' % hash
# Wait up to 60 secs to see if the testnode has received all the expected invs
def allInvsMatch(invsExpected, testnode):
    """Poll for up to 60 seconds until testnode has received exactly the
    expected set of tx invs; return True on a match, False on timeout."""
    for _ in range(60):
        with mininode_lock:
            if sorted(invsExpected) == sorted(testnode.txinvs):
                return True
        time.sleep(1)
    return False
# TestNode: bare-bones "peer". Used to track which invs are received from a node
# and to send the node feefilter messages.
class TestNode(NodeConnCB):
    """Bare-bones peer: records which tx invs arrive from the node and
    can send the node feefilter messages."""

    def __init__(self):
        super().__init__()
        # Hex txids of every transaction inv received so far.
        self.txinvs = []

    def on_inv(self, conn, message):
        # MSG_TX inv entries have type 1; record their txids as hex.
        self.txinvs.extend(hashToHex(entry.hash)
                           for entry in message.inv if entry.type == 1)

    def clear_invs(self):
        with mininode_lock:
            self.txinvs = []

    def send_filter(self, feerate):
        self.send_message(msg_feefilter(feerate))
        self.sync_with_ping()
class FeeFilterTest(BitcoinTestFramework):
    # Checks that a node honors a peer's feefilter message: transactions
    # below the advertised feerate must not be announced to that peer.
    def __init__(self):
        super().__init__()
        self.num_nodes = 2
        self.setup_clean_chain = False
    def setup_network(self):
        # Node1 will be used to generate txs which should be relayed from Node0
        # to our test node
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir))
        self.nodes.append(start_node(1, self.options.tmpdir))
        connect_nodes(self.nodes[0], 1)
    def run_test(self):
        node1 = self.nodes[1]
        node0 = self.nodes[0]
        # Get out of IBD
        node1.generate(1)
        sync_blocks(self.nodes)
        # Setup the p2p connections and start up the network thread.
        test_node = TestNode()
        connection = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node)
        test_node.add_connection(connection)
        NetworkThread().start()
        test_node.wait_for_verack()
        # Test that invs are received for all txs at feerate of 20 sat/byte
        node1.settxfee(Decimal("0.00020000"))
        txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
        assert(allInvsMatch(txids, test_node))
        test_node.clear_invs()
        # Set a filter of 15 sat/byte
        test_node.send_filter(15000)
        # Test that txs are still being received (paying 20 sat/byte)
        txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
        assert(allInvsMatch(txids, test_node))
        test_node.clear_invs()
        # Change tx fee rate to 10 sat/byte and test they are no longer received
        node1.settxfee(Decimal("0.00010000"))
        [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
        sync_mempools(self.nodes) # must be sure node 0 has received all txs
        # Send one transaction from node0 that should be received, so that we
        # we can sync the test on receipt (if node1's txs were relayed, they'd
        # be received by the time this node0 tx is received). This is
        # unfortunately reliant on the current relay behavior where we batch up
        # to 35 entries in an inv, which means that when this next transaction
        # is eligible for relay, the prior transactions from node1 are eligible
        # as well.
        node0.settxfee(Decimal("0.00020000"))
        txids = [node0.sendtoaddress(node0.getnewaddress(), 1)]
        assert(allInvsMatch(txids, test_node))
        test_node.clear_invs()
        # Remove fee filter and check that txs are received again
        test_node.send_filter(0)
        txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
        assert(allInvsMatch(txids, test_node))
        test_node.clear_invs()
# Standard functional-test entry point.
if __name__ == '__main__':
    FeeFilterTest().main()
| jimmysong/bitcoin | test/functional/p2p-feefilter.py | Python | mit | 4,157 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import os
import eventlet
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import loopingcall
from oslo_utils import importutils
from neutron.agent.linux import dhcp
from neutron.agent.linux import external_process
from neutron.agent.metadata import driver as metadata_driver
from neutron.agent import rpc as agent_rpc
from neutron.common import constants
from neutron.common import exceptions
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron import context
from neutron.i18n import _LE, _LI, _LW
from neutron import manager
LOG = logging.getLogger(__name__)
class DhcpAgent(manager.Manager):
    """DHCP agent service manager.
    Note that the public methods of this class are exposed as the server side
    of an rpc interface. The neutron server uses
    neutron.api.rpc.agentnotifiers.dhcp_rpc_agent_api.DhcpAgentNotifyApi as the
    client side to execute the methods here. For more information about
    changing rpc interfaces, see doc/source/devref/rpc_api.rst.
    """
    # RPC interface version exposed by this agent.
    target = oslo_messaging.Target(version='1.0')
    def __init__(self, host=None, conf=None):
        super(DhcpAgent, self).__init__(host=host)
        # network_id -> list of reasons why that network needs a resync;
        # the None key means "resync every network".
        self.needs_resync_reasons = collections.defaultdict(list)
        self.conf = conf or cfg.CONF
        self.cache = NetworkCache()
        self.dhcp_driver_cls = importutils.import_class(self.conf.dhcp_driver)
        ctx = context.get_admin_context_without_session()
        self.plugin_rpc = DhcpPluginApi(topics.PLUGIN,
                                        ctx, self.conf.use_namespaces,
                                        self.conf.host)
        # create dhcp dir to store dhcp info
        dhcp_dir = os.path.dirname("/%s/dhcp/" % self.conf.state_path)
        utils.ensure_dir(dhcp_dir)
        self.dhcp_version = self.dhcp_driver_cls.check_version()
        self._populate_networks_cache()
        self._process_monitor = external_process.ProcessMonitor(
            config=self.conf,
            resource_type='dhcp')
    def init_host(self):
        """Perform the initial full state sync when the host starts."""
        self.sync_state()
    def _populate_networks_cache(self):
        """Populate the networks cache when the DHCP-agent starts."""
        try:
            existing_networks = self.dhcp_driver_cls.existing_dhcp_networks(
                self.conf
            )
            for net_id in existing_networks:
                # Only the id is known at this point; subnets/ports are
                # filled in by the first sync.
                net = dhcp.NetModel(self.conf.use_namespaces,
                                    {"id": net_id,
                                     "subnets": [],
                                     "ports": []})
                self.cache.put(net)
        except NotImplementedError:
            # just go ahead with an empty networks cache
            LOG.debug("The '%s' DHCP-driver does not support retrieving of a "
                      "list of existing networks",
                      self.conf.dhcp_driver)
    def after_start(self):
        """Hook invoked once the service is up; starts the agent loop."""
        self.run()
        LOG.info(_LI("DHCP agent started"))
    def run(self):
        """Activate the DHCP agent."""
        self.sync_state()
        self.periodic_resync()
    def call_driver(self, action, network, **action_kwargs):
        """Invoke an action on a DHCP driver instance.
        Returns True on success; a falsy value (None) when the driver call
        failed (in which case a resync may have been scheduled).
        """
        LOG.debug('Calling driver for network: %(net)s action: %(action)s',
                  {'net': network.id, 'action': action})
        try:
            # the Driver expects something that is duck typed similar to
            # the base models.
            driver = self.dhcp_driver_cls(self.conf,
                                          network,
                                          self._process_monitor,
                                          self.dhcp_version,
                                          self.plugin_rpc)
            getattr(driver, action)(**action_kwargs)
            return True
        except exceptions.Conflict:
            # No need to resync here, the agent will receive the event related
            # to a status update for the network
            LOG.warning(_LW('Unable to %(action)s dhcp for %(net_id)s: there '
                            'is a conflict with its current state; please '
                            'check that the network and/or its subnet(s) '
                            'still exist.'),
                        {'net_id': network.id, 'action': action})
        except Exception as e:
            if getattr(e, 'exc_type', '') != 'IpAddressGenerationFailure':
                # Don't resync if port could not be created because of an IP
                # allocation failure. When the subnet is updated with a new
                # allocation pool or a port is deleted to free up an IP, this
                # will automatically be retried on the notification
                self.schedule_resync(e, network.id)
            if (isinstance(e, oslo_messaging.RemoteError)
                and e.exc_type == 'NetworkNotFound'
                or isinstance(e, exceptions.NetworkNotFound)):
                LOG.warning(_LW("Network %s has been deleted."), network.id)
            else:
                LOG.exception(_LE('Unable to %(action)s dhcp for %(net_id)s.'),
                              {'net_id': network.id, 'action': action})
    def schedule_resync(self, reason, network_id=None):
        """Schedule a resync for a given network and reason. If no network is
        specified, resync all networks.
        """
        self.needs_resync_reasons[network_id].append(reason)
    @utils.synchronized('dhcp-agent')
    def sync_state(self, networks=None):
        """Sync the local DHCP state with Neutron. If no networks are passed,
        or 'None' is one of the networks, sync all of the networks.
        """
        only_nets = set([] if (not networks or None in networks) else networks)
        LOG.info(_LI('Synchronizing state'))
        pool = eventlet.GreenPool(self.conf.num_sync_threads)
        known_network_ids = set(self.cache.get_network_ids())
        try:
            active_networks = self.plugin_rpc.get_active_networks_info()
            active_network_ids = set(network.id for network in active_networks)
            # Disable DHCP for networks the server no longer reports.
            for deleted_id in known_network_ids - active_network_ids:
                try:
                    self.disable_dhcp_helper(deleted_id)
                except Exception as e:
                    self.schedule_resync(e, deleted_id)
                    LOG.exception(_LE('Unable to sync network state on '
                                      'deleted network %s'), deleted_id)
            for network in active_networks:
                if (not only_nets or  # specifically resync all
                        network.id not in known_network_ids or  # missing net
                        network.id in only_nets):  # specific network to sync
                    pool.spawn(self.safe_configure_dhcp_for_network, network)
            # Wait for every spawned configuration greenthread to finish.
            pool.waitall()
            LOG.info(_LI('Synchronizing state complete'))
        except Exception as e:
            if only_nets:
                for network_id in only_nets:
                    self.schedule_resync(e, network_id)
            else:
                self.schedule_resync(e)
            LOG.exception(_LE('Unable to sync network state.'))
    @utils.exception_logger()
    def _periodic_resync_helper(self):
        """Resync the dhcp state at the configured interval."""
        while True:
            eventlet.sleep(self.conf.resync_interval)
            if self.needs_resync_reasons:
                # be careful to avoid a race with additions to list
                # from other threads
                reasons = self.needs_resync_reasons
                self.needs_resync_reasons = collections.defaultdict(list)
                for net, r in reasons.items():
                    if not net:
                        net = "*"
                    LOG.debug("resync (%(network)s): %(reason)s",
                              {"reason": r, "network": net})
                self.sync_state(reasons.keys())
    def periodic_resync(self):
        """Spawn a thread to periodically resync the dhcp state."""
        eventlet.spawn(self._periodic_resync_helper)
    def safe_get_network_info(self, network_id):
        """Fetch network info from the plugin; returns None (implicitly)
        on failure, after scheduling a resync."""
        try:
            network = self.plugin_rpc.get_network_info(network_id)
            if not network:
                LOG.warn(_LW('Network %s has been deleted.'), network_id)
            return network
        except Exception as e:
            self.schedule_resync(e, network_id)
            LOG.exception(_LE('Network %s info call failed.'), network_id)
    def enable_dhcp_helper(self, network_id):
        """Enable DHCP for a network that meets enabling criteria."""
        network = self.safe_get_network_info(network_id)
        if network:
            self.configure_dhcp_for_network(network)
    @utils.exception_logger()
    def safe_configure_dhcp_for_network(self, network):
        try:
            self.configure_dhcp_for_network(network)
        except (exceptions.NetworkNotFound, RuntimeError):
            LOG.warn(_LW('Network %s may have been deleted and its resources '
                         'may have already been disposed.'), network.id)
    def configure_dhcp_for_network(self, network):
        """Start DHCP for the network if it is up and has a DHCP-enabled
        subnet; optionally spawn the isolated metadata proxy."""
        if not network.admin_state_up:
            return
        enable_metadata = self.dhcp_driver_cls.should_enable_metadata(
                self.conf, network)
        dhcp_network_enabled = False
        for subnet in network.subnets:
            if subnet.enable_dhcp:
                if self.call_driver('enable', network):
                    dhcp_network_enabled = True
                    self.cache.put(network)
                break
        if enable_metadata and dhcp_network_enabled:
            # Metadata proxy only applies to IPv4 DHCP-enabled subnets.
            for subnet in network.subnets:
                if subnet.ip_version == 4 and subnet.enable_dhcp:
                    self.enable_isolated_metadata_proxy(network)
                    break
    def disable_dhcp_helper(self, network_id):
        """Disable DHCP for a network known to the agent."""
        network = self.cache.get_network_by_id(network_id)
        if network:
            if (self.conf.use_namespaces and
                self.conf.enable_isolated_metadata):
                # NOTE(jschwarz): In the case where a network is deleted, all
                # the subnets and ports are deleted before this function is
                # called, so checking if 'should_enable_metadata' is True
                # for any subnet is false logic here.
                self.disable_isolated_metadata_proxy(network)
            if self.call_driver('disable', network):
                self.cache.remove(network)
    def refresh_dhcp_helper(self, network_id):
        """Refresh or disable DHCP for a network depending on the current state
        of the network.
        """
        old_network = self.cache.get_network_by_id(network_id)
        if not old_network:
            # DHCP current not running for network.
            return self.enable_dhcp_helper(network_id)
        network = self.safe_get_network_info(network_id)
        if not network:
            return
        # Compare the DHCP-enabled CIDR sets to decide between a cheap
        # allocations reload and a full restart.
        old_cidrs = set(s.cidr for s in old_network.subnets if s.enable_dhcp)
        new_cidrs = set(s.cidr for s in network.subnets if s.enable_dhcp)
        if new_cidrs and old_cidrs == new_cidrs:
            self.call_driver('reload_allocations', network)
            self.cache.put(network)
        elif new_cidrs:
            if self.call_driver('restart', network):
                self.cache.put(network)
        else:
            self.disable_dhcp_helper(network.id)
    @utils.synchronized('dhcp-agent')
    def network_create_end(self, context, payload):
        """Handle the network.create.end notification event."""
        network_id = payload['network']['id']
        self.enable_dhcp_helper(network_id)
    @utils.synchronized('dhcp-agent')
    def network_update_end(self, context, payload):
        """Handle the network.update.end notification event."""
        network_id = payload['network']['id']
        if payload['network']['admin_state_up']:
            self.enable_dhcp_helper(network_id)
        else:
            self.disable_dhcp_helper(network_id)
    @utils.synchronized('dhcp-agent')
    def network_delete_end(self, context, payload):
        """Handle the network.delete.end notification event."""
        self.disable_dhcp_helper(payload['network_id'])
    @utils.synchronized('dhcp-agent')
    def subnet_update_end(self, context, payload):
        """Handle the subnet.update.end notification event."""
        network_id = payload['subnet']['network_id']
        self.refresh_dhcp_helper(network_id)
    # Use the update handler for the subnet create event.
    subnet_create_end = subnet_update_end
    @utils.synchronized('dhcp-agent')
    def subnet_delete_end(self, context, payload):
        """Handle the subnet.delete.end notification event."""
        subnet_id = payload['subnet_id']
        network = self.cache.get_network_by_subnet_id(subnet_id)
        if network:
            self.refresh_dhcp_helper(network.id)
    @utils.synchronized('dhcp-agent')
    def port_update_end(self, context, payload):
        """Handle the port.update.end notification event."""
        updated_port = dhcp.DictModel(payload['port'])
        network = self.cache.get_network_by_id(updated_port.network_id)
        if network:
            LOG.info(_LI("Trigger reload_allocations for port %s"),
                     updated_port)
            driver_action = 'reload_allocations'
            if self._is_port_on_this_agent(updated_port):
                orig = self.cache.get_port_by_id(updated_port['id'])
                # assume IP change if not in cache
                old_ips = {i['ip_address'] for i in orig['fixed_ips'] or []}
                new_ips = {i['ip_address'] for i in updated_port['fixed_ips']}
                if old_ips != new_ips:
                    # The agent's own port changed IPs: a restart is needed.
                    driver_action = 'restart'
            self.cache.put_port(updated_port)
            self.call_driver(driver_action, network)
    def _is_port_on_this_agent(self, port):
        """Return True when the port's device_id matches this agent's
        DHCP device id for the port's network."""
        thishost = utils.get_dhcp_agent_device_id(
            port['network_id'], self.conf.host)
        return port['device_id'] == thishost
    # Use the update handler for the port create event.
    port_create_end = port_update_end
    @utils.synchronized('dhcp-agent')
    def port_delete_end(self, context, payload):
        """Handle the port.delete.end notification event."""
        port = self.cache.get_port_by_id(payload['port_id'])
        if port:
            network = self.cache.get_network_by_id(port.network_id)
            self.cache.remove_port(port)
            self.call_driver('reload_allocations', network)
    def enable_isolated_metadata_proxy(self, network):
        """Spawn the monitored metadata proxy for this network (or for the
        router attached to it when the metadata network is enabled)."""
        # The proxy might work for either a single network
        # or all the networks connected via a router
        # to the one passed as a parameter
        kwargs = {'network_id': network.id}
        # When the metadata network is enabled, the proxy might
        # be started for the router attached to the network
        if self.conf.enable_metadata_network:
            router_ports = [port for port in network.ports
                            if (port.device_owner in
                                constants.ROUTER_INTERFACE_OWNERS)]
            if router_ports:
                # Multiple router ports should not be allowed
                if len(router_ports) > 1:
                    LOG.warning(_LW("%(port_num)d router ports found on the "
                                    "metadata access network. Only the port "
                                    "%(port_id)s, for router %(router_id)s "
                                    "will be considered"),
                                {'port_num': len(router_ports),
                                 'port_id': router_ports[0].id,
                                 'router_id': router_ports[0].device_id})
                kwargs = {'router_id': router_ports[0].device_id}
        metadata_driver.MetadataDriver.spawn_monitored_metadata_proxy(
            self._process_monitor, network.namespace, dhcp.METADATA_PORT,
            self.conf, **kwargs)
    def disable_isolated_metadata_proxy(self, network):
        """Tear down the monitored metadata proxy for this network."""
        metadata_driver.MetadataDriver.destroy_monitored_metadata_proxy(
            self._process_monitor, network.id, self.conf)
class DhcpPluginApi(object):
    """Agent side of the dhcp rpc API.
    This class implements the client side of an rpc interface. The server side
    of this interface can be found in
    neutron.api.rpc.handlers.dhcp_rpc.DhcpRpcCallback. For more information
    about changing rpc interfaces, see doc/source/devref/rpc_api.rst.
    API version history:
        1.0 - Initial version.
        1.1 - Added get_active_networks_info, create_dhcp_port,
              and update_dhcp_port methods.
    """
    def __init__(self, topic, context, use_namespaces, host):
        self.context = context
        self.host = host
        self.use_namespaces = use_namespaces
        self.client = n_rpc.get_client(oslo_messaging.Target(
            topic=topic,
            namespace=constants.RPC_NAMESPACE_DHCP_PLUGIN,
            version='1.0'))
    def get_active_networks_info(self):
        """Make a remote process call to retrieve all network info."""
        nets = self.client.prepare(version='1.1').call(
            self.context, 'get_active_networks_info', host=self.host)
        return [dhcp.NetModel(self.use_namespaces, net) for net in nets]
    def get_network_info(self, network_id):
        """Make a remote process call to retrieve network info."""
        network = self.client.prepare().call(
            self.context, 'get_network_info',
            network_id=network_id, host=self.host)
        # None when the network no longer exists on the server.
        return dhcp.NetModel(self.use_namespaces, network) if network else None
    def create_dhcp_port(self, port):
        """Make a remote process call to create the dhcp port."""
        created = self.client.prepare(version='1.1').call(
            self.context, 'create_dhcp_port', port=port, host=self.host)
        return dhcp.DictModel(created) if created else None
    def update_dhcp_port(self, port_id, port):
        """Make a remote process call to update the dhcp port."""
        updated = self.client.prepare(version='1.1').call(
            self.context, 'update_dhcp_port',
            port_id=port_id, port=port, host=self.host)
        return dhcp.DictModel(updated) if updated else None
    def release_dhcp_port(self, network_id, device_id):
        """Make a remote process call to release the dhcp port."""
        return self.client.prepare().call(
            self.context, 'release_dhcp_port',
            network_id=network_id, device_id=device_id, host=self.host)
class NetworkCache(object):
    """Agent cache of the current network state, indexed by network id
    with secondary lookups by subnet id and port id."""
    def __init__(self):
        self.cache = {}          # network_id -> network model
        self.subnet_lookup = {}  # subnet_id -> network_id
        self.port_lookup = {}    # port_id -> network_id
    def get_network_ids(self):
        return self.cache.keys()
    def get_network_by_id(self, network_id):
        return self.cache.get(network_id)
    def get_network_by_subnet_id(self, subnet_id):
        return self.cache.get(self.subnet_lookup.get(subnet_id))
    def get_network_by_port_id(self, port_id):
        return self.cache.get(self.port_lookup.get(port_id))
    def put(self, network):
        """Insert or wholesale-replace the cache entry for a network."""
        stale = self.cache.get(network.id)
        if stale is not None:
            self.remove(stale)
        self.cache[network.id] = network
        for subnet in network.subnets:
            self.subnet_lookup[subnet.id] = network.id
        for port in network.ports:
            self.port_lookup[port.id] = network.id
    def remove(self, network):
        """Drop a network and all of its subnet/port lookup entries."""
        del self.cache[network.id]
        for subnet in network.subnets:
            del self.subnet_lookup[subnet.id]
        for port in network.ports:
            del self.port_lookup[port.id]
    def put_port(self, port):
        """Insert or replace a single port on its (cached) network."""
        network = self.get_network_by_id(port.network_id)
        for index, existing in enumerate(network.ports):
            if existing.id == port.id:
                network.ports[index] = port
                break
        else:
            # Not present yet: append as a new port.
            network.ports.append(port)
        self.port_lookup[port.id] = network.id
    def remove_port(self, port):
        """Drop a single port from its network and the port lookup."""
        network = self.get_network_by_port_id(port.id)
        for index, existing in enumerate(network.ports):
            if existing == port:
                del network.ports[index]
                del self.port_lookup[port.id]
                break
    def get_port_by_id(self, port_id):
        network = self.get_network_by_port_id(port_id)
        if not network:
            return None
        for port in network.ports:
            if port.id == port_id:
                return port
        return None
    def get_state(self):
        """Return aggregate counts of cached networks, subnets and ports."""
        nets = [self.get_network_by_id(net_id)
                for net_id in self.get_network_ids()]
        return {'networks': len(nets),
                'subnets': sum(len(net.subnets) for net in nets),
                'ports': sum(len(net.ports) for net in nets)}
class DhcpAgentWithStateReport(DhcpAgent):
    # DhcpAgent variant that periodically reports agent state (heartbeats)
    # back to the neutron server.
    def __init__(self, host=None, conf=None):
        super(DhcpAgentWithStateReport, self).__init__(host=host, conf=conf)
        self.state_rpc = agent_rpc.PluginReportStateAPI(topics.REPORTS)
        # Static agent description sent with every report; 'configurations'
        # is refreshed from the cache before each report.
        self.agent_state = {
            'binary': 'neutron-dhcp-agent',
            'host': host,
            'availability_zone': self.conf.AGENT.availability_zone,
            'topic': topics.DHCP_AGENT,
            'configurations': {
                'dhcp_driver': self.conf.dhcp_driver,
                'use_namespaces': self.conf.use_namespaces,
                'dhcp_lease_duration': self.conf.dhcp_lease_duration,
                'log_agent_heartbeats': self.conf.AGENT.log_agent_heartbeats},
            'start_flag': True,
            'agent_type': constants.AGENT_TYPE_DHCP}
        report_interval = self.conf.AGENT.report_interval
        # First report uses a blocking call; later ones may be casts.
        self.use_call = True
        if report_interval:
            self.heartbeat = loopingcall.FixedIntervalLoopingCall(
                self._report_state)
            self.heartbeat.start(interval=report_interval)
    def _report_state(self):
        """Send one state report; starts the agent loop on the first
        successful report (or when reporting is unsupported)."""
        try:
            self.agent_state.get('configurations').update(
                self.cache.get_state())
            ctx = context.get_admin_context_without_session()
            self.state_rpc.report_state(ctx, self.agent_state, self.use_call)
            self.use_call = False
        except AttributeError:
            # This means the server does not support report_state
            LOG.warn(_LW("Neutron server does not support state report."
                         " State report for this agent will be disabled."))
            self.heartbeat.stop()
            self.run()
            return
        except Exception:
            LOG.exception(_LE("Failed reporting state!"))
            return
        # 'start_flag' is only present on the first report: kick off the
        # main agent loop exactly once, after the server knows about us.
        if self.agent_state.pop('start_flag', None):
            self.run()
    def agent_updated(self, context, payload):
        """Handle the agent_updated notification event."""
        self.schedule_resync(_("Agent updated: %(payload)s") %
                             {"payload": payload})
        LOG.info(_LI("agent_updated by server side %s!"), payload)
    def after_start(self):
        # Unlike the base class, do not call run() here: the first
        # successful _report_state() triggers it instead.
        LOG.info(_LI("DHCP agent started"))
| yanheven/neutron | neutron/agent/dhcp/agent.py | Python | apache-2.0 | 24,464 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.